Detailed changes
@@ -30,6 +30,7 @@ jobs:
run_tests: ${{ steps.filter.outputs.run_tests }}
run_license: ${{ steps.filter.outputs.run_license }}
run_docs: ${{ steps.filter.outputs.run_docs }}
+ run_nix: ${{ steps.filter.outputs.run_nix }}
runs-on:
- ubuntu-latest
steps:
@@ -69,6 +70,12 @@ jobs:
else
echo "run_license=false" >> $GITHUB_OUTPUT
fi
+ NIX_REGEX='^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)'
+ if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep "$NIX_REGEX") ]]; then
+ echo "run_nix=true" >> $GITHUB_OUTPUT
+ else
+ echo "run_nix=false" >> $GITHUB_OUTPUT
+ fi
migration_checks:
name: Check Postgres and Protobuf migrations, mergability
@@ -746,7 +753,10 @@ jobs:
nix-build:
name: Build with Nix
uses: ./.github/workflows/nix.yml
- if: github.repository_owner == 'zed-industries' && contains(github.event.pull_request.labels.*.name, 'run-nix')
+ needs: [job_spec]
+ if: github.repository_owner == 'zed-industries' &&
+ (contains(github.event.pull_request.labels.*.name, 'run-nix') ||
+ needs.job_spec.outputs.run_nix == 'true')
secrets: inherit
with:
flake-output: debug
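
Taken together, the two hunks above make the Nix build path-triggered as well as label-triggered: job_spec exports a run_nix output from a prefix match on the changed files, and nix-build now also runs when that output is true. A rough sketch of the same prefix check in Rust, only to illustrate which paths the NIX_REGEX pattern catches (the real check is the bash `git diff --name-only | grep` above; function and path names here are illustrative):

// Illustrative only: mirrors the anchored NIX_REGEX alternation with plain
// prefix checks instead of a regex engine (the pattern's unescaped dots make
// no practical difference for these paths).
fn triggers_nix_build(changed_path: &str) -> bool {
    const PREFIXES: [&str; 5] = [
        "nix/",
        "flake.",
        "Cargo.",
        "rust-toolchain.toml",
        ".cargo/config.toml",
    ];
    PREFIXES.iter().any(|&prefix| changed_path.starts_with(prefix))
}

fn main() {
    assert!(triggers_nix_build("flake.lock"));
    assert!(triggers_nix_build("Cargo.lock"));
    assert!(!triggers_nix_build("crates/gpui/src/app.rs"));
}
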
@@ -1911,7 +1911,6 @@ dependencies = [
"serde_json",
"strum 0.27.1",
"thiserror 2.0.12",
- "tokio",
"workspace-hack",
]
@@ -2077,7 +2076,7 @@ dependencies = [
[[package]]
name = "blade-graphics"
version = "0.6.0"
-source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
+source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
dependencies = [
"ash",
"ash-window",
@@ -2110,7 +2109,7 @@ dependencies = [
[[package]]
name = "blade-macros"
version = "0.3.0"
-source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
+source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
dependencies = [
"proc-macro2",
"quote",
@@ -2120,7 +2119,7 @@ dependencies = [
[[package]]
name = "blade-util"
version = "0.2.0"
-source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
+source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
dependencies = [
"blade-graphics",
"bytemuck",
@@ -4133,7 +4132,7 @@ dependencies = [
[[package]]
name = "dap-types"
version = "0.0.1"
-source = "git+https://github.com/zed-industries/dap-types?rev=b40956a7f4d1939da67429d941389ee306a3a308#b40956a7f4d1939da67429d941389ee306a3a308"
+source = "git+https://github.com/zed-industries/dap-types?rev=7f39295b441614ca9dbf44293e53c32f666897f9#7f39295b441614ca9dbf44293e53c32f666897f9"
dependencies = [
"schemars",
"serde",
@@ -4148,6 +4147,8 @@ dependencies = [
"async-trait",
"collections",
"dap",
+ "dotenvy",
+ "fs",
"futures 0.3.31",
"gpui",
"json_dotpath",
@@ -4676,12 +4677,6 @@ dependencies = [
"syn 2.0.101",
]
-[[package]]
-name = "dotenv"
-version = "0.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
-
[[package]]
name = "dotenvy"
version = "0.15.7"
@@ -4814,6 +4809,7 @@ dependencies = [
"pretty_assertions",
"project",
"rand 0.8.5",
+ "regex",
"release_channel",
"rpc",
"schemars",
@@ -4834,6 +4830,7 @@ dependencies = [
"tree-sitter-python",
"tree-sitter-rust",
"tree-sitter-typescript",
+ "tree-sitter-yaml",
"ui",
"unicode-script",
"unicode-segmentation",
@@ -5114,7 +5111,7 @@ dependencies = [
"collections",
"debug_adapter_extension",
"dirs 4.0.0",
- "dotenv",
+ "dotenvy",
"env_logger 0.11.8",
"extension",
"fs",
@@ -8847,6 +8844,7 @@ dependencies = [
"http_client",
"imara-diff",
"indoc",
+ "inventory",
"itertools 0.14.0",
"log",
"lsp",
@@ -8945,8 +8943,10 @@ dependencies = [
"aws-credential-types",
"aws_http_client",
"bedrock",
+ "chrono",
"client",
"collections",
+ "component",
"copilot",
"credentials_provider",
"deepseek",
@@ -12286,6 +12286,7 @@ dependencies = [
"language",
"log",
"lsp",
+ "markdown",
"node_runtime",
"parking_lot",
"pathdiff",
@@ -14080,12 +14081,13 @@ dependencies = [
[[package]]
name = "schemars"
-version = "0.8.22"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615"
+checksum = "fe8c9d1c68d67dd9f97ecbc6f932b60eb289c5dbddd8aa1405484a8fd2fcd984"
dependencies = [
"dyn-clone",
"indexmap",
+ "ref-cast",
"schemars_derive",
"serde",
"serde_json",
@@ -14093,9 +14095,9 @@ dependencies = [
[[package]]
name = "schemars_derive"
-version = "0.8.22"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d"
+checksum = "6ca9fcb757952f8e8629b9ab066fc62da523c46c2b247b1708a3be06dd82530b"
dependencies = [
"proc-macro2",
"quote",
@@ -14594,16 +14596,29 @@ dependencies = [
name = "settings_ui"
version = "0.1.0"
dependencies = [
+ "anyhow",
+ "collections",
+ "command_palette",
"command_palette_hooks",
+ "component",
+ "db",
"editor",
"feature_flags",
"fs",
+ "fuzzy",
"gpui",
+ "language",
"log",
+ "menu",
+ "paths",
+ "project",
"schemars",
+ "search",
"serde",
"settings",
"theme",
+ "tree-sitter-json",
+ "tree-sitter-rust",
"ui",
"util",
"workspace",
@@ -16037,6 +16052,7 @@ dependencies = [
"futures 0.3.31",
"gpui",
"indexmap",
+ "inventory",
"log",
"palette",
"parking_lot",
@@ -17359,6 +17375,7 @@ dependencies = [
"rand 0.8.5",
"regex",
"rust-embed",
+ "schemars",
"serde",
"serde_json",
"serde_json_lenient",
@@ -19956,7 +19973,7 @@ dependencies = [
[[package]]
name = "zed"
-version = "0.194.0"
+version = "0.195.0"
dependencies = [
"activity_indicator",
"agent",
@@ -20155,9 +20172,9 @@ dependencies = [
[[package]]
name = "zed_llm_client"
-version = "0.8.4"
+version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de7d9523255f4e00ee3d0918e5407bd252d798a4a8e71f6d37f23317a1588203"
+checksum = "c740e29260b8797ad252c202ea09a255b3cbc13f30faaf92fb6b2490336106e0"
dependencies = [
"anyhow",
"serde",
@@ -427,9 +427,9 @@ aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
base64 = "0.22"
bitflags = "2.6.0"
-blade-graphics = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
-blade-macros = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
-blade-util = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
+blade-graphics = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
+blade-macros = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
+blade-util = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blake3 = "1.5.3"
bytes = "1.0"
cargo_metadata = "0.19"
@@ -446,12 +446,12 @@ core-video = { version = "0.4.3", features = ["metal"] }
cpal = "0.16"
criterion = { version = "0.5", features = ["html_reports"] }
ctor = "0.4.0"
-dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "b40956a7f4d1939da67429d941389ee306a3a308" }
+dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "7f39295b441614ca9dbf44293e53c32f666897f9" }
dashmap = "6.0"
derive_more = "0.99.17"
dirs = "4.0"
documented = "0.9.1"
-dotenv = "0.15.0"
+dotenvy = "0.15.0"
ec4rs = "1.1"
emojis = "0.6.1"
env_logger = "0.11"
@@ -482,7 +482,7 @@ json_dotpath = "1.1"
jsonschema = "0.30.0"
jsonwebtoken = "9.3"
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
-jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
+jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
@@ -493,7 +493,7 @@ metal = "0.29"
moka = { version = "0.12.10", features = ["sync"] }
naga = { version = "25.0", features = ["wgsl-in"] }
nanoid = "0.4"
-nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
+nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
nix = "0.29"
num-format = "0.4.4"
objc = "0.2"
@@ -533,7 +533,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "951c77
"stream",
] }
rsa = "0.9.6"
-runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
+runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
"async-dispatcher-runtime",
] }
rust-embed = { version = "8.4", features = ["include-exclude"] }
@@ -542,7 +542,7 @@ rustc-hash = "2.1.0"
rustls = { version = "0.23.26" }
rustls-platform-verifier = "0.5.0"
scap = { git = "https://github.com/zed-industries/scap", rev = "08f0a01417505cc0990b9931a37e5120db92e0d0", default-features = false }
-schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] }
+schemars = { version = "1.0", features = ["indexmap2"] }
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
@@ -627,7 +627,7 @@ wasmtime = { version = "29", default-features = false, features = [
wasmtime-wasi = "29"
which = "6.0.0"
workspace-hack = "0.1.0"
-zed_llm_client = "0.8.4"
+zed_llm_client = "= 0.8.5"
zstd = "0.11"
[workspace.dependencies.async-stripe]
@@ -34,7 +34,7 @@
"ctrl-q": "zed::Quit",
"f4": "debugger::Start",
"shift-f5": "debugger::Stop",
- "ctrl-shift-f5": "debugger::Restart",
+ "ctrl-shift-f5": "debugger::RerunSession",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"ctrl-f11": "debugger::StepInto",
@@ -557,6 +557,13 @@
"ctrl-b": "workspace::ToggleLeftDock",
"ctrl-j": "workspace::ToggleBottomDock",
"ctrl-alt-y": "workspace::CloseAllDocks",
+ "ctrl-alt-0": "workspace::ResetActiveDockSize",
+ // For 0px parameter, uses UI font size value.
+ "ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
+ "ctrl-alt-=": ["workspace::IncreaseActiveDockSize", { "px": 0 }],
+ "ctrl-alt-)": "workspace::ResetOpenDocksSize",
+ "ctrl-alt-_": ["workspace::DecreaseOpenDocksSize", { "px": 0 }],
+ "ctrl-alt-+": ["workspace::IncreaseOpenDocksSize", { "px": 0 }],
"shift-find": "pane::DeploySearch",
"ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
@@ -598,7 +605,9 @@
// "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
// or by tag:
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
- "f5": "debugger::RerunLastSession"
+ "f5": "debugger::Rerun",
+ "ctrl-f4": "workspace::CloseActiveDock",
+ "ctrl-w": "workspace::CloseActiveDock"
}
},
{
@@ -701,6 +710,13 @@
"pagedown": "editor::ContextMenuLast"
}
},
+ {
+ "context": "Editor && showing_signature_help && !showing_completions",
+ "bindings": {
+ "up": "editor::SignatureHelpPrevious",
+ "down": "editor::SignatureHelpNext"
+ }
+ },
// Custom bindings
{
"bindings": {
@@ -1067,5 +1083,19 @@
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
+ },
+ {
+ "context": "MarkdownPreview",
+ "bindings": {
+ "pageup": "markdown::MovePageUp",
+ "pagedown": "markdown::MovePageDown"
+ }
+ },
+ {
+ "context": "KeymapEditor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-f": "search::FocusSearch"
+ }
}
]
@@ -5,10 +5,10 @@
"bindings": {
"f4": "debugger::Start",
"shift-f5": "debugger::Stop",
- "shift-cmd-f5": "debugger::Restart",
+ "shift-cmd-f5": "debugger::RerunSession",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
- "f11": "debugger::StepInto",
+ "ctrl-f11": "debugger::StepInto",
"shift-f11": "debugger::StepOut",
"home": "menu::SelectFirst",
"shift-pageup": "menu::SelectFirst",
@@ -624,6 +624,13 @@
"cmd-r": "workspace::ToggleRightDock",
"cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::CloseAllDocks",
+ // For 0px parameter, uses UI font size value.
+ "ctrl-alt-0": "workspace::ResetActiveDockSize",
+ "ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
+ "ctrl-alt-=": ["workspace::IncreaseActiveDockSize", { "px": 0 }],
+ "ctrl-alt-)": "workspace::ResetOpenDocksSize",
+ "ctrl-alt-_": ["workspace::DecreaseOpenDocksSize", { "px": 0 }],
+ "ctrl-alt-+": ["workspace::IncreaseOpenDocksSize", { "px": 0 }],
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"cmd-shift-t": "pane::ReopenClosedItem",
@@ -652,7 +659,8 @@
"cmd-k shift-up": "workspace::SwapPaneUp",
"cmd-k shift-down": "workspace::SwapPaneDown",
"cmd-shift-x": "zed::Extensions",
- "f5": "debugger::RerunLastSession"
+ "f5": "debugger::Rerun",
+ "cmd-w": "workspace::CloseActiveDock"
}
},
{
@@ -766,6 +774,13 @@
"pagedown": "editor::ContextMenuLast"
}
},
+ {
+ "context": "Editor && showing_signature_help && !showing_completions",
+ "bindings": {
+ "up": "editor::SignatureHelpPrevious",
+ "down": "editor::SignatureHelpNext"
+ }
+ },
// Custom bindings
{
"use_key_equivalents": true,
@@ -1167,5 +1182,19 @@
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
+ },
+ {
+ "context": "MarkdownPreview",
+ "bindings": {
+ "pageup": "markdown::MovePageUp",
+ "pagedown": "markdown::MovePageDown"
+ }
+ },
+ {
+ "context": "KeymapEditor",
+ "use_key_equivalents": true,
+ "bindings": {
+ "cmd-f": "search::FocusSearch"
+ }
}
]
@@ -98,6 +98,13 @@
"ctrl-n": "editor::ContextMenuNext"
}
},
+ {
+ "context": "Editor && showing_signature_help && !showing_completions",
+ "bindings": {
+ "ctrl-p": "editor::SignatureHelpPrevious",
+ "ctrl-n": "editor::SignatureHelpNext"
+ }
+ },
{
"context": "Workspace",
"bindings": {
@@ -98,6 +98,13 @@
"ctrl-n": "editor::ContextMenuNext"
}
},
+ {
+ "context": "Editor && showing_signature_help && !showing_completions",
+ "bindings": {
+ "ctrl-p": "editor::SignatureHelpPrevious",
+ "ctrl-n": "editor::SignatureHelpNext"
+ }
+ },
{
"context": "Workspace",
"bindings": {
@@ -210,7 +210,8 @@
"ctrl-w space": "editor::OpenExcerptsSplit",
"ctrl-w g space": "editor::OpenExcerptsSplit",
"ctrl-6": "pane::AlternateFile",
- "ctrl-^": "pane::AlternateFile"
+ "ctrl-^": "pane::AlternateFile",
+ ".": "vim::Repeat"
}
},
{
@@ -219,7 +220,6 @@
"ctrl-[": "editor::Cancel",
"escape": "editor::Cancel",
":": "command_palette::Toggle",
- ".": "vim::Repeat",
"c": "vim::PushChange",
"shift-c": "vim::ChangeToEndOfLine",
"d": "vim::PushDelete",
@@ -477,6 +477,13 @@
"ctrl-n": "editor::ShowWordCompletions"
}
},
+ {
+ "context": "vim_mode == insert && showing_signature_help && !showing_completions",
+ "bindings": {
+ "ctrl-p": "editor::SignatureHelpPrevious",
+ "ctrl-n": "editor::SignatureHelpNext"
+ }
+ },
{
"context": "vim_mode == replace",
"bindings": {
@@ -849,6 +856,25 @@
"shift-u": "git::UnstageAll"
}
},
+ {
+ "context": "Editor && mode == auto_height && VimControl",
+ "bindings": {
+ // TODO: Implement search
+ "/": null,
+ "?": null,
+ "#": null,
+ "*": null,
+ "n": null,
+ "shift-n": null
+ }
+ },
+ {
+ "context": "GitCommit > Editor && VimControl && vim_mode == normal",
+ "bindings": {
+ "ctrl-c": "menu::Cancel",
+ "escape": "menu::Cancel"
+ }
+ },
{
"context": "Editor && edit_prediction",
"bindings": {
@@ -860,14 +886,7 @@
{
"context": "MessageEditor > Editor && VimControl",
"bindings": {
- "enter": "agent::Chat",
- // TODO: Implement search
- "/": null,
- "?": null,
- "#": null,
- "*": null,
- "n": null,
- "shift-n": null
+ "enter": "agent::Chat"
}
},
{
@@ -617,6 +617,8 @@
// 3. Mark files with errors and warnings:
// "all"
"show_diagnostics": "all",
+ // Whether to stick parent directories at top of the project panel.
+ "sticky_scroll": true,
// Settings related to indent guides in the project panel.
"indent_guides": {
// When to show indent guides in the project panel.
@@ -746,8 +748,6 @@
"default_width": 380
},
"agent": {
- // Version of this setting.
- "version": "2",
// Whether the agent is enabled.
"enabled": true,
/// What completion mode to start new threads in, if available. Can be 'normal' or 'burn'.
@@ -1292,6 +1292,8 @@
// Whether or not selecting text in the terminal will automatically
// copy to the system clipboard.
"copy_on_select": false,
+ // Whether to keep the text selection after copying it to the clipboard
+ "keep_selection_on_copy": false,
// Whether to show the terminal button in the status bar
"button": true,
// Any key-value pairs added to this list will be added to the terminal's
@@ -1656,7 +1658,6 @@
// Different settings for specific language models.
"language_models": {
"anthropic": {
- "version": "1",
"api_url": "https://api.anthropic.com"
},
"google": {
@@ -1666,7 +1667,6 @@
"api_url": "http://localhost:11434"
},
"openai": {
- "version": "1",
"api_url": "https://api.openai.com/v1"
},
"open_router": {
@@ -1784,7 +1784,8 @@
// `socks5h`. `http` will be used when no scheme is specified.
//
// By default no proxy will be used, or Zed will try get proxy settings from
- // environment variables.
+ // environment variables. If certain hosts should not be proxied,
+ // set the `no_proxy` environment variable and provide a comma-separated list.
//
// Examples:
// - "proxy": "socks5h://localhost:10808"
@@ -31,7 +31,13 @@ use workspace::{StatusItemView, Workspace, item::ItemHandle};
const GIT_OPERATION_DELAY: Duration = Duration::from_millis(0);
-actions!(activity_indicator, [ShowErrorMessage]);
+actions!(
+ activity_indicator,
+ [
+ /// Displays error messages from language servers in the status bar.
+ ShowErrorMessage
+ ]
+);
pub enum Event {
ShowStatus {
@@ -1,7 +1,7 @@
use std::sync::Arc;
use agent_settings::{AgentProfileId, AgentProfileSettings, AgentSettings};
-use assistant_tool::{Tool, ToolSource, ToolWorkingSet};
+use assistant_tool::{Tool, ToolSource, ToolWorkingSet, UniqueToolName};
use collections::IndexMap;
use convert_case::{Case, Casing};
use fs::Fs;
@@ -72,7 +72,7 @@ impl AgentProfile {
&self.id
}
- pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
+ pub fn enabled_tools(&self, cx: &App) -> Vec<(UniqueToolName, Arc<dyn Tool>)> {
let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
return Vec::new();
};
@@ -81,7 +81,7 @@ impl AgentProfile {
.read(cx)
.tools(cx)
.into_iter()
- .filter(|tool| Self::is_enabled(settings, tool.source(), tool.name()))
+ .filter(|(_, tool)| Self::is_enabled(settings, tool.source(), tool.name()))
.collect()
}
@@ -96,16 +96,11 @@ impl AgentProfile {
fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool {
match source {
ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false),
- ToolSource::ContextServer { id } => {
- if settings.enable_all_context_servers {
- return true;
- }
-
- let Some(preset) = settings.context_servers.get(id.as_ref()) else {
- return false;
- };
- *preset.tools.get(name.as_str()).unwrap_or(&false)
- }
+ ToolSource::ContextServer { id } => settings
+ .context_servers
+ .get(id.as_ref())
+ .and_then(|preset| preset.tools.get(name.as_str()).copied())
+ .unwrap_or(settings.enable_all_context_servers),
}
}
}
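
The rewritten `is_enabled` arm also shifts precedence slightly: a per-server preset entry now wins over `enable_all_context_servers`, whereas the old code returned true for every context-server tool as soon as the global flag was set. A minimal standalone sketch of the new precedence, using simplified stand-in types and made-up server/tool names rather than the real `AgentProfileSettings`:

use std::collections::HashMap;

// Hypothetical, simplified stand-ins for the settings types used above.
struct ContextServerPreset {
    tools: HashMap<String, bool>,
}

struct ProfileSettings {
    enable_all_context_servers: bool,
    context_servers: HashMap<String, ContextServerPreset>,
}

/// A context-server tool is enabled if its preset mentions it explicitly;
/// otherwise the global `enable_all_context_servers` flag decides.
fn context_server_tool_enabled(settings: &ProfileSettings, server_id: &str, tool: &str) -> bool {
    settings
        .context_servers
        .get(server_id)
        .and_then(|preset| preset.tools.get(tool).copied())
        .unwrap_or(settings.enable_all_context_servers)
}

fn main() {
    let mut tools = HashMap::new();
    tools.insert("fetch".to_string(), false); // explicitly disabled in the preset
    let settings = ProfileSettings {
        enable_all_context_servers: true,
        context_servers: HashMap::from([(
            "my-server".to_string(),
            ContextServerPreset { tools },
        )]),
    };
    // The preset wins over the global flag:
    assert!(!context_server_tool_enabled(&settings, "my-server", "fetch"));
    // Tools the preset doesn't mention fall back to the global flag:
    assert!(context_server_tool_enabled(&settings, "my-server", "read_file"));
}
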
@@ -142,7 +137,7 @@ mod tests {
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
- .map(|tool| tool.name())
+ .map(|(_, tool)| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
@@ -179,7 +174,7 @@ mod tests {
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
- .map(|tool| tool.name())
+ .map(|(_, tool)| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
@@ -212,7 +207,7 @@ mod tests {
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
- .map(|tool| tool.name())
+ .map(|(_, tool)| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
@@ -272,10 +267,10 @@ mod tests {
}
fn default_tool_set(cx: &mut TestAppContext) -> Entity<ToolWorkingSet> {
- cx.new(|_| {
+ cx.new(|cx| {
let mut tool_set = ToolWorkingSet::default();
- tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")));
- tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")));
+ tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")), cx);
+ tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")), cx);
tool_set
})
}
@@ -13,7 +13,7 @@ use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, AnyToolCard, Tool, ToolWorkingSet};
use chrono::{DateTime, Utc};
use client::{ModelRequestUsage, RequestUsage};
-use collections::{HashMap, HashSet};
+use collections::HashMap;
use feature_flags::{self, FeatureFlagAppExt};
use futures::{FutureExt, StreamExt as _, future::Shared};
use git::repository::DiffType;
@@ -23,11 +23,10 @@ use gpui::{
};
use language_model::{
ConfiguredModel, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
- LanguageModelId, LanguageModelKnownError, LanguageModelRegistry, LanguageModelRequest,
- LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
- LanguageModelToolResultContent, LanguageModelToolUseId, MessageContent,
- ModelRequestLimitReachedError, PaymentRequiredError, Role, SelectedModel, StopReason,
- TokenUsage,
+ LanguageModelId, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
+ LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolResultContent,
+ LanguageModelToolUseId, MessageContent, ModelRequestLimitReachedError, PaymentRequiredError,
+ Role, SelectedModel, StopReason, TokenUsage,
};
use postage::stream::Stream as _;
use project::{
@@ -961,13 +960,14 @@ impl Thread {
model: Arc<dyn LanguageModel>,
) -> Vec<LanguageModelRequestTool> {
if model.supports_tools() {
- resolve_tool_name_conflicts(self.profile.enabled_tools(cx).as_slice())
+ self.profile
+ .enabled_tools(cx)
.into_iter()
.filter_map(|(name, tool)| {
// Skip tools that cannot be supported
let input_schema = tool.input_schema(model.tool_input_format()).ok()?;
Some(LanguageModelRequestTool {
- name,
+ name: name.into(),
description: tool.description(),
input_schema,
})
@@ -1531,82 +1531,7 @@ impl Thread {
}
thread.update(cx, |thread, cx| {
- let event = match event {
- Ok(event) => event,
- Err(error) => {
- match error {
- LanguageModelCompletionError::RateLimitExceeded { retry_after } => {
- anyhow::bail!(LanguageModelKnownError::RateLimitExceeded { retry_after });
- }
- LanguageModelCompletionError::Overloaded => {
- anyhow::bail!(LanguageModelKnownError::Overloaded);
- }
- LanguageModelCompletionError::ApiInternalServerError =>{
- anyhow::bail!(LanguageModelKnownError::ApiInternalServerError);
- }
- LanguageModelCompletionError::PromptTooLarge { tokens } => {
- let tokens = tokens.unwrap_or_else(|| {
- // We didn't get an exact token count from the API, so fall back on our estimate.
- thread.total_token_usage()
- .map(|usage| usage.total)
- .unwrap_or(0)
- // We know the context window was exceeded in practice, so if our estimate was
- // lower than max tokens, the estimate was wrong; return that we exceeded by 1.
- .max(model.max_token_count().saturating_add(1))
- });
-
- anyhow::bail!(LanguageModelKnownError::ContextWindowLimitExceeded { tokens })
- }
- LanguageModelCompletionError::ApiReadResponseError(io_error) => {
- anyhow::bail!(LanguageModelKnownError::ReadResponseError(io_error));
- }
- LanguageModelCompletionError::UnknownResponseFormat(error) => {
- anyhow::bail!(LanguageModelKnownError::UnknownResponseFormat(error));
- }
- LanguageModelCompletionError::HttpResponseError { status, ref body } => {
- if let Some(known_error) = LanguageModelKnownError::from_http_response(status, body) {
- anyhow::bail!(known_error);
- } else {
- return Err(error.into());
- }
- }
- LanguageModelCompletionError::DeserializeResponse(error) => {
- anyhow::bail!(LanguageModelKnownError::DeserializeResponse(error));
- }
- LanguageModelCompletionError::BadInputJson {
- id,
- tool_name,
- raw_input: invalid_input_json,
- json_parse_error,
- } => {
- thread.receive_invalid_tool_json(
- id,
- tool_name,
- invalid_input_json,
- json_parse_error,
- window,
- cx,
- );
- return Ok(());
- }
- // These are all errors we can't automatically attempt to recover from (e.g. by retrying)
- err @ LanguageModelCompletionError::BadRequestFormat |
- err @ LanguageModelCompletionError::AuthenticationError |
- err @ LanguageModelCompletionError::PermissionError |
- err @ LanguageModelCompletionError::ApiEndpointNotFound |
- err @ LanguageModelCompletionError::SerializeRequest(_) |
- err @ LanguageModelCompletionError::BuildRequestBody(_) |
- err @ LanguageModelCompletionError::HttpSend(_) => {
- anyhow::bail!(err);
- }
- LanguageModelCompletionError::Other(error) => {
- return Err(error);
- }
- }
- }
- };
-
- match event {
+ match event? {
LanguageModelCompletionEvent::StartMessage { .. } => {
request_assistant_message_id =
Some(thread.insert_assistant_message(
@@ -1683,9 +1608,7 @@ impl Thread {
};
}
}
- LanguageModelCompletionEvent::RedactedThinking {
- data
- } => {
+ LanguageModelCompletionEvent::RedactedThinking { data } => {
thread.received_chunk();
if let Some(last_message) = thread.messages.last_mut() {
@@ -1734,6 +1657,21 @@ impl Thread {
});
}
}
+ LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id,
+ tool_name,
+ raw_input: invalid_input_json,
+ json_parse_error,
+ } => {
+ thread.receive_invalid_tool_json(
+ id,
+ tool_name,
+ invalid_input_json,
+ json_parse_error,
+ window,
+ cx,
+ );
+ }
LanguageModelCompletionEvent::StatusUpdate(status_update) => {
if let Some(completion) = thread
.pending_completions
@@ -1741,23 +1679,34 @@ impl Thread {
.find(|completion| completion.id == pending_completion_id)
{
match status_update {
- CompletionRequestStatus::Queued {
- position,
- } => {
- completion.queue_state = QueueState::Queued { position };
+ CompletionRequestStatus::Queued { position } => {
+ completion.queue_state =
+ QueueState::Queued { position };
}
CompletionRequestStatus::Started => {
- completion.queue_state = QueueState::Started;
+ completion.queue_state = QueueState::Started;
}
CompletionRequestStatus::Failed {
- code, message, request_id
+ code,
+ message,
+ request_id: _,
+ retry_after,
} => {
- anyhow::bail!("completion request failed. request_id: {request_id}, code: {code}, message: {message}");
+ return Err(
+ LanguageModelCompletionError::from_cloud_failure(
+ model.upstream_provider_name(),
+ code,
+ message,
+ retry_after.map(Duration::from_secs_f64),
+ ),
+ );
}
- CompletionRequestStatus::UsageUpdated {
- amount, limit
- } => {
- thread.update_model_request_usage(amount as u32, limit, cx);
+ CompletionRequestStatus::UsageUpdated { amount, limit } => {
+ thread.update_model_request_usage(
+ amount as u32,
+ limit,
+ cx,
+ );
}
CompletionRequestStatus::ToolUseLimitReached => {
thread.tool_use_limit_reached = true;
@@ -1808,10 +1757,11 @@ impl Thread {
Ok(stop_reason) => {
match stop_reason {
StopReason::ToolUse => {
- let tool_uses = thread.use_pending_tools(window, model.clone(), cx);
+ let tool_uses =
+ thread.use_pending_tools(window, model.clone(), cx);
cx.emit(ThreadEvent::UsePendingTools { tool_uses });
}
- StopReason::EndTurn | StopReason::MaxTokens => {
+ StopReason::EndTurn | StopReason::MaxTokens => {
thread.project.update(cx, |project, cx| {
project.set_agent_location(None, cx);
});
@@ -1827,7 +1777,9 @@ impl Thread {
{
let mut messages_to_remove = Vec::new();
- for (ix, message) in thread.messages.iter().enumerate().rev() {
+ for (ix, message) in
+ thread.messages.iter().enumerate().rev()
+ {
messages_to_remove.push(message.id);
if message.role == Role::User {
@@ -1835,7 +1787,9 @@ impl Thread {
break;
}
- if let Some(prev_message) = thread.messages.get(ix - 1) {
+ if let Some(prev_message) =
+ thread.messages.get(ix - 1)
+ {
if prev_message.role == Role::Assistant {
break;
}
@@ -1850,14 +1804,16 @@ impl Thread {
cx.emit(ThreadEvent::ShowError(ThreadError::Message {
header: "Language model refusal".into(),
- message: "Model refused to generate content for safety reasons.".into(),
+ message:
+ "Model refused to generate content for safety reasons."
+ .into(),
}));
}
}
// We successfully completed, so cancel any remaining retries.
thread.retry_state = None;
- },
+ }
Err(error) => {
thread.project.update(cx, |project, cx| {
project.set_agent_location(None, cx);
@@ -1883,26 +1839,38 @@ impl Thread {
cx.emit(ThreadEvent::ShowError(
ThreadError::ModelRequestLimitReached { plan: error.plan },
));
- } else if let Some(known_error) =
- error.downcast_ref::<LanguageModelKnownError>()
+ } else if let Some(completion_error) =
+ error.downcast_ref::<LanguageModelCompletionError>()
{
- match known_error {
- LanguageModelKnownError::ContextWindowLimitExceeded { tokens } => {
+ use LanguageModelCompletionError::*;
+ match &completion_error {
+ PromptTooLarge { tokens, .. } => {
+ let tokens = tokens.unwrap_or_else(|| {
+ // We didn't get an exact token count from the API, so fall back on our estimate.
+ thread
+ .total_token_usage()
+ .map(|usage| usage.total)
+ .unwrap_or(0)
+ // We know the context window was exceeded in practice, so if our estimate was
+ // lower than max tokens, the estimate was wrong; return that we exceeded by 1.
+ .max(model.max_token_count().saturating_add(1))
+ });
thread.exceeded_window_error = Some(ExceededWindowError {
model_id: model.id(),
- token_count: *tokens,
+ token_count: tokens,
});
cx.notify();
}
- LanguageModelKnownError::RateLimitExceeded { retry_after } => {
- let provider_name = model.provider_name();
- let error_message = format!(
- "{}'s API rate limit exceeded",
- provider_name.0.as_ref()
- );
-
+ RateLimitExceeded {
+ retry_after: Some(retry_after),
+ ..
+ }
+ | ServerOverloaded {
+ retry_after: Some(retry_after),
+ ..
+ } => {
thread.handle_rate_limit_error(
- &error_message,
+ &completion_error,
*retry_after,
model.clone(),
intent,
@@ -1911,15 +1879,9 @@ impl Thread {
);
retry_scheduled = true;
}
- LanguageModelKnownError::Overloaded => {
- let provider_name = model.provider_name();
- let error_message = format!(
- "{}'s API servers are overloaded right now",
- provider_name.0.as_ref()
- );
-
+ RateLimitExceeded { .. } | ServerOverloaded { .. } => {
retry_scheduled = thread.handle_retryable_error(
- &error_message,
+ &completion_error,
model.clone(),
intent,
window,
@@ -1929,15 +1891,11 @@ impl Thread {
emit_generic_error(error, cx);
}
}
- LanguageModelKnownError::ApiInternalServerError => {
- let provider_name = model.provider_name();
- let error_message = format!(
- "{}'s API server reported an internal server error",
- provider_name.0.as_ref()
- );
-
+ ApiInternalServerError { .. }
+ | ApiReadResponseError { .. }
+ | HttpSend { .. } => {
retry_scheduled = thread.handle_retryable_error(
- &error_message,
+ &completion_error,
model.clone(),
intent,
window,
@@ -1947,12 +1905,16 @@ impl Thread {
emit_generic_error(error, cx);
}
}
- LanguageModelKnownError::ReadResponseError(_) |
- LanguageModelKnownError::DeserializeResponse(_) |
- LanguageModelKnownError::UnknownResponseFormat(_) => {
- // In the future we will attempt to re-roll response, but only once
- emit_generic_error(error, cx);
- }
+ NoApiKey { .. }
+ | HttpResponseError { .. }
+ | BadRequestFormat { .. }
+ | AuthenticationError { .. }
+ | PermissionError { .. }
+ | ApiEndpointNotFound { .. }
+ | SerializeRequest { .. }
+ | BuildRequestBody { .. }
+ | DeserializeResponse { .. }
+ | Other { .. } => emit_generic_error(error, cx),
}
} else {
emit_generic_error(error, cx);
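
With `LanguageModelKnownError` gone, the retry policy is keyed directly off `LanguageModelCompletionError` variants: rate-limit and overload errors that carry a server-supplied `retry_after` get a single delayed retry, the same errors without a delay plus internal-server, read-response, and send failures go through the bounded backoff path, and the remaining variants fall through to the generic error banner (prompt-too-large, payment, and plan-limit errors keep their dedicated handling above). A toy classification mirroring those match arms, with a hypothetical simplified enum rather than the real type:

use std::time::Duration;

// Hypothetical simplified mirror of LanguageModelCompletionError's variants;
// the real enum carries provider names, messages, and more cases.
enum CompletionError {
    RateLimitExceeded { retry_after: Option<Duration> },
    ServerOverloaded { retry_after: Option<Duration> },
    ApiInternalServerError,
    ApiReadResponseError,
    HttpSend,
    Other,
}

enum RetryDecision {
    /// One retry after the delay the server asked for.
    RetryAfter(Duration),
    /// Bounded retries on the thread's own backoff schedule.
    RetryWithBackoff,
    /// Not retryable; surface the error to the user.
    GiveUp,
}

fn classify(error: &CompletionError) -> RetryDecision {
    use CompletionError::*;
    match error {
        RateLimitExceeded { retry_after: Some(delay) }
        | ServerOverloaded { retry_after: Some(delay) } => RetryDecision::RetryAfter(*delay),
        RateLimitExceeded { .. }
        | ServerOverloaded { .. }
        | ApiInternalServerError
        | ApiReadResponseError
        | HttpSend => RetryDecision::RetryWithBackoff,
        Other => RetryDecision::GiveUp,
    }
}

fn main() {
    let error = CompletionError::RateLimitExceeded {
        retry_after: Some(Duration::from_secs(30)),
    };
    match classify(&error) {
        RetryDecision::RetryAfter(delay) => println!("retrying once in {}s", delay.as_secs()),
        RetryDecision::RetryWithBackoff => println!("retrying with backoff"),
        RetryDecision::GiveUp => println!("giving up"),
    }
}
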
@@ -2084,7 +2046,7 @@ impl Thread {
fn handle_rate_limit_error(
&mut self,
- error_message: &str,
+ error: &LanguageModelCompletionError,
retry_after: Duration,
model: Arc<dyn LanguageModel>,
intent: CompletionIntent,
@@ -2092,9 +2054,10 @@ impl Thread {
cx: &mut Context<Self>,
) {
// For rate limit errors, we only retry once with the specified duration
- let retry_message = format!(
- "{error_message}. Retrying in {} seconds…",
- retry_after.as_secs()
+ let retry_message = format!("{error}. Retrying in {} seconds…", retry_after.as_secs());
+ log::warn!(
+ "Retrying completion request in {} seconds: {error:?}",
+ retry_after.as_secs(),
);
// Add a UI-only message instead of a regular message
@@ -2127,18 +2090,18 @@ impl Thread {
fn handle_retryable_error(
&mut self,
- error_message: &str,
+ error: &LanguageModelCompletionError,
model: Arc<dyn LanguageModel>,
intent: CompletionIntent,
window: Option<AnyWindowHandle>,
cx: &mut Context<Self>,
) -> bool {
- self.handle_retryable_error_with_delay(error_message, None, model, intent, window, cx)
+ self.handle_retryable_error_with_delay(error, None, model, intent, window, cx)
}
fn handle_retryable_error_with_delay(
&mut self,
- error_message: &str,
+ error: &LanguageModelCompletionError,
custom_delay: Option<Duration>,
model: Arc<dyn LanguageModel>,
intent: CompletionIntent,
@@ -2168,8 +2131,12 @@ impl Thread {
// Add a transient message to inform the user
let delay_secs = delay.as_secs();
let retry_message = format!(
- "{}. Retrying (attempt {} of {}) in {} seconds...",
- error_message, attempt, max_attempts, delay_secs
+ "{error}. Retrying (attempt {attempt} of {max_attempts}) \
+ in {delay_secs} seconds..."
+ );
+ log::warn!(
+ "Retrying completion request (attempt {attempt} of {max_attempts}) \
+ in {delay_secs} seconds: {error:?}",
);
// Add a UI-only message instead of a regular message
@@ -2420,7 +2387,7 @@ impl Thread {
let tool_list = available_tools
.iter()
- .map(|tool| format!("- {}: {}", tool.name(), tool.description()))
+ .map(|(name, tool)| format!("- {}: {}", name, tool.description()))
.collect::<Vec<_>>()
.join("\n");
@@ -2640,7 +2607,7 @@ impl Thread {
.profile
.enabled_tools(cx)
.iter()
- .map(|tool| tool.name())
+ .map(|(name, _)| name.clone().into())
.collect();
self.message_feedback.insert(message_id, feedback);
@@ -3178,85 +3145,6 @@ struct PendingCompletion {
_task: Task<()>,
}
-/// Resolves tool name conflicts by ensuring all tool names are unique.
-///
-/// When multiple tools have the same name, this function applies the following rules:
-/// 1. Native tools always keep their original name
-/// 2. Context server tools get prefixed with their server ID and an underscore
-/// 3. All tool names are truncated to MAX_TOOL_NAME_LENGTH (64 characters)
-/// 4. If conflicts still exist after prefixing, the conflicting tools are filtered out
-///
-/// Note: This function assumes that built-in tools occur before MCP tools in the tools list.
-fn resolve_tool_name_conflicts(tools: &[Arc<dyn Tool>]) -> Vec<(String, Arc<dyn Tool>)> {
- fn resolve_tool_name(tool: &Arc<dyn Tool>) -> String {
- let mut tool_name = tool.name();
- tool_name.truncate(MAX_TOOL_NAME_LENGTH);
- tool_name
- }
-
- const MAX_TOOL_NAME_LENGTH: usize = 64;
-
- let mut duplicated_tool_names = HashSet::default();
- let mut seen_tool_names = HashSet::default();
- for tool in tools {
- let tool_name = resolve_tool_name(tool);
- if seen_tool_names.contains(&tool_name) {
- debug_assert!(
- tool.source() != assistant_tool::ToolSource::Native,
- "There are two built-in tools with the same name: {}",
- tool_name
- );
- duplicated_tool_names.insert(tool_name);
- } else {
- seen_tool_names.insert(tool_name);
- }
- }
-
- if duplicated_tool_names.is_empty() {
- return tools
- .into_iter()
- .map(|tool| (resolve_tool_name(tool), tool.clone()))
- .collect();
- }
-
- tools
- .into_iter()
- .filter_map(|tool| {
- let mut tool_name = resolve_tool_name(tool);
- if !duplicated_tool_names.contains(&tool_name) {
- return Some((tool_name, tool.clone()));
- }
- match tool.source() {
- assistant_tool::ToolSource::Native => {
- // Built-in tools always keep their original name
- Some((tool_name, tool.clone()))
- }
- assistant_tool::ToolSource::ContextServer { id } => {
- // Context server tools are prefixed with the context server ID, and truncated if necessary
- tool_name.insert(0, '_');
- if tool_name.len() + id.len() > MAX_TOOL_NAME_LENGTH {
- let len = MAX_TOOL_NAME_LENGTH - tool_name.len();
- let mut id = id.to_string();
- id.truncate(len);
- tool_name.insert_str(0, &id);
- } else {
- tool_name.insert_str(0, &id);
- }
-
- tool_name.truncate(MAX_TOOL_NAME_LENGTH);
-
- if seen_tool_names.contains(&tool_name) {
- log::error!("Cannot resolve tool name conflict for tool {}", tool.name());
- None
- } else {
- Some((tool_name, tool.clone()))
- }
- }
- }
- })
- .collect()
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -3272,7 +3160,6 @@ mod tests {
use futures::future::BoxFuture;
use futures::stream::BoxStream;
use gpui::TestAppContext;
- use icons::IconName;
use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider};
use language_model::{
LanguageModelCompletionError, LanguageModelName, LanguageModelProviderId,
@@ -3917,148 +3804,6 @@ fn main() {{
});
}
- #[gpui::test]
- fn test_resolve_tool_name_conflicts() {
- use assistant_tool::{Tool, ToolSource};
-
- assert_resolve_tool_name_conflicts(
- vec![
- TestTool::new("tool1", ToolSource::Native),
- TestTool::new("tool2", ToolSource::Native),
- TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
- ],
- vec!["tool1", "tool2", "tool3"],
- );
-
- assert_resolve_tool_name_conflicts(
- vec![
- TestTool::new("tool1", ToolSource::Native),
- TestTool::new("tool2", ToolSource::Native),
- TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
- TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
- ],
- vec!["tool1", "tool2", "mcp-1_tool3", "mcp-2_tool3"],
- );
-
- assert_resolve_tool_name_conflicts(
- vec![
- TestTool::new("tool1", ToolSource::Native),
- TestTool::new("tool2", ToolSource::Native),
- TestTool::new("tool3", ToolSource::Native),
- TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
- TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
- ],
- vec!["tool1", "tool2", "tool3", "mcp-1_tool3", "mcp-2_tool3"],
- );
-
- // Test that tool with very long name is always truncated
- assert_resolve_tool_name_conflicts(
- vec![TestTool::new(
- "tool-with-more-then-64-characters-blah-blah-blah-blah-blah-blah-blah-blah",
- ToolSource::Native,
- )],
- vec!["tool-with-more-then-64-characters-blah-blah-blah-blah-blah-blah-"],
- );
-
- // Test deduplication of tools with very long names, in this case the mcp server name should be truncated
- assert_resolve_tool_name_conflicts(
- vec![
- TestTool::new("tool-with-very-very-very-long-name", ToolSource::Native),
- TestTool::new(
- "tool-with-very-very-very-long-name",
- ToolSource::ContextServer {
- id: "mcp-with-very-very-very-long-name".into(),
- },
- ),
- ],
- vec![
- "tool-with-very-very-very-long-name",
- "mcp-with-very-very-very-long-_tool-with-very-very-very-long-name",
- ],
- );
-
- fn assert_resolve_tool_name_conflicts(
- tools: Vec<TestTool>,
- expected: Vec<impl Into<String>>,
- ) {
- let tools: Vec<Arc<dyn Tool>> = tools
- .into_iter()
- .map(|t| Arc::new(t) as Arc<dyn Tool>)
- .collect();
- let tools = resolve_tool_name_conflicts(&tools);
- assert_eq!(tools.len(), expected.len());
- for (i, expected_name) in expected.into_iter().enumerate() {
- let expected_name = expected_name.into();
- let actual_name = &tools[i].0;
- assert_eq!(
- actual_name, &expected_name,
- "Expected '{}' got '{}' at index {}",
- expected_name, actual_name, i
- );
- }
- }
-
- struct TestTool {
- name: String,
- source: ToolSource,
- }
-
- impl TestTool {
- fn new(name: impl Into<String>, source: ToolSource) -> Self {
- Self {
- name: name.into(),
- source,
- }
- }
- }
-
- impl Tool for TestTool {
- fn name(&self) -> String {
- self.name.clone()
- }
-
- fn icon(&self) -> IconName {
- IconName::Ai
- }
-
- fn may_perform_edits(&self) -> bool {
- false
- }
-
- fn needs_confirmation(&self, _input: &serde_json::Value, _cx: &App) -> bool {
- true
- }
-
- fn source(&self) -> ToolSource {
- self.source.clone()
- }
-
- fn description(&self) -> String {
- "Test tool".to_string()
- }
-
- fn ui_text(&self, _input: &serde_json::Value) -> String {
- "Test tool".to_string()
- }
-
- fn run(
- self: Arc<Self>,
- _input: serde_json::Value,
- _request: Arc<LanguageModelRequest>,
- _project: Entity<Project>,
- _action_log: Entity<ActionLog>,
- _model: Arc<dyn LanguageModel>,
- _window: Option<AnyWindowHandle>,
- _cx: &mut App,
- ) -> assistant_tool::ToolResult {
- assistant_tool::ToolResult {
- output: Task::ready(Err(anyhow::anyhow!("No content"))),
- card: None,
- }
- }
- }
- }
-
// Helper to create a model that returns errors
enum TestError {
Overloaded,
@@ -4139,9 +3884,15 @@ fn main() {{
>,
> {
let error = match self.error_type {
- TestError::Overloaded => LanguageModelCompletionError::Overloaded,
+ TestError::Overloaded => LanguageModelCompletionError::ServerOverloaded {
+ provider: self.provider_name(),
+ retry_after: None,
+ },
TestError::InternalServerError => {
- LanguageModelCompletionError::ApiInternalServerError
+ LanguageModelCompletionError::ApiInternalServerError {
+ provider: self.provider_name(),
+ message: "I'm a teapot orbiting the sun".to_string(),
+ }
}
};
async move {
@@ -4649,9 +4400,13 @@ fn main() {{
> {
if !*self.failed_once.lock() {
*self.failed_once.lock() = true;
+ let provider = self.provider_name();
// Return error on first attempt
let stream = futures::stream::once(async move {
- Err(LanguageModelCompletionError::Overloaded)
+ Err(LanguageModelCompletionError::ServerOverloaded {
+ provider,
+ retry_after: None,
+ })
});
async move { Ok(stream.boxed()) }.boxed()
} else {
@@ -4814,9 +4569,13 @@ fn main() {{
> {
if !*self.failed_once.lock() {
*self.failed_once.lock() = true;
+ let provider = self.provider_name();
// Return error on first attempt
let stream = futures::stream::once(async move {
- Err(LanguageModelCompletionError::Overloaded)
+ Err(LanguageModelCompletionError::ServerOverloaded {
+ provider,
+ retry_after: None,
+ })
});
async move { Ok(stream.boxed()) }.boxed()
} else {
@@ -4969,10 +4728,12 @@ fn main() {{
LanguageModelCompletionError,
>,
> {
+ let provider = self.provider_name();
async move {
let stream = futures::stream::once(async move {
Err(LanguageModelCompletionError::RateLimitExceeded {
- retry_after: Duration::from_secs(TEST_RATE_LIMIT_RETRY_SECS),
+ provider,
+ retry_after: Some(Duration::from_secs(TEST_RATE_LIMIT_RETRY_SECS)),
})
});
Ok(stream.boxed())
@@ -6,7 +6,7 @@ use crate::{
};
use agent_settings::{AgentProfileId, CompletionMode};
use anyhow::{Context as _, Result, anyhow};
-use assistant_tool::{ToolId, ToolWorkingSet};
+use assistant_tool::{Tool, ToolId, ToolWorkingSet};
use chrono::{DateTime, Utc};
use collections::HashMap;
use context_server::ContextServerId;
@@ -537,8 +537,8 @@ impl ThreadStore {
}
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) {
- tool_working_set.update(cx, |tool_working_set, _| {
- tool_working_set.remove(&tool_ids);
+ tool_working_set.update(cx, |tool_working_set, cx| {
+ tool_working_set.remove(&tool_ids, cx);
});
}
}
@@ -569,19 +569,17 @@ impl ThreadStore {
.log_err()
{
let tool_ids = tool_working_set
- .update(cx, |tool_working_set, _| {
- response
- .tools
- .into_iter()
- .map(|tool| {
- log::info!("registering context server tool: {:?}", tool.name);
- tool_working_set.insert(Arc::new(ContextServerTool::new(
+ .update(cx, |tool_working_set, cx| {
+ tool_working_set.extend(
+ response.tools.into_iter().map(|tool| {
+ Arc::new(ContextServerTool::new(
context_server_store.clone(),
server.id(),
tool,
- )))
- })
- .collect::<Vec<_>>()
+ )) as Arc<dyn Tool>
+ }),
+ cx,
+ )
})
.log_err();
@@ -6,9 +6,10 @@ use anyhow::{Result, bail};
use collections::IndexMap;
use gpui::{App, Pixels, SharedString};
use language_model::LanguageModel;
-use schemars::{JsonSchema, schema::Schema};
+use schemars::{JsonSchema, json_schema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
+use std::borrow::Cow;
pub use crate::agent_profile::*;
@@ -49,7 +50,7 @@ pub struct AgentSettings {
pub dock: AgentDockPosition,
pub default_width: Pixels,
pub default_height: Pixels,
- pub default_model: LanguageModelSelection,
+ pub default_model: Option<LanguageModelSelection>,
pub inline_assistant_model: Option<LanguageModelSelection>,
pub commit_message_model: Option<LanguageModelSelection>,
pub thread_summary_model: Option<LanguageModelSelection>,
@@ -211,7 +212,6 @@ impl AgentSettingsContent {
}
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
-#[schemars(deny_unknown_fields)]
pub struct AgentSettingsContent {
/// Whether the Agent is enabled.
///
@@ -321,29 +321,27 @@ pub struct LanguageModelSelection {
pub struct LanguageModelProviderSetting(pub String);
impl JsonSchema for LanguageModelProviderSetting {
- fn schema_name() -> String {
+ fn schema_name() -> Cow<'static, str> {
"LanguageModelProviderSetting".into()
}
- fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
- schemars::schema::SchemaObject {
- enum_values: Some(vec![
- "anthropic".into(),
- "amazon-bedrock".into(),
- "google".into(),
- "lmstudio".into(),
- "ollama".into(),
- "openai".into(),
- "zed.dev".into(),
- "copilot_chat".into(),
- "deepseek".into(),
- "openrouter".into(),
- "mistral".into(),
- "vercel".into(),
- ]),
- ..Default::default()
- }
- .into()
+ fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "enum": [
+ "anthropic",
+ "amazon-bedrock",
+ "google",
+ "lmstudio",
+ "ollama",
+ "openai",
+ "zed.dev",
+ "copilot_chat",
+ "deepseek",
+ "openrouter",
+ "mistral",
+ "vercel"
+ ]
+ })
}
}
@@ -359,15 +357,6 @@ impl From<&str> for LanguageModelProviderSetting {
}
}
-impl Default for LanguageModelSelection {
- fn default() -> Self {
- Self {
- provider: LanguageModelProviderSetting("openai".to_string()),
- model: "gpt-4".to_string(),
- }
- }
-}
-
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
pub name: Arc<str>,
@@ -411,7 +400,10 @@ impl Settings for AgentSettings {
&mut settings.default_height,
value.default_height.map(Into::into),
);
- merge(&mut settings.default_model, value.default_model.clone());
+ settings.default_model = value
+ .default_model
+ .clone()
+ .or(settings.default_model.take());
settings.inline_assistant_model = value
.inline_assistant_model
.clone()
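
`default_model` is now optional with no hard-coded fallback (the `Default` impl pointing at OpenAI's gpt-4 was removed above), and the merge keeps whatever an earlier settings layer chose unless a later layer names a model explicitly. A tiny sketch of that `Option`-based merge, as a hypothetical helper over plain placeholder strings rather than the real `LanguageModelSelection`:

/// Later layers win only when they actually specify a value.
fn merge_default_model(current: &mut Option<String>, layer: Option<String>) {
    *current = layer.or(current.take());
}

fn main() {
    let mut model: Option<String> = None;
    merge_default_model(&mut model, Some("some-model".to_string())); // a layer picks a model
    merge_default_model(&mut model, None); // a later layer stays silent, so the choice sticks
    assert_eq!(model.as_deref(), Some("some-model"));
}
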
@@ -1,9 +1,7 @@
use crate::context_picker::{ContextPicker, MentionLink};
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
use crate::message_editor::{extract_message_creases, insert_message_creases};
-use crate::ui::{
- AddedContext, AgentNotification, AgentNotificationEvent, AnimatedLabel, ContextPill,
-};
+use crate::ui::{AddedContext, AgentNotification, AgentNotificationEvent, ContextPill};
use crate::{AgentPanel, ModelUsageContext};
use agent::{
ContextStore, LastRestoreCheckpoint, MessageCrease, MessageId, MessageSegment, TextThreadStore,
@@ -47,8 +45,8 @@ use std::time::Duration;
use text::ToPoint;
use theme::ThemeSettings;
use ui::{
- Disclosure, KeyBinding, PopoverMenuHandle, Scrollbar, ScrollbarState, TextSize, Tooltip,
- prelude::*,
+ Banner, Disclosure, KeyBinding, PopoverMenuHandle, Scrollbar, ScrollbarState, TextSize,
+ Tooltip, prelude::*,
};
use util::ResultExt as _;
use util::markdown::MarkdownCodeBlock;
@@ -58,6 +56,7 @@ use zed_llm_client::CompletionIntent;
const CODEBLOCK_CONTAINER_GROUP: &str = "codeblock_container";
const EDIT_PREVIOUS_MESSAGE_MIN_LINES: usize = 1;
+const RESPONSE_PADDING_X: Pixels = px(19.);
pub struct ActiveThread {
context_store: Entity<ContextStore>,
@@ -1025,6 +1024,7 @@ impl ActiveThread {
}
}
ThreadEvent::MessageAdded(message_id) => {
+ self.clear_last_error();
if let Some(rendered_message) = self.thread.update(cx, |thread, cx| {
thread.message(*message_id).map(|message| {
RenderedMessage::from_segments(
@@ -1041,6 +1041,7 @@ impl ActiveThread {
cx.notify();
}
ThreadEvent::MessageEdited(message_id) => {
+ self.clear_last_error();
if let Some(index) = self.messages.iter().position(|id| id == message_id) {
if let Some(rendered_message) = self.thread.update(cx, |thread, cx| {
thread.message(*message_id).map(|message| {
@@ -1817,7 +1818,7 @@ impl ActiveThread {
.my_3()
.mx_5()
.when(is_generating_stale || message.is_hidden, |this| {
- this.child(AnimatedLabel::new("").size(LabelSize::Small))
+ this.child(LoadingLabel::new("").size(LabelSize::Small))
})
});
@@ -1874,9 +1875,6 @@ impl ActiveThread {
this.scroll_to_top(cx);
}));
- // For all items that should be aligned with the LLM's response.
- const RESPONSE_PADDING_X: Pixels = px(19.);
-
let show_feedback = thread.is_turn_end(ix);
let feedback_container = h_flex()
.group("feedback_container")
@@ -2537,34 +2535,18 @@ impl ActiveThread {
ix: usize,
cx: &mut Context<Self>,
) -> Stateful<Div> {
- let colors = cx.theme().colors();
- div().id(("message-container", ix)).py_1().px_2().child(
- v_flex()
- .w_full()
- .bg(colors.editor_background)
- .rounded_sm()
- .child(
- h_flex()
- .w_full()
- .p_2()
- .gap_2()
- .child(
- div().flex_none().child(
- Icon::new(IconName::Warning)
- .size(IconSize::Small)
- .color(Color::Warning),
- ),
- )
- .child(
- v_flex()
- .flex_1()
- .min_w_0()
- .text_size(TextSize::Small.rems(cx))
- .text_color(cx.theme().colors().text_muted)
- .children(message_content),
- ),
- ),
- )
+ let message = div()
+ .flex_1()
+ .min_w_0()
+ .text_size(TextSize::XSmall.rems(cx))
+ .text_color(cx.theme().colors().text_muted)
+ .children(message_content);
+
+ div()
+ .id(("message-container", ix))
+ .py_1()
+ .px_2p5()
+ .child(Banner::new().severity(ui::Severity::Warning).child(message))
}
fn render_message_thinking_segment(
@@ -2602,7 +2584,7 @@ impl ActiveThread {
.size(IconSize::XSmall)
.color(Color::Muted),
)
- .child(AnimatedLabel::new("Thinking").size(LabelSize::Small)),
+ .child(LoadingLabel::new("Thinking").size(LabelSize::Small)),
)
.child(
h_flex()
@@ -3171,7 +3153,7 @@ impl ActiveThread {
.border_color(self.tool_card_border_color(cx))
.rounded_b_lg()
.child(
- AnimatedLabel::new("Waiting for Confirmation").size(LabelSize::Small)
+ LoadingLabel::new("Waiting for Confirmation").size(LabelSize::Small)
)
.child(
h_flex()
@@ -16,7 +16,9 @@ use gpui::{
Focusable, ScrollHandle, Subscription, Task, Transformation, WeakEntity, percentage,
};
use language::LanguageRegistry;
-use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
+use language_model::{
+ LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID,
+};
use notifications::status_toast::{StatusToast, ToastIcon};
use project::{
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
@@ -24,8 +26,8 @@ use project::{
};
use settings::{Settings, update_settings_file};
use ui::{
- ContextMenu, Disclosure, ElevationIndex, Indicator, PopoverMenu, Scrollbar, ScrollbarState,
- Switch, SwitchColor, Tooltip, prelude::*,
+ ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
+ Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip, prelude::*,
};
use util::ResultExt as _;
use workspace::Workspace;
@@ -86,6 +88,14 @@ impl AgentConfiguration {
let scroll_handle = ScrollHandle::new();
let scrollbar_state = ScrollbarState::new(scroll_handle.clone());
+ let mut expanded_provider_configurations = HashMap::default();
+ if LanguageModelRegistry::read_global(cx)
+ .provider(&ZED_CLOUD_PROVIDER_ID)
+ .map_or(false, |cloud_provider| cloud_provider.must_accept_terms(cx))
+ {
+ expanded_provider_configurations.insert(ZED_CLOUD_PROVIDER_ID, true);
+ }
+
let mut this = Self {
fs,
language_registry,
@@ -94,7 +104,7 @@ impl AgentConfiguration {
configuration_views_by_provider: HashMap::default(),
context_server_store,
expanded_context_server_tools: HashMap::default(),
- expanded_provider_configurations: HashMap::default(),
+ expanded_provider_configurations,
tools,
_registry_subscription: registry_subscription,
scroll_handle,
@@ -162,19 +172,29 @@ impl AgentConfiguration {
.unwrap_or(false);
v_flex()
- .py_2()
- .gap_1p5()
- .border_t_1()
- .border_color(cx.theme().colors().border.opacity(0.6))
+ .when(is_expanded, |this| this.mb_2())
+ .child(
+ div()
+ .opacity(0.6)
+ .px_2()
+ .child(Divider::horizontal().color(DividerColor::Border)),
+ )
.child(
h_flex()
+ .map(|this| {
+ if is_expanded {
+ this.mt_2().mb_1()
+ } else {
+ this.my_2()
+ }
+ })
.w_full()
- .gap_1()
.justify_between()
.child(
h_flex()
.id(provider_id_string.clone())
.cursor_pointer()
+ .px_2()
.py_0p5()
.w_full()
.justify_between()
@@ -237,12 +257,16 @@ impl AgentConfiguration {
)
}),
)
- .when(is_expanded, |parent| match configuration_view {
- Some(configuration_view) => parent.child(configuration_view),
- None => parent.child(Label::new(format!(
- "No configuration view for {provider_name}",
- ))),
- })
+ .child(
+ div()
+ .px_2()
+ .when(is_expanded, |parent| match configuration_view {
+ Some(configuration_view) => parent.child(configuration_view),
+ None => parent.child(Label::new(format!(
+ "No configuration view for {provider_name}",
+ ))),
+ }),
+ )
}
fn render_provider_configuration_section(
@@ -252,12 +276,11 @@ impl AgentConfiguration {
let providers = LanguageModelRegistry::read_global(cx).providers();
v_flex()
- .p(DynamicSpacing::Base16.rems(cx))
- .pr(DynamicSpacing::Base20.rems(cx))
- .border_b_1()
- .border_color(cx.theme().colors().border)
.child(
v_flex()
+ .p(DynamicSpacing::Base16.rems(cx))
+ .pr(DynamicSpacing::Base20.rems(cx))
+ .pb_0()
.mb_2p5()
.gap_0p5()
.child(Headline::new("LLM Providers"))
@@ -266,10 +289,15 @@ impl AgentConfiguration {
.color(Color::Muted),
),
)
- .children(
- providers
- .into_iter()
- .map(|provider| self.render_provider_configuration_block(&provider, cx)),
+ .child(
+ div()
+ .pl(DynamicSpacing::Base08.rems(cx))
+ .pr(DynamicSpacing::Base20.rems(cx))
+ .children(
+ providers.into_iter().map(|provider| {
+ self.render_provider_configuration_block(&provider, cx)
+ }),
+ ),
)
}
@@ -408,7 +436,7 @@ impl AgentConfiguration {
window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
- let context_server_ids = self.context_server_store.read(cx).all_server_ids().clone();
+ let context_server_ids = self.context_server_store.read(cx).configured_server_ids();
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
@@ -379,6 +379,14 @@ impl ConfigureContextServerModal {
};
self.state = State::Waiting;
+
+ let existing_server = self.context_server_store.read(cx).get_running_server(&id);
+ if existing_server.is_some() {
+ self.context_server_store.update(cx, |store, cx| {
+ store.stop_server(&id, cx).log_err();
+ });
+ }
+
let wait_for_context_server_task =
wait_for_context_server(&self.context_server_store, id.clone(), cx);
cx.spawn({
@@ -399,13 +407,21 @@ impl ConfigureContextServerModal {
})
.detach();
- // When we write the settings to the file, the context server will be restarted.
- workspace.update(cx, |workspace, cx| {
- let fs = workspace.app_state().fs.clone();
- update_settings_file::<ProjectSettings>(fs.clone(), cx, |project_settings, _| {
- project_settings.context_servers.insert(id.0, settings);
+ let settings_changed =
+ ProjectSettings::get_global(cx).context_servers.get(&id.0) != Some(&settings);
+
+ if settings_changed {
+ // When we write the settings to the file, the context server will be restarted.
+ workspace.update(cx, |workspace, cx| {
+ let fs = workspace.app_state().fs.clone();
+ update_settings_file::<ProjectSettings>(fs.clone(), cx, |project_settings, _| {
+ project_settings.context_servers.insert(id.0, settings);
+ });
});
- });
+ } else if let Some(existing_server) = existing_server {
+ self.context_server_store
+ .update(cx, |store, cx| store.start_server(existing_server, cx));
+ }
}
fn cancel(&mut self, _: &menu::Cancel, cx: &mut Context<Self>) {
@@ -41,7 +41,7 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use fs::Fs;
use gpui::{
Action, Animation, AnimationExt as _, AnyElement, App, AsyncWindowContext, ClipboardItem,
- Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, FontWeight,
+ Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, Hsla,
KeyContext, Pixels, Subscription, Task, UpdateGlobal, WeakEntity, linear_color_stop,
linear_gradient, prelude::*, pulsating_between,
};
@@ -59,7 +59,7 @@ use theme::ThemeSettings;
use time::UtcOffset;
use ui::utils::WithRemSize;
use ui::{
- Banner, CheckboxWithLabel, ContextMenu, ElevationIndex, KeyBinding, PopoverMenu,
+ Banner, Callout, CheckboxWithLabel, ContextMenu, ElevationIndex, KeyBinding, PopoverMenu,
PopoverMenuHandle, ProgressBar, Tab, Tooltip, Vector, VectorName, prelude::*,
};
use util::ResultExt as _;
@@ -2025,9 +2025,7 @@ impl AgentPanel {
.thread()
.read(cx)
.configured_model()
- .map_or(false, |model| {
- model.provider.id().0 == ZED_CLOUD_PROVIDER_ID
- });
+ .map_or(false, |model| model.provider.id() == ZED_CLOUD_PROVIDER_ID);
if !is_using_zed_provider {
return false;
@@ -2600,7 +2598,7 @@ impl AgentPanel {
Some(ConfigurationError::ProviderPendingTermsAcceptance(provider)) => {
parent.child(Banner::new().severity(ui::Severity::Warning).child(
h_flex().w_full().children(provider.render_accept_terms(
- LanguageModelProviderTosView::ThreadtEmptyState,
+ LanguageModelProviderTosView::ThreadEmptyState,
cx,
)),
))
@@ -2691,58 +2689,90 @@ impl AgentPanel {
Some(div().px_2().pb_2().child(banner).into_any_element())
}
+ fn create_copy_button(&self, message: impl Into<String>) -> impl IntoElement {
+ let message = message.into();
+
+ IconButton::new("copy", IconName::Copy)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .tooltip(Tooltip::text("Copy Error Message"))
+ .on_click(move |_, _, cx| {
+ cx.write_to_clipboard(ClipboardItem::new_string(message.clone()))
+ })
+ }
+
+ fn dismiss_error_button(
+ &self,
+ thread: &Entity<ActiveThread>,
+ cx: &mut Context<Self>,
+ ) -> impl IntoElement {
+ IconButton::new("dismiss", IconName::Close)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .tooltip(Tooltip::text("Dismiss Error"))
+ .on_click(cx.listener({
+ let thread = thread.clone();
+ move |_, _, _, cx| {
+ thread.update(cx, |this, _cx| {
+ this.clear_last_error();
+ });
+
+ cx.notify();
+ }
+ }))
+ }
+
+ fn upgrade_button(
+ &self,
+ thread: &Entity<ActiveThread>,
+ cx: &mut Context<Self>,
+ ) -> impl IntoElement {
+ Button::new("upgrade", "Upgrade")
+ .label_size(LabelSize::Small)
+ .style(ButtonStyle::Tinted(ui::TintColor::Accent))
+ .on_click(cx.listener({
+ let thread = thread.clone();
+ move |_, _, _, cx| {
+ thread.update(cx, |this, _cx| {
+ this.clear_last_error();
+ });
+
+ cx.open_url(&zed_urls::account_url(cx));
+ cx.notify();
+ }
+ }))
+ }
+
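+    // Shared background tint for the error callouts rendered below.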
+ fn error_callout_bg(&self, cx: &Context<Self>) -> Hsla {
+ cx.theme().status().error.opacity(0.08)
+ }
+
fn render_payment_required_error(
&self,
thread: &Entity<ActiveThread>,
cx: &mut Context<Self>,
) -> AnyElement {
- const ERROR_MESSAGE: &str = "Free tier exceeded. Subscribe and add payment to continue using Zed LLMs. You'll be billed at cost for tokens used.";
-
- v_flex()
- .gap_0p5()
- .child(
- h_flex()
- .gap_1p5()
- .items_center()
- .child(Icon::new(IconName::XCircle).color(Color::Error))
- .child(Label::new("Free Usage Exceeded").weight(FontWeight::MEDIUM)),
- )
- .child(
- div()
- .id("error-message")
- .max_h_24()
- .overflow_y_scroll()
- .child(Label::new(ERROR_MESSAGE)),
- )
- .child(
- h_flex()
- .justify_end()
- .mt_1()
- .gap_1()
- .child(self.create_copy_button(ERROR_MESSAGE))
- .child(Button::new("subscribe", "Subscribe").on_click(cx.listener({
- let thread = thread.clone();
- move |_, _, _, cx| {
- thread.update(cx, |this, _cx| {
- this.clear_last_error();
- });
+ const ERROR_MESSAGE: &str =
+ "You reached your free usage limit. Upgrade to Zed Pro for more prompts.";
- cx.open_url(&zed_urls::account_url(cx));
- cx.notify();
- }
- })))
- .child(Button::new("dismiss", "Dismiss").on_click(cx.listener({
- let thread = thread.clone();
- move |_, _, _, cx| {
- thread.update(cx, |this, _cx| {
- this.clear_last_error();
- });
+ let icon = Icon::new(IconName::XCircle)
+ .size(IconSize::Small)
+ .color(Color::Error);
- cx.notify();
- }
- }))),
+ div()
+ .border_t_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Callout::new()
+ .icon(icon)
+ .title("Free Usage Exceeded")
+ .description(ERROR_MESSAGE)
+ .tertiary_action(self.upgrade_button(thread, cx))
+ .secondary_action(self.create_copy_button(ERROR_MESSAGE))
+ .primary_action(self.dismiss_error_button(thread, cx))
+ .bg_color(self.error_callout_bg(cx)),
)
- .into_any()
+ .into_any_element()
}
fn render_model_request_limit_reached_error(
@@ -2752,67 +2782,28 @@ impl AgentPanel {
cx: &mut Context<Self>,
) -> AnyElement {
let error_message = match plan {
- Plan::ZedPro => {
- "Model request limit reached. Upgrade to usage-based billing for more requests."
- }
- Plan::ZedProTrial => {
- "Model request limit reached. Upgrade to Zed Pro for more requests."
- }
- Plan::Free => "Model request limit reached. Upgrade to Zed Pro for more requests.",
- };
- let call_to_action = match plan {
- Plan::ZedPro => "Upgrade to usage-based billing",
- Plan::ZedProTrial => "Upgrade to Zed Pro",
- Plan::Free => "Upgrade to Zed Pro",
+ Plan::ZedPro => "Upgrade to usage-based billing for more prompts.",
+ Plan::ZedProTrial | Plan::Free => "Upgrade to Zed Pro for more prompts.",
};
- v_flex()
- .gap_0p5()
- .child(
- h_flex()
- .gap_1p5()
- .items_center()
- .child(Icon::new(IconName::XCircle).color(Color::Error))
- .child(Label::new("Model Request Limit Reached").weight(FontWeight::MEDIUM)),
- )
- .child(
- div()
- .id("error-message")
- .max_h_24()
- .overflow_y_scroll()
- .child(Label::new(error_message)),
- )
- .child(
- h_flex()
- .justify_end()
- .mt_1()
- .gap_1()
- .child(self.create_copy_button(error_message))
- .child(
- Button::new("subscribe", call_to_action).on_click(cx.listener({
- let thread = thread.clone();
- move |_, _, _, cx| {
- thread.update(cx, |this, _cx| {
- this.clear_last_error();
- });
-
- cx.open_url(&zed_urls::account_url(cx));
- cx.notify();
- }
- })),
- )
- .child(Button::new("dismiss", "Dismiss").on_click(cx.listener({
- let thread = thread.clone();
- move |_, _, _, cx| {
- thread.update(cx, |this, _cx| {
- this.clear_last_error();
- });
+ let icon = Icon::new(IconName::XCircle)
+ .size(IconSize::Small)
+ .color(Color::Error);
- cx.notify();
- }
- }))),
+ div()
+ .border_t_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Callout::new()
+ .icon(icon)
+ .title("Model Prompt Limit Reached")
+ .description(error_message)
+ .tertiary_action(self.upgrade_button(thread, cx))
+ .secondary_action(self.create_copy_button(error_message))
+ .primary_action(self.dismiss_error_button(thread, cx))
+ .bg_color(self.error_callout_bg(cx)),
)
- .into_any()
+ .into_any_element()
}
fn render_error_message(
@@ -2823,40 +2814,24 @@ impl AgentPanel {
cx: &mut Context<Self>,
) -> AnyElement {
let message_with_header = format!("{}\n{}", header, message);
- v_flex()
- .gap_0p5()
- .child(
- h_flex()
- .gap_1p5()
- .items_center()
- .child(Icon::new(IconName::XCircle).color(Color::Error))
- .child(Label::new(header).weight(FontWeight::MEDIUM)),
- )
- .child(
- div()
- .id("error-message")
- .max_h_32()
- .overflow_y_scroll()
- .child(Label::new(message.clone())),
- )
- .child(
- h_flex()
- .justify_end()
- .mt_1()
- .gap_1()
- .child(self.create_copy_button(message_with_header))
- .child(Button::new("dismiss", "Dismiss").on_click(cx.listener({
- let thread = thread.clone();
- move |_, _, _, cx| {
- thread.update(cx, |this, _cx| {
- this.clear_last_error();
- });
- cx.notify();
- }
- }))),
+ let icon = Icon::new(IconName::XCircle)
+ .size(IconSize::Small)
+ .color(Color::Error);
+
+ div()
+ .border_t_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Callout::new()
+ .icon(icon)
+ .title(header)
+ .description(message.clone())
+ .primary_action(self.dismiss_error_button(thread, cx))
+ .secondary_action(self.create_copy_button(message_with_header))
+ .bg_color(self.error_callout_bg(cx)),
)
- .into_any()
+ .into_any_element()
}
fn render_prompt_editor(
@@ -3001,15 +2976,6 @@ impl AgentPanel {
}
}
- fn create_copy_button(&self, message: impl Into<String>) -> impl IntoElement {
- let message = message.into();
- IconButton::new("copy", IconName::Copy)
- .on_click(move |_, _, cx| {
- cx.write_to_clipboard(ClipboardItem::new_string(message.clone()))
- })
- .tooltip(Tooltip::text("Copy Error Message"))
- }
-
fn key_context(&self) -> KeyContext {
let mut key_context = KeyContext::new_with_defaults();
key_context.add("AgentPanel");
@@ -3091,18 +3057,9 @@ impl Render for AgentPanel {
thread.clone().into_any_element()
})
.children(self.render_tool_use_limit_reached(window, cx))
- .child(h_flex().child(message_editor.clone()))
.when_some(thread.read(cx).last_error(), |this, last_error| {
this.child(
div()
- .absolute()
- .right_3()
- .bottom_12()
- .max_w_96()
- .py_2()
- .px_3()
- .elevation_2(cx)
- .occlude()
.child(match last_error {
ThreadError::PaymentRequired => {
self.render_payment_required_error(thread, cx)
@@ -3116,6 +3073,7 @@ impl Render for AgentPanel {
.into_any(),
)
})
+ .child(h_flex().child(message_editor.clone()))
.child(self.render_drag_target(cx)),
ActiveView::History => parent.child(self.history.clone()),
ActiveView::TextThread {
@@ -54,51 +54,88 @@ pub use ui::preview::{all_agent_previews, get_agent_preview};
actions!(
agent,
[
+ /// Creates a new text-based conversation thread.
NewTextThread,
+ /// Toggles the context picker interface for adding files, symbols, or other context.
ToggleContextPicker,
+ /// Toggles the navigation menu for switching between threads and views.
ToggleNavigationMenu,
+ /// Toggles the options menu for agent settings and preferences.
ToggleOptionsMenu,
+ /// Deletes the recently opened thread from history.
DeleteRecentlyOpenThread,
+ /// Toggles the profile selector for switching between agent profiles.
ToggleProfileSelector,
+ /// Removes all added context from the current conversation.
RemoveAllContext,
+ /// Expands the message editor to full size.
ExpandMessageEditor,
+ /// Opens the conversation history view.
OpenHistory,
+ /// Adds a context server to the configuration.
AddContextServer,
+ /// Removes the currently selected thread.
RemoveSelectedThread,
+ /// Starts a chat conversation with the agent.
Chat,
+ /// Starts a chat conversation with follow-up enabled.
ChatWithFollow,
+ /// Cycles to the next inline assist suggestion.
CycleNextInlineAssist,
+ /// Cycles to the previous inline assist suggestion.
CyclePreviousInlineAssist,
+ /// Moves focus up in the interface.
FocusUp,
+ /// Moves focus down in the interface.
FocusDown,
+ /// Moves focus left in the interface.
FocusLeft,
+ /// Moves focus right in the interface.
FocusRight,
+ /// Removes the currently focused context item.
RemoveFocusedContext,
+ /// Accepts the suggested context item.
AcceptSuggestedContext,
+ /// Opens the active thread as a markdown file.
OpenActiveThreadAsMarkdown,
+ /// Opens the agent diff view to review changes.
OpenAgentDiff,
+ /// Keeps the current suggestion or change.
Keep,
+ /// Rejects the current suggestion or change.
Reject,
+ /// Rejects all suggestions or changes.
RejectAll,
+ /// Keeps all suggestions or changes.
KeepAll,
+ /// Follows the agent's suggestions.
Follow,
+ /// Resets the trial upsell notification.
ResetTrialUpsell,
+ /// Resets the trial end upsell notification.
ResetTrialEndUpsell,
+ /// Continues the current thread.
ContinueThread,
+ /// Continues the thread with burn mode enabled.
ContinueWithBurnMode,
+ /// Toggles burn mode for faster responses.
ToggleBurnMode,
]
);
+/// Creates a new conversation thread, optionally based on an existing thread.
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
+#[serde(deny_unknown_fields)]
pub struct NewThread {
#[serde(default)]
from_thread_id: Option<ThreadId>,
}
+/// Opens the profile management interface for configuring agent tools and settings.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
+#[serde(deny_unknown_fields)]
pub struct ManageProfiles {
#[serde(default)]
pub customize_tools: Option<AgentProfileId>,
@@ -209,7 +246,7 @@ fn update_active_language_model_from_settings(cx: &mut App) {
}
}
- let default = to_selected_model(&settings.default_model);
+ let default = settings.default_model.as_ref().map(to_selected_model);
let inline_assistant = settings
.inline_assistant_model
.as_ref()
@@ -229,7 +266,7 @@ fn update_active_language_model_from_settings(cx: &mut App) {
.collect::<Vec<_>>();
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
- registry.select_default_model(Some(&default), cx);
+ registry.select_default_model(default.as_ref(), cx);
registry.select_inline_assistant_model(inline_assistant.as_ref(), cx);
registry.select_commit_message_model(commit_message.as_ref(), cx);
registry.select_thread_summary_model(thread_summary.as_ref(), cx);
@@ -686,6 +686,7 @@ impl ContextPickerCompletionProvider {
let mut label = CodeLabel::plain(symbol.name.clone(), None);
label.push_str(" ", None);
label.push_str(&file_name, comment_id);
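+        // Append the symbol's line number (1-based) after the file name.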
+ label.push_str(&format!(" L{}", symbol.range.start.0.row + 1), comment_id);
let new_text = format!("{} ", MentionLink::for_symbol(&symbol.name, &full_path));
let new_text_len = new_text.len();
@@ -18,6 +18,7 @@ use ui::{ListItem, ListItemSpacing, prelude::*};
actions!(
agent,
[
+ /// Toggles the language model selector dropdown.
#[action(deprecated_aliases = ["assistant::ToggleModelSelector", "assistant2::ToggleModelSelector"])]
ToggleModelSelector
]
@@ -399,7 +400,7 @@ impl PickerDelegate for LanguageModelPickerDelegate {
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let all_models = self.all_models.clone();
- let current_index = self.selected_index;
+ let active_model = (self.get_active_model)(cx);
let bg_executor = cx.background_executor();
let language_model_registry = LanguageModelRegistry::global(cx);
@@ -441,12 +442,9 @@ impl PickerDelegate for LanguageModelPickerDelegate {
cx.spawn_in(window, async move |this, cx| {
this.update_in(cx, |this, window, cx| {
this.delegate.filtered_entries = filtered_models.entries();
- // Preserve selection focus
- let new_index = if current_index >= this.delegate.filtered_entries.len() {
- 0
- } else {
- current_index
- };
+            // Find the index of the currently active model in the filtered list
+ let new_index =
+ Self::get_active_model_index(&this.delegate.filtered_entries, active_model);
this.set_selected_index(new_index, Some(picker::Direction::Down), true, window, cx);
cx.notify();
})
@@ -1250,9 +1250,7 @@ impl MessageEditor {
self.thread
.read(cx)
.configured_model()
- .map_or(false, |model| {
- model.provider.id().0 == ZED_CLOUD_PROVIDER_ID
- })
+ .map_or(false, |model| model.provider.id() == ZED_CLOUD_PROVIDER_ID)
}
fn render_usage_callout(&self, line_height: Pixels, cx: &mut Context<Self>) -> Option<Div> {
@@ -85,16 +85,24 @@ use assistant_context::{
actions!(
assistant,
[
+ /// Sends the current message to the assistant.
Assist,
+ /// Confirms and executes the entered slash command.
ConfirmCommand,
+ /// Copies code from the assistant's response to the clipboard.
CopyCode,
+ /// Cycles between user and assistant message roles.
CycleMessageRole,
+ /// Inserts the selected text into the active editor.
InsertIntoEditor,
+ /// Quotes the current selection in the assistant conversation.
QuoteSelection,
+ /// Splits the conversation at the current cursor position.
Split,
]
);
+/// Inserts files that were dragged and dropped into the assistant conversation.
#[derive(PartialEq, Clone, Action)]
#[action(namespace = assistant, no_json, no_register)]
pub enum InsertDraggedFiles {
@@ -42,8 +42,8 @@ impl IncompatibleToolsState {
.profile()
.enabled_tools(cx)
.iter()
- .filter(|tool| tool.input_schema(model.tool_input_format()).is_err())
- .cloned()
+ .filter(|(_, tool)| tool.input_schema(model.tool_input_format()).is_err())
+ .map(|(_, tool)| tool.clone())
.collect()
})
}
@@ -1,5 +1,4 @@
mod agent_notification;
-mod animated_label;
mod burn_mode_tooltip;
mod context_pill;
mod onboarding_modal;
@@ -7,7 +6,6 @@ pub mod preview;
mod upsell;
pub use agent_notification::*;
-pub use animated_label::*;
pub use burn_mode_tooltip::*;
pub use context_pill::*;
pub use onboarding_modal::*;
@@ -6,7 +6,7 @@ use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::http::{self, HeaderMap, HeaderValue};
-use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
+use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, StatusCode};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;
@@ -356,7 +356,7 @@ pub async fn complete(
.send(request)
.await
.map_err(AnthropicError::HttpSend)?;
- let status = response.status();
+ let status_code = response.status();
let mut body = String::new();
response
.body_mut()
@@ -364,12 +364,12 @@ pub async fn complete(
.await
.map_err(AnthropicError::ReadResponse)?;
- if status.is_success() {
+ if status_code.is_success() {
Ok(serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse)?)
} else {
Err(AnthropicError::HttpResponseError {
- status: status.as_u16(),
- body,
+ status_code,
+ message: body,
})
}
}
@@ -444,11 +444,7 @@ impl RateLimitInfo {
}
Self {
- retry_after: headers
- .get("retry-after")
- .and_then(|v| v.to_str().ok())
- .and_then(|v| v.parse::<u64>().ok())
- .map(Duration::from_secs),
+ retry_after: parse_retry_after(headers),
requests: RateLimit::from_headers("requests", headers).ok(),
tokens: RateLimit::from_headers("tokens", headers).ok(),
input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
@@ -457,6 +453,17 @@ impl RateLimitInfo {
}
}
+/// Parses the Retry-After header value as an integer number of seconds (Anthropic always uses
+/// seconds). Note that other services might specify an HTTP date or another format for this
+/// header. Returns `None` if the header is not present or cannot be parsed.
+pub fn parse_retry_after(headers: &HeaderMap<HeaderValue>) -> Option<Duration> {
+ headers
+ .get("retry-after")
+ .and_then(|v| v.to_str().ok())
+ .and_then(|v| v.parse::<u64>().ok())
+ .map(Duration::from_secs)
+}
+
fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> anyhow::Result<&'a str> {
Ok(headers
.get(key)
@@ -520,6 +527,10 @@ pub async fn stream_completion_with_rate_limit_info(
})
.boxed();
Ok((stream, Some(rate_limits)))
+ } else if response.status().as_u16() == 529 {
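+        // Anthropic returns HTTP 529 when the API is temporarily overloaded; surface it as a distinct, retryable error.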
+ Err(AnthropicError::ServerOverloaded {
+ retry_after: rate_limits.retry_after,
+ })
} else if let Some(retry_after) = rate_limits.retry_after {
Err(AnthropicError::RateLimit { retry_after })
} else {
@@ -532,10 +543,9 @@ pub async fn stream_completion_with_rate_limit_info(
match serde_json::from_str::<Event>(&body) {
Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
- Ok(_) => Err(AnthropicError::UnexpectedResponseFormat(body)),
- Err(_) => Err(AnthropicError::HttpResponseError {
- status: response.status().as_u16(),
- body: body,
+ Ok(_) | Err(_) => Err(AnthropicError::HttpResponseError {
+ status_code: response.status(),
+ message: body,
}),
}
}
@@ -801,16 +811,19 @@ pub enum AnthropicError {
ReadResponse(io::Error),
/// HTTP error response from the API
- HttpResponseError { status: u16, body: String },
+ HttpResponseError {
+ status_code: StatusCode,
+ message: String,
+ },
/// Rate limit exceeded
RateLimit { retry_after: Duration },
+ /// Server overloaded
+ ServerOverloaded { retry_after: Option<Duration> },
+
/// API returned an error response
ApiError(ApiError),
-
- /// Unexpected response format
- UnexpectedResponseFormat(String),
}
#[derive(Debug, Serialize, Deserialize, Error)]
@@ -2140,7 +2140,8 @@ impl AssistantContext {
);
}
LanguageModelCompletionEvent::ToolUse(_) |
- LanguageModelCompletionEvent::UsageUpdate(_) => {}
+ LanguageModelCompletionEvent::ToolUseJsonParseError { .. } |
+ LanguageModelCompletionEvent::UsageUpdate(_) => {}
}
});
@@ -22,6 +22,7 @@ gpui.workspace = true
icons.workspace = true
language.workspace = true
language_model.workspace = true
+log.workspace = true
parking_lot.workspace = true
project.workspace = true
regex.workspace = true
@@ -1,18 +1,52 @@
-use std::sync::Arc;
-
-use collections::{HashMap, IndexMap};
-use gpui::App;
+use std::{borrow::Borrow, sync::Arc};
use crate::{Tool, ToolRegistry, ToolSource};
+use collections::{HashMap, HashSet, IndexMap};
+use gpui::{App, SharedString};
+use util::debug_panic;
#[derive(Copy, Clone, PartialEq, Eq, Hash, Default)]
pub struct ToolId(usize);
+/// A unique identifier for a tool within a working set.
+#[derive(Clone, PartialEq, Eq, Hash, Default)]
+pub struct UniqueToolName(SharedString);
+
+impl Borrow<str> for UniqueToolName {
+ fn borrow(&self) -> &str {
+ &self.0
+ }
+}
+
+impl From<String> for UniqueToolName {
+ fn from(value: String) -> Self {
+ UniqueToolName(SharedString::new(value))
+ }
+}
+
+impl Into<String> for UniqueToolName {
+ fn into(self) -> String {
+ self.0.into()
+ }
+}
+
+impl std::fmt::Debug for UniqueToolName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl std::fmt::Display for UniqueToolName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, "{}", self.0.as_ref())
+ }
+}
+
/// A working set of tools for use in one instance of the Assistant Panel.
#[derive(Default)]
pub struct ToolWorkingSet {
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
- context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
+ context_server_tools_by_name: HashMap<UniqueToolName, Arc<dyn Tool>>,
next_tool_id: ToolId,
}
@@ -24,16 +58,20 @@ impl ToolWorkingSet {
.or_else(|| ToolRegistry::global(cx).tool(name))
}
- pub fn tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
- let mut tools = ToolRegistry::global(cx).tools();
- tools.extend(self.context_server_tools_by_id.values().cloned());
+ pub fn tools(&self, cx: &App) -> Vec<(UniqueToolName, Arc<dyn Tool>)> {
+ let mut tools = ToolRegistry::global(cx)
+ .tools()
+ .into_iter()
+ .map(|tool| (UniqueToolName(tool.name().into()), tool))
+ .collect::<Vec<_>>();
+ tools.extend(self.context_server_tools_by_name.clone());
tools
}
pub fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<Arc<dyn Tool>>> {
let mut tools_by_source = IndexMap::default();
- for tool in self.tools(cx) {
+ for (_, tool) in self.tools(cx) {
tools_by_source
.entry(tool.source())
.or_insert_with(Vec::new)
@@ -49,27 +87,324 @@ impl ToolWorkingSet {
tools_by_source
}
- pub fn insert(&mut self, tool: Arc<dyn Tool>) -> ToolId {
+ pub fn insert(&mut self, tool: Arc<dyn Tool>, cx: &App) -> ToolId {
+ let tool_id = self.register_tool(tool);
+ self.tools_changed(cx);
+ tool_id
+ }
+
+ pub fn extend(&mut self, tools: impl Iterator<Item = Arc<dyn Tool>>, cx: &App) -> Vec<ToolId> {
+ let ids = tools.map(|tool| self.register_tool(tool)).collect();
+ self.tools_changed(cx);
+ ids
+ }
+
+ pub fn remove(&mut self, tool_ids_to_remove: &[ToolId], cx: &App) {
+ self.context_server_tools_by_id
+ .retain(|id, _| !tool_ids_to_remove.contains(id));
+ self.tools_changed(cx);
+ }
+
+ fn register_tool(&mut self, tool: Arc<dyn Tool>) -> ToolId {
let tool_id = self.next_tool_id;
self.next_tool_id.0 += 1;
self.context_server_tools_by_id
.insert(tool_id, tool.clone());
- self.tools_changed();
tool_id
}
- pub fn remove(&mut self, tool_ids_to_remove: &[ToolId]) {
- self.context_server_tools_by_id
- .retain(|id, _| !tool_ids_to_remove.contains(id));
- self.tools_changed();
+ fn tools_changed(&mut self, cx: &App) {
+ self.context_server_tools_by_name = resolve_context_server_tool_name_conflicts(
+ &self
+ .context_server_tools_by_id
+ .values()
+ .cloned()
+ .collect::<Vec<_>>(),
+ &ToolRegistry::global(cx).tools(),
+ );
+ }
+}
+
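+/// Builds the unique-name map for context server tools, prefixing a tool's name with its context
+/// server ID (truncated to fit) whenever the plain name would collide with a native tool or another
+/// context server's tool.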
+fn resolve_context_server_tool_name_conflicts(
+ context_server_tools: &[Arc<dyn Tool>],
+ native_tools: &[Arc<dyn Tool>],
+) -> HashMap<UniqueToolName, Arc<dyn Tool>> {
+ fn resolve_tool_name(tool: &Arc<dyn Tool>) -> String {
+ let mut tool_name = tool.name();
+ tool_name.truncate(MAX_TOOL_NAME_LENGTH);
+ tool_name
}
- fn tools_changed(&mut self) {
- self.context_server_tools_by_name.clear();
- self.context_server_tools_by_name.extend(
- self.context_server_tools_by_id
- .values()
- .map(|tool| (tool.name(), tool.clone())),
+ const MAX_TOOL_NAME_LENGTH: usize = 64;
+
+ let mut duplicated_tool_names = HashSet::default();
+ let mut seen_tool_names = HashSet::default();
+ seen_tool_names.extend(native_tools.iter().map(|tool| tool.name()));
+ for tool in context_server_tools {
+ let tool_name = resolve_tool_name(tool);
+ if seen_tool_names.contains(&tool_name) {
+ debug_assert!(
+ tool.source() != ToolSource::Native,
+ "Expected MCP tool but got a native tool: {}",
+ tool_name
+ );
+ duplicated_tool_names.insert(tool_name);
+ } else {
+ seen_tool_names.insert(tool_name);
+ }
+ }
+
+ if duplicated_tool_names.is_empty() {
+ return context_server_tools
+ .into_iter()
+ .map(|tool| (resolve_tool_name(tool).into(), tool.clone()))
+ .collect();
+ }
+
+ context_server_tools
+ .into_iter()
+ .filter_map(|tool| {
+ let mut tool_name = resolve_tool_name(tool);
+ if !duplicated_tool_names.contains(&tool_name) {
+ return Some((tool_name.into(), tool.clone()));
+ }
+ match tool.source() {
+ ToolSource::Native => {
+ debug_panic!("Expected MCP tool but got a native tool: {}", tool_name);
+ // Built-in tools always keep their original name
+ Some((tool_name.into(), tool.clone()))
+ }
+ ToolSource::ContextServer { id } => {
+ // Context server tools are prefixed with the context server ID, and truncated if necessary
+ tool_name.insert(0, '_');
+ if tool_name.len() + id.len() > MAX_TOOL_NAME_LENGTH {
+ let len = MAX_TOOL_NAME_LENGTH - tool_name.len();
+ let mut id = id.to_string();
+ id.truncate(len);
+ tool_name.insert_str(0, &id);
+ } else {
+ tool_name.insert_str(0, &id);
+ }
+
+ tool_name.truncate(MAX_TOOL_NAME_LENGTH);
+
+ if seen_tool_names.contains(&tool_name) {
+ log::error!("Cannot resolve tool name conflict for tool {}", tool.name());
+ None
+ } else {
+ Some((tool_name.into(), tool.clone()))
+ }
+ }
+ }
+ })
+ .collect()
+}
+#[cfg(test)]
+mod tests {
+ use gpui::{AnyWindowHandle, Entity, Task, TestAppContext};
+ use language_model::{LanguageModel, LanguageModelRequest};
+ use project::Project;
+
+ use crate::{ActionLog, ToolResult};
+
+ use super::*;
+
+ #[gpui::test]
+ fn test_unique_tool_names(cx: &mut TestAppContext) {
+ fn assert_tool(
+ tool_working_set: &ToolWorkingSet,
+ unique_name: &str,
+ expected_name: &str,
+ expected_source: ToolSource,
+ cx: &App,
+ ) {
+ let tool = tool_working_set.tool(unique_name, cx).unwrap();
+ assert_eq!(tool.name(), expected_name);
+ assert_eq!(tool.source(), expected_source);
+ }
+
+ let tool_registry = cx.update(ToolRegistry::default_global);
+ tool_registry.register_tool(TestTool::new("tool1", ToolSource::Native));
+ tool_registry.register_tool(TestTool::new("tool2", ToolSource::Native));
+
+ let mut tool_working_set = ToolWorkingSet::default();
+ cx.update(|cx| {
+ tool_working_set.extend(
+ vec![
+ Arc::new(TestTool::new(
+ "tool2",
+ ToolSource::ContextServer { id: "mcp-1".into() },
+ )) as Arc<dyn Tool>,
+ Arc::new(TestTool::new(
+ "tool2",
+ ToolSource::ContextServer { id: "mcp-2".into() },
+ )) as Arc<dyn Tool>,
+ ]
+ .into_iter(),
+ cx,
+ );
+ });
+
+ cx.update(|cx| {
+ assert_tool(&tool_working_set, "tool1", "tool1", ToolSource::Native, cx);
+ assert_tool(&tool_working_set, "tool2", "tool2", ToolSource::Native, cx);
+ assert_tool(
+ &tool_working_set,
+ "mcp-1_tool2",
+ "tool2",
+ ToolSource::ContextServer { id: "mcp-1".into() },
+ cx,
+ );
+ assert_tool(
+ &tool_working_set,
+ "mcp-2_tool2",
+ "tool2",
+ ToolSource::ContextServer { id: "mcp-2".into() },
+ cx,
+ );
+ })
+ }
+
+ #[gpui::test]
+ fn test_resolve_context_server_tool_name_conflicts() {
+ assert_resolve_context_server_tool_name_conflicts(
+ vec![
+ TestTool::new("tool1", ToolSource::Native),
+ TestTool::new("tool2", ToolSource::Native),
+ ],
+ vec![TestTool::new(
+ "tool3",
+ ToolSource::ContextServer { id: "mcp-1".into() },
+ )],
+ vec!["tool3"],
);
+
+ assert_resolve_context_server_tool_name_conflicts(
+ vec![
+ TestTool::new("tool1", ToolSource::Native),
+ TestTool::new("tool2", ToolSource::Native),
+ ],
+ vec![
+ TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
+ TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
+ ],
+ vec!["mcp-1_tool3", "mcp-2_tool3"],
+ );
+
+ assert_resolve_context_server_tool_name_conflicts(
+ vec![
+ TestTool::new("tool1", ToolSource::Native),
+ TestTool::new("tool2", ToolSource::Native),
+ TestTool::new("tool3", ToolSource::Native),
+ ],
+ vec![
+ TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-1".into() }),
+ TestTool::new("tool3", ToolSource::ContextServer { id: "mcp-2".into() }),
+ ],
+ vec!["mcp-1_tool3", "mcp-2_tool3"],
+ );
+
+        // Test deduplication of tools with very long names; in this case the MCP server name should be truncated.
+ assert_resolve_context_server_tool_name_conflicts(
+ vec![TestTool::new(
+ "tool-with-very-very-very-long-name",
+ ToolSource::Native,
+ )],
+ vec![TestTool::new(
+ "tool-with-very-very-very-long-name",
+ ToolSource::ContextServer {
+ id: "mcp-with-very-very-very-long-name".into(),
+ },
+ )],
+ vec!["mcp-with-very-very-very-long-_tool-with-very-very-very-long-name"],
+ );
+
+ fn assert_resolve_context_server_tool_name_conflicts(
+ builtin_tools: Vec<TestTool>,
+ context_server_tools: Vec<TestTool>,
+ expected: Vec<&'static str>,
+ ) {
+ let context_server_tools: Vec<Arc<dyn Tool>> = context_server_tools
+ .into_iter()
+ .map(|t| Arc::new(t) as Arc<dyn Tool>)
+ .collect();
+ let builtin_tools: Vec<Arc<dyn Tool>> = builtin_tools
+ .into_iter()
+ .map(|t| Arc::new(t) as Arc<dyn Tool>)
+ .collect();
+ let tools =
+ resolve_context_server_tool_name_conflicts(&context_server_tools, &builtin_tools);
+ assert_eq!(tools.len(), expected.len());
+ for (i, (name, _)) in tools.into_iter().enumerate() {
+ assert_eq!(
+ name.0.as_ref(),
+ expected[i],
+ "Expected '{}' got '{}' at index {}",
+ expected[i],
+ name,
+ i
+ );
+ }
+ }
+ }
+
+ struct TestTool {
+ name: String,
+ source: ToolSource,
+ }
+
+ impl TestTool {
+ fn new(name: impl Into<String>, source: ToolSource) -> Self {
+ Self {
+ name: name.into(),
+ source,
+ }
+ }
+ }
+
+ impl Tool for TestTool {
+ fn name(&self) -> String {
+ self.name.clone()
+ }
+
+ fn icon(&self) -> icons::IconName {
+ icons::IconName::Ai
+ }
+
+ fn may_perform_edits(&self) -> bool {
+ false
+ }
+
+ fn needs_confirmation(&self, _input: &serde_json::Value, _cx: &App) -> bool {
+ true
+ }
+
+ fn source(&self) -> ToolSource {
+ self.source.clone()
+ }
+
+ fn description(&self) -> String {
+ "Test tool".to_string()
+ }
+
+ fn ui_text(&self, _input: &serde_json::Value) -> String {
+ "Test tool".to_string()
+ }
+
+ fn run(
+ self: Arc<Self>,
+ _input: serde_json::Value,
+ _request: Arc<LanguageModelRequest>,
+ _project: Entity<Project>,
+ _action_log: Entity<ActionLog>,
+ _model: Arc<dyn LanguageModel>,
+ _window: Option<AnyWindowHandle>,
+ _cx: &mut App,
+ ) -> ToolResult {
+ ToolResult {
+ output: Task::ready(Err(anyhow::anyhow!("No content"))),
+ card: None,
+ }
+ }
}
}
@@ -29,6 +29,7 @@ use std::{
path::Path,
str::FromStr,
sync::mpsc,
+ time::Duration,
};
use util::path;
@@ -1658,12 +1659,14 @@ async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) ->
match request().await {
Ok(result) => return Ok(result),
Err(err) => match err.downcast::<LanguageModelCompletionError>() {
- Ok(err) => match err {
- LanguageModelCompletionError::RateLimitExceeded { retry_after } => {
+ Ok(err) => match &err {
+ LanguageModelCompletionError::RateLimitExceeded { retry_after, .. }
+ | LanguageModelCompletionError::ServerOverloaded { retry_after, .. } => {
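+                        // Fall back to a 5-second delay when the response did not include a Retry-After value.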
+ let retry_after = retry_after.unwrap_or(Duration::from_secs(5));
// Wait for the duration supplied, with some jitter to avoid all requests being made at the same time.
let jitter = retry_after.mul_f64(rand::thread_rng().gen_range(0.0..1.0));
eprintln!(
- "Attempt #{attempt}: Rate limit exceeded. Retry after {retry_after:?} + jitter of {jitter:?}"
+ "Attempt #{attempt}: {err}. Retry after {retry_after:?} + jitter of {jitter:?}"
);
Timer::after(retry_after + jitter).await;
continue;
@@ -9132,7 +9132,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.manipulate_immutable_lines(window, cx, |lines| lines.sort())
+ self.manipulate_lines(window, cx, |lines| lines.sort())
}
pub fn sort_lines_case_insensitive(
@@ -9141,7 +9141,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.manipulate_immutable_lines(window, cx, |lines| {
+ self.manipulate_lines(window, cx, |lines| {
lines.sort_by_key(|line| line.to_lowercase())
})
}
@@ -9152,7 +9152,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.manipulate_immutable_lines(window, cx, |lines| {
+ self.manipulate_lines(window, cx, |lines| {
let mut seen = HashSet::default();
lines.retain(|line| seen.insert(line.to_lowercase()));
})
@@ -9164,7 +9164,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.manipulate_immutable_lines(window, cx, |lines| {
+ self.manipulate_lines(window, cx, |lines| {
let mut seen = HashSet::default();
lines.retain(|line| seen.insert(*line));
})
@@ -9606,20 +9606,20 @@ impl Editor {
}
pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context<Self>) {
- self.manipulate_immutable_lines(window, cx, |lines| lines.reverse())
+ self.manipulate_lines(window, cx, |lines| lines.reverse())
}
pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context<Self>) {
- self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
+ self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
}
- fn manipulate_lines<M>(
+ fn manipulate_lines<Fn>(
&mut self,
window: &mut Window,
cx: &mut Context<Self>,
- mut manipulate: M,
+ mut callback: Fn,
) where
- M: FnMut(&str) -> LineManipulationResult,
+ Fn: FnMut(&mut Vec<&str>),
{
self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
@@ -9652,14 +9652,18 @@ impl Editor {
.text_for_range(start_point..end_point)
.collect::<String>();
- let LineManipulationResult { new_text, line_count_before, line_count_after} = manipulate(&text);
+ let mut lines = text.split('\n').collect_vec();
- edits.push((start_point..end_point, new_text));
+ let lines_before = lines.len();
+ callback(&mut lines);
+ let lines_after = lines.len();
+
+ edits.push((start_point..end_point, lines.join("\n")));
// Selections must change based on added and removed line count
let start_row =
MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32);
- let end_row = MultiBufferRow(start_row.0 + line_count_after.saturating_sub(1) as u32);
+ let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32);
new_selections.push(Selection {
id: selection.id,
start: start_row,
@@ -9668,10 +9672,10 @@ impl Editor {
reversed: selection.reversed,
});
- if line_count_after > line_count_before {
- added_lines += line_count_after - line_count_before;
- } else if line_count_before > line_count_after {
- removed_lines += line_count_before - line_count_after;
+ if lines_after > lines_before {
+ added_lines += lines_after - lines_before;
+ } else if lines_before > lines_after {
+ removed_lines += lines_before - lines_after;
}
}
@@ -9716,171 +9720,6 @@ impl Editor {
})
}
- fn manipulate_immutable_lines<Fn>(
- &mut self,
- window: &mut Window,
- cx: &mut Context<Self>,
- mut callback: Fn,
- ) where
- Fn: FnMut(&mut Vec<&str>),
- {
- self.manipulate_lines(window, cx, |text| {
- let mut lines: Vec<&str> = text.split('\n').collect();
- let line_count_before = lines.len();
-
- callback(&mut lines);
-
- LineManipulationResult {
- new_text: lines.join("\n"),
- line_count_before,
- line_count_after: lines.len(),
- }
- });
- }
-
- fn manipulate_mutable_lines<Fn>(
- &mut self,
- window: &mut Window,
- cx: &mut Context<Self>,
- mut callback: Fn,
- ) where
- Fn: FnMut(&mut Vec<Cow<'_, str>>),
- {
- self.manipulate_lines(window, cx, |text| {
- let mut lines: Vec<Cow<str>> = text.split('\n').map(Cow::from).collect();
- let line_count_before = lines.len();
-
- callback(&mut lines);
-
- LineManipulationResult {
- new_text: lines.join("\n"),
- line_count_before,
- line_count_after: lines.len(),
- }
- });
- }
-
- pub fn convert_indentation_to_spaces(
- &mut self,
- _: &ConvertIndentationToSpaces,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let settings = self.buffer.read(cx).language_settings(cx);
- let tab_size = settings.tab_size.get() as usize;
-
- self.manipulate_mutable_lines(window, cx, |lines| {
- // Allocates a reasonably sized scratch buffer once for the whole loop
- let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
- // Avoids recomputing spaces that could be inserted many times
- let space_cache: Vec<Vec<char>> = (1..=tab_size)
- .map(|n| IndentSize::spaces(n as u32).chars().collect())
- .collect();
-
- for line in lines.iter_mut().filter(|line| !line.is_empty()) {
- let mut chars = line.as_ref().chars();
- let mut col = 0;
- let mut changed = false;
-
- while let Some(ch) = chars.next() {
- match ch {
- ' ' => {
- reindented_line.push(' ');
- col += 1;
- }
- '\t' => {
- // \t are converted to spaces depending on the current column
- let spaces_len = tab_size - (col % tab_size);
- reindented_line.extend(&space_cache[spaces_len - 1]);
- col += spaces_len;
- changed = true;
- }
- _ => {
- // If we dont append before break, the character is consumed
- reindented_line.push(ch);
- break;
- }
- }
- }
-
- if !changed {
- reindented_line.clear();
- continue;
- }
- // Append the rest of the line and replace old reference with new one
- reindented_line.extend(chars);
- *line = Cow::Owned(reindented_line.clone());
- reindented_line.clear();
- }
- });
- }
-
- pub fn convert_indentation_to_tabs(
- &mut self,
- _: &ConvertIndentationToTabs,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let settings = self.buffer.read(cx).language_settings(cx);
- let tab_size = settings.tab_size.get() as usize;
-
- self.manipulate_mutable_lines(window, cx, |lines| {
- // Allocates a reasonably sized buffer once for the whole loop
- let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
- // Avoids recomputing spaces that could be inserted many times
- let space_cache: Vec<Vec<char>> = (1..=tab_size)
- .map(|n| IndentSize::spaces(n as u32).chars().collect())
- .collect();
-
- for line in lines.iter_mut().filter(|line| !line.is_empty()) {
- let mut chars = line.chars();
- let mut spaces_count = 0;
- let mut first_non_indent_char = None;
- let mut changed = false;
-
- while let Some(ch) = chars.next() {
- match ch {
- ' ' => {
- // Keep track of spaces. Append \t when we reach tab_size
- spaces_count += 1;
- changed = true;
- if spaces_count == tab_size {
- reindented_line.push('\t');
- spaces_count = 0;
- }
- }
- '\t' => {
- reindented_line.push('\t');
- spaces_count = 0;
- }
- _ => {
- // Dont append it yet, we might have remaining spaces
- first_non_indent_char = Some(ch);
- break;
- }
- }
- }
-
- if !changed {
- reindented_line.clear();
- continue;
- }
- // Remaining spaces that didn't make a full tab stop
- if spaces_count > 0 {
- reindented_line.extend(&space_cache[spaces_count - 1]);
- }
- // If we consume an extra character that was not indentation, add it back
- if let Some(extra_char) = first_non_indent_char {
- reindented_line.push(extra_char);
- }
- // Append the rest of the line and replace old reference with new one
- reindented_line.extend(chars);
- *line = Cow::Owned(reindented_line.clone());
- reindented_line.clear();
- }
- });
- }
-
pub fn convert_to_upper_case(
&mut self,
_: &ConvertToUpperCase,
@@ -21318,13 +21157,6 @@ pub struct LineHighlight {
pub type_id: Option<TypeId>,
}
-struct LineManipulationResult {
- pub new_text: String,
- pub line_count_before: usize,
- pub line_count_after: usize,
-}
-
-
fn render_diff_hunk_controls(
row: u32,
status: &DiffHunkStatus,
@@ -1,8 +1,9 @@
use anyhow::Result;
use language_model::LanguageModelToolSchemaFormat;
use schemars::{
- JsonSchema,
- schema::{RootSchema, Schema, SchemaObject},
+ JsonSchema, Schema,
+ generate::SchemaSettings,
+ transform::{Transform, transform_subschemas},
};
pub fn json_schema_for<T: JsonSchema>(
@@ -13,7 +14,7 @@ pub fn json_schema_for<T: JsonSchema>(
}
fn schema_to_json(
- schema: &RootSchema,
+ schema: &Schema,
format: LanguageModelToolSchemaFormat,
) -> Result<serde_json::Value> {
let mut value = serde_json::to_value(schema)?;
@@ -21,58 +22,40 @@ fn schema_to_json(
Ok(value)
}
-fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> RootSchema {
+fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
let mut generator = match format {
- LanguageModelToolSchemaFormat::JsonSchema => schemars::SchemaGenerator::default(),
- LanguageModelToolSchemaFormat::JsonSchemaSubset => {
- schemars::r#gen::SchemaSettings::default()
- .with(|settings| {
- settings.meta_schema = None;
- settings.inline_subschemas = true;
- settings
- .visitors
- .push(Box::new(TransformToJsonSchemaSubsetVisitor));
- })
- .into_generator()
- }
+ LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
+ LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3()
+ .with(|settings| {
+ settings.meta_schema = None;
+ settings.inline_subschemas = true;
+ })
+ .with_transform(ToJsonSchemaSubsetTransform)
+ .into_generator(),
};
generator.root_schema_for::<T>()
}
#[derive(Debug, Clone)]
-struct TransformToJsonSchemaSubsetVisitor;
-
-impl schemars::visit::Visitor for TransformToJsonSchemaSubsetVisitor {
- fn visit_root_schema(&mut self, root: &mut RootSchema) {
- schemars::visit::visit_root_schema(self, root)
- }
+struct ToJsonSchemaSubsetTransform;
- fn visit_schema(&mut self, schema: &mut Schema) {
- schemars::visit::visit_schema(self, schema)
- }
-
- fn visit_schema_object(&mut self, schema: &mut SchemaObject) {
+impl Transform for ToJsonSchemaSubsetTransform {
+ fn transform(&mut self, schema: &mut Schema) {
// Ensure that the type field is not an array, this happens when we use
// Option<T>, the type will be [T, "null"].
- if let Some(instance_type) = schema.instance_type.take() {
- schema.instance_type = match instance_type {
- schemars::schema::SingleOrVec::Single(t) => {
- Some(schemars::schema::SingleOrVec::Single(t))
+ if let Some(type_field) = schema.get_mut("type") {
+ if let Some(types) = type_field.as_array() {
+ if let Some(first_type) = types.first() {
+ *type_field = first_type.clone();
}
- schemars::schema::SingleOrVec::Vec(items) => items
- .into_iter()
- .next()
- .map(schemars::schema::SingleOrVec::from),
- };
+ }
}
- // One of is not supported, use anyOf instead.
- if let Some(subschema) = schema.subschemas.as_mut() {
- if let Some(one_of) = subschema.one_of.take() {
- subschema.any_of = Some(one_of);
- }
+ // oneOf is not supported, use anyOf instead
+ if let Some(one_of) = schema.remove("oneOf") {
+ schema.insert("anyOf".to_string(), one_of);
}
- schemars::visit::visit_schema_object(self, schema)
+ transform_subschemas(self, schema);
}
}
@@ -218,7 +218,7 @@ impl Tool for TerminalTool {
.update(cx, |project, cx| {
project.create_terminal(
TerminalKind::Task(task::SpawnInTerminal {
- command: program,
+ command: Some(program),
args,
cwd,
env,
@@ -28,7 +28,17 @@ use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60);
-actions!(auto_update, [Check, DismissErrorMessage, ViewReleaseNotes,]);
+actions!(
+ auto_update,
+ [
+ /// Checks for available updates.
+ Check,
+ /// Dismisses the update error message.
+ DismissErrorMessage,
+ /// Opens the release notes for the current version in a browser.
+ ViewReleaseNotes,
+ ]
+);
#[derive(Serialize)]
struct UpdateRequestBody {
@@ -12,7 +12,13 @@ use workspace::Workspace;
use workspace::notifications::simple_message_notification::MessageNotification;
use workspace::notifications::{NotificationId, show_app_notification};
-actions!(auto_update, [ViewReleaseNotesLocally]);
+actions!(
+ auto_update,
+ [
+ /// Opens the release notes for the current version in a new tab.
+ ViewReleaseNotesLocally
+ ]
+);
pub fn init(cx: &mut App) {
notify_if_app_was_updated(cx);
@@ -25,5 +25,4 @@ serde.workspace = true
serde_json.workspace = true
strum.workspace = true
thiserror.workspace = true
-tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
workspace-hack.workspace = true
@@ -1,9 +1,6 @@
mod models;
-use std::collections::HashMap;
-use std::pin::Pin;
-
-use anyhow::{Context as _, Error, Result, anyhow};
+use anyhow::{Context, Error, Result, anyhow};
use aws_sdk_bedrockruntime as bedrock;
pub use aws_sdk_bedrockruntime as bedrock_client;
pub use aws_sdk_bedrockruntime::types::{
@@ -24,9 +21,10 @@ pub use bedrock::types::{
ToolResultContentBlock as BedrockToolResultContentBlock,
ToolResultStatus as BedrockToolResultStatus, ToolUseBlock as BedrockToolUseBlock,
};
-use futures::stream::{self, BoxStream, Stream};
+use futures::stream::{self, BoxStream};
use serde::{Deserialize, Serialize};
use serde_json::{Number, Value};
+use std::collections::HashMap;
use thiserror::Error;
pub use crate::models::*;
@@ -34,70 +32,59 @@ pub use crate::models::*;
pub async fn stream_completion(
client: bedrock::Client,
request: Request,
- handle: tokio::runtime::Handle,
) -> Result<BoxStream<'static, Result<BedrockStreamingResponse, BedrockError>>, Error> {
- handle
- .spawn(async move {
- let mut response = bedrock::Client::converse_stream(&client)
- .model_id(request.model.clone())
- .set_messages(request.messages.into());
+ let mut response = bedrock::Client::converse_stream(&client)
+ .model_id(request.model.clone())
+ .set_messages(request.messages.into());
- if let Some(Thinking::Enabled {
- budget_tokens: Some(budget_tokens),
- }) = request.thinking
- {
- response =
- response.additional_model_request_fields(Document::Object(HashMap::from([(
- "thinking".to_string(),
- Document::from(HashMap::from([
- ("type".to_string(), Document::String("enabled".to_string())),
- (
- "budget_tokens".to_string(),
- Document::Number(AwsNumber::PosInt(budget_tokens)),
- ),
- ])),
- )])));
- }
+ if let Some(Thinking::Enabled {
+ budget_tokens: Some(budget_tokens),
+ }) = request.thinking
+ {
+ let thinking_config = HashMap::from([
+ ("type".to_string(), Document::String("enabled".to_string())),
+ (
+ "budget_tokens".to_string(),
+ Document::Number(AwsNumber::PosInt(budget_tokens)),
+ ),
+ ]);
+ response = response.additional_model_request_fields(Document::Object(HashMap::from([(
+ "thinking".to_string(),
+ Document::from(thinking_config),
+ )])));
+ }
- if request.tools.is_some() && !request.tools.as_ref().unwrap().tools.is_empty() {
- response = response.set_tool_config(request.tools);
- }
+ if request
+ .tools
+ .as_ref()
+ .map_or(false, |t| !t.tools.is_empty())
+ {
+ response = response.set_tool_config(request.tools);
+ }
- let response = response.send().await;
+ let output = response
+ .send()
+ .await
+ .context("Failed to send API request to Bedrock");
- match response {
- Ok(output) => {
- let stream: Pin<
- Box<
- dyn Stream<Item = Result<BedrockStreamingResponse, BedrockError>>
- + Send,
- >,
- > = Box::pin(stream::unfold(output.stream, |mut stream| async move {
- match stream.recv().await {
- Ok(Some(output)) => Some(({ Ok(output) }, stream)),
- Ok(None) => None,
- Err(err) => {
- Some((
- // TODO: Figure out how we can capture Throttling Exceptions
- Err(BedrockError::ClientError(anyhow!(
- "{:?}",
- aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
- ))),
- stream,
- ))
- }
- }
- }));
- Ok(stream)
- }
- Err(err) => Err(anyhow!(
- "{:?}",
- aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
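+    // Adapt the AWS event stream into a futures Stream, converting SDK errors into BedrockError.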
+ let stream = Box::pin(stream::unfold(
+ output?.stream,
+ move |mut stream| async move {
+ match stream.recv().await {
+ Ok(Some(output)) => Some((Ok(output), stream)),
+ Ok(None) => None,
+ Err(err) => Some((
+ Err(BedrockError::ClientError(anyhow!(
+ "{:?}",
+ aws_sdk_bedrockruntime::error::DisplayErrorContext(err)
+ ))),
+ stream,
)),
}
- })
- .await
- .context("spawning a task")?
+ },
+ ));
+
+ Ok(stream)
}
pub fn aws_document_to_value(document: &Document) -> Value {
@@ -29,7 +29,7 @@ client.workspace = true
collections.workspace = true
fs.workspace = true
futures.workspace = true
-gpui.workspace = true
+gpui = { workspace = true, features = ["screen-capture"] }
language.workspace = true
log.workspace = true
postage.workspace = true
@@ -12,7 +12,6 @@ pub struct CallSettings {
/// Configuration of voice calls in Zed.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
-#[schemars(deny_unknown_fields)]
pub struct CallSettingsContent {
/// Whether the microphone should be muted when joining a channel or a call.
///
@@ -81,7 +81,17 @@ pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(500);
pub const MAX_RECONNECTION_DELAY: Duration = Duration::from_secs(10);
pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(20);
-actions!(client, [SignIn, SignOut, Reconnect]);
+actions!(
+ client,
+ [
+ /// Signs in to Zed account.
+ SignIn,
+ /// Signs out of Zed account.
+ SignOut,
+ /// Reconnects to the collaboration server.
+ Reconnect
+ ]
+);
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ClientSettingsContent {
@@ -35,6 +35,7 @@ dashmap.workspace = true
derive_more.workspace = true
envy = "0.4.2"
futures.workspace = true
+gpui = { workspace = true, features = ["screen-capture"] }
hex.workspace = true
http_client.workspace = true
jsonwebtoken.workspace = true
@@ -107,7 +107,7 @@ CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("proj
CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
CREATE TABLE "project_repositories" (
- "project_id" INTEGER NOT NULL,
+ "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"abs_path" VARCHAR,
"id" INTEGER NOT NULL,
"entry_ids" VARCHAR,
@@ -124,7 +124,7 @@ CREATE TABLE "project_repositories" (
CREATE INDEX "index_project_repositories_on_project_id" ON "project_repositories" ("project_id");
CREATE TABLE "project_repository_statuses" (
- "project_id" INTEGER NOT NULL,
+ "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"repository_id" INTEGER NOT NULL,
"repo_path" VARCHAR NOT NULL,
"status" INT8 NOT NULL,
@@ -0,0 +1,25 @@
+DELETE FROM project_repositories
+WHERE project_id NOT IN (SELECT id FROM projects);
+
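+-- The foreign keys below are added as NOT VALID so existing rows are not checked up front, then
+-- validated in a separate step, which verifies old rows without blocking concurrent writes.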
+ALTER TABLE project_repositories
+ ADD CONSTRAINT fk_project_repositories_project_id
+ FOREIGN KEY (project_id)
+ REFERENCES projects (id)
+ ON DELETE CASCADE
+ NOT VALID;
+
+ALTER TABLE project_repositories
+ VALIDATE CONSTRAINT fk_project_repositories_project_id;
+
+DELETE FROM project_repository_statuses
+WHERE project_id NOT IN (SELECT id FROM projects);
+
+ALTER TABLE project_repository_statuses
+ ADD CONSTRAINT fk_project_repository_statuses_project_id
+ FOREIGN KEY (project_id)
+ REFERENCES projects (id)
+ ON DELETE CASCADE
+ NOT VALID;
+
+ALTER TABLE project_repository_statuses
+ VALIDATE CONSTRAINT fk_project_repository_statuses_project_id;
@@ -1404,6 +1404,9 @@ async fn sync_model_request_usage_with_stripe(
llm_db: &Arc<LlmDatabase>,
stripe_billing: &Arc<StripeBilling>,
) -> anyhow::Result<()> {
+ log::info!("Stripe usage sync: Starting");
+ let started_at = Utc::now();
+
let staff_users = app.db.get_staff_users().await?;
let staff_user_ids = staff_users
.iter()
@@ -1448,6 +1451,10 @@ async fn sync_model_request_usage_with_stripe(
.find_price_by_lookup_key("claude-3-7-sonnet-requests-max")
.await?;
+ let usage_meter_count = usage_meters.len();
+
+ log::info!("Stripe usage sync: Syncing {usage_meter_count} usage meters");
+
for (usage_meter, usage) in usage_meters {
maybe!(async {
let Some((billing_customer, billing_subscription)) =
@@ -1504,5 +1511,10 @@ async fn sync_model_request_usage_with_stripe(
.log_err();
}
+ log::info!(
+ "Stripe usage sync: Synced {usage_meter_count} usage meters in {:?}",
+ Utc::now() - started_at
+ );
+
Ok(())
}
@@ -4,20 +4,19 @@ mod tables;
#[cfg(test)]
pub mod tests;
-use crate::{Error, Result, executor::Executor};
+use crate::{Error, Result};
use anyhow::{Context as _, anyhow};
use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use dashmap::DashMap;
use futures::StreamExt;
use project_repository_statuses::StatusKind;
-use rand::{Rng, SeedableRng, prelude::StdRng};
use rpc::ExtensionProvides;
use rpc::{
ConnectionId, ExtensionMetadata,
proto::{self},
};
use sea_orm::{
- ActiveValue, Condition, ConnectionTrait, DatabaseConnection, DatabaseTransaction, DbErr,
+ ActiveValue, Condition, ConnectionTrait, DatabaseConnection, DatabaseTransaction,
FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement,
TransactionTrait,
entity::prelude::*,
@@ -33,7 +32,6 @@ use std::{
ops::{Deref, DerefMut},
rc::Rc,
sync::Arc,
- time::Duration,
};
use time::PrimitiveDateTime;
use tokio::sync::{Mutex, OwnedMutexGuard};
@@ -58,6 +56,7 @@ pub use tables::*;
#[cfg(test)]
pub struct DatabaseTestOptions {
+ pub executor: gpui::BackgroundExecutor,
pub runtime: tokio::runtime::Runtime,
pub query_failure_probability: parking_lot::Mutex<f64>,
}
@@ -69,8 +68,6 @@ pub struct Database {
pool: DatabaseConnection,
rooms: DashMap<RoomId, Arc<Mutex<()>>>,
projects: DashMap<ProjectId, Arc<Mutex<()>>>,
- rng: Mutex<StdRng>,
- executor: Executor,
notification_kinds_by_id: HashMap<NotificationKindId, &'static str>,
notification_kinds_by_name: HashMap<String, NotificationKindId>,
#[cfg(test)]
@@ -81,17 +78,15 @@ pub struct Database {
// separate files in the `queries` folder.
impl Database {
/// Connects to the database with the given options
- pub async fn new(options: ConnectOptions, executor: Executor) -> Result<Self> {
+ pub async fn new(options: ConnectOptions) -> Result<Self> {
sqlx::any::install_default_drivers();
Ok(Self {
options: options.clone(),
pool: sea_orm::Database::connect(options).await?,
rooms: DashMap::with_capacity(16384),
projects: DashMap::with_capacity(16384),
- rng: Mutex::new(StdRng::seed_from_u64(0)),
notification_kinds_by_id: HashMap::default(),
notification_kinds_by_name: HashMap::default(),
- executor,
#[cfg(test)]
test_options: None,
})
@@ -107,48 +102,13 @@ impl Database {
self.projects.clear();
}
- /// Transaction runs things in a transaction. If you want to call other methods
- /// and pass the transaction around you need to reborrow the transaction at each
- /// call site with: `&*tx`.
pub async fn transaction<F, Fut, T>(&self, f: F) -> Result<T>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let body = async {
- let mut i = 0;
- loop {
- let (tx, result) = self.with_transaction(&f).await?;
- match result {
- Ok(result) => match tx.commit().await.map_err(Into::into) {
- Ok(()) => return Ok(result),
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- },
- Err(error) => {
- tx.rollback().await?;
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- }
- i += 1;
- }
- };
-
- self.run(body).await
- }
-
- pub async fn weak_transaction<F, Fut, T>(&self, f: F) -> Result<T>
- where
- F: Send + Fn(TransactionHandle) -> Fut,
- Fut: Send + Future<Output = Result<T>>,
- {
- let body = async {
- let (tx, result) = self.with_weak_transaction(&f).await?;
+ let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(result) => match tx.commit().await.map_err(Into::into) {
Ok(()) => Ok(result),
@@ -174,44 +134,28 @@ impl Database {
Fut: Send + Future<Output = Result<Option<(RoomId, T)>>>,
{
let body = async {
- let mut i = 0;
- loop {
- let (tx, result) = self.with_transaction(&f).await?;
- match result {
- Ok(Some((room_id, data))) => {
- let lock = self.rooms.entry(room_id).or_default().clone();
- let _guard = lock.lock_owned().await;
- match tx.commit().await.map_err(Into::into) {
- Ok(()) => {
- return Ok(Some(TransactionGuard {
- data,
- _guard,
- _not_send: PhantomData,
- }));
- }
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- }
- }
- Ok(None) => match tx.commit().await.map_err(Into::into) {
- Ok(()) => return Ok(None),
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- },
- Err(error) => {
- tx.rollback().await?;
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
+ let (tx, result) = self.with_transaction(&f).await?;
+ match result {
+ Ok(Some((room_id, data))) => {
+ let lock = self.rooms.entry(room_id).or_default().clone();
+ let _guard = lock.lock_owned().await;
+ match tx.commit().await.map_err(Into::into) {
+ Ok(()) => Ok(Some(TransactionGuard {
+ data,
+ _guard,
+ _not_send: PhantomData,
+ })),
+ Err(error) => Err(error),
}
}
- i += 1;
+ Ok(None) => match tx.commit().await.map_err(Into::into) {
+ Ok(()) => Ok(None),
+ Err(error) => Err(error),
+ },
+ Err(error) => {
+ tx.rollback().await?;
+ Err(error)
+ }
}
};
@@ -229,38 +173,26 @@ impl Database {
{
let room_id = Database::room_id_for_project(self, project_id).await?;
let body = async {
- let mut i = 0;
- loop {
- let lock = if let Some(room_id) = room_id {
- self.rooms.entry(room_id).or_default().clone()
- } else {
- self.projects.entry(project_id).or_default().clone()
- };
- let _guard = lock.lock_owned().await;
- let (tx, result) = self.with_transaction(&f).await?;
- match result {
- Ok(data) => match tx.commit().await.map_err(Into::into) {
- Ok(()) => {
- return Ok(TransactionGuard {
- data,
- _guard,
- _not_send: PhantomData,
- });
- }
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- },
- Err(error) => {
- tx.rollback().await?;
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
+ let lock = if let Some(room_id) = room_id {
+ self.rooms.entry(room_id).or_default().clone()
+ } else {
+ self.projects.entry(project_id).or_default().clone()
+ };
+ let _guard = lock.lock_owned().await;
+ let (tx, result) = self.with_transaction(&f).await?;
+ match result {
+ Ok(data) => match tx.commit().await.map_err(Into::into) {
+ Ok(()) => Ok(TransactionGuard {
+ data,
+ _guard,
+ _not_send: PhantomData,
+ }),
+ Err(error) => Err(error),
+ },
+ Err(error) => {
+ tx.rollback().await?;
+ Err(error)
}
- i += 1;
}
};
@@ -280,34 +212,22 @@ impl Database {
Fut: Send + Future<Output = Result<T>>,
{
let body = async {
- let mut i = 0;
- loop {
- let lock = self.rooms.entry(room_id).or_default().clone();
- let _guard = lock.lock_owned().await;
- let (tx, result) = self.with_transaction(&f).await?;
- match result {
- Ok(data) => match tx.commit().await.map_err(Into::into) {
- Ok(()) => {
- return Ok(TransactionGuard {
- data,
- _guard,
- _not_send: PhantomData,
- });
- }
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- },
- Err(error) => {
- tx.rollback().await?;
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
+ let lock = self.rooms.entry(room_id).or_default().clone();
+ let _guard = lock.lock_owned().await;
+ let (tx, result) = self.with_transaction(&f).await?;
+ match result {
+ Ok(data) => match tx.commit().await.map_err(Into::into) {
+ Ok(()) => Ok(TransactionGuard {
+ data,
+ _guard,
+ _not_send: PhantomData,
+ }),
+ Err(error) => Err(error),
+ },
+ Err(error) => {
+ tx.rollback().await?;
+ Err(error)
}
- i += 1;
}
};
@@ -315,28 +235,6 @@ impl Database {
}
async fn with_transaction<F, Fut, T>(&self, f: &F) -> Result<(DatabaseTransaction, Result<T>)>
- where
- F: Send + Fn(TransactionHandle) -> Fut,
- Fut: Send + Future<Output = Result<T>>,
- {
- let tx = self
- .pool
- .begin_with_config(Some(IsolationLevel::Serializable), None)
- .await?;
-
- let mut tx = Arc::new(Some(tx));
- let result = f(TransactionHandle(tx.clone())).await;
- let tx = Arc::get_mut(&mut tx)
- .and_then(|tx| tx.take())
- .context("couldn't complete transaction because it's still in use")?;
-
- Ok((tx, result))
- }
-
- async fn with_weak_transaction<F, Fut, T>(
- &self,
- f: &F,
- ) -> Result<(DatabaseTransaction, Result<T>)>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
@@ -361,13 +259,13 @@ impl Database {
{
#[cfg(test)]
{
+ use rand::prelude::*;
+
let test_options = self.test_options.as_ref().unwrap();
- if let Executor::Deterministic(executor) = &self.executor {
- executor.simulate_random_delay().await;
- let fail_probability = *test_options.query_failure_probability.lock();
- if executor.rng().gen_bool(fail_probability) {
- return Err(anyhow!("simulated query failure"))?;
- }
+ test_options.executor.simulate_random_delay().await;
+ let fail_probability = *test_options.query_failure_probability.lock();
+ if test_options.executor.rng().gen_bool(fail_probability) {
+ return Err(anyhow!("simulated query failure"))?;
}
test_options.runtime.block_on(future)
@@ -378,46 +276,6 @@ impl Database {
future.await
}
}
-
- async fn retry_on_serialization_error(&self, error: &Error, prev_attempt_count: usize) -> bool {
- // If the error is due to a failure to serialize concurrent transactions, then retry
- // this transaction after a delay. With each subsequent retry, double the delay duration.
- // Also vary the delay randomly in order to ensure different database connections retry
- // at different times.
- const SLEEPS: [f32; 10] = [10., 20., 40., 80., 160., 320., 640., 1280., 2560., 5120.];
- if is_serialization_error(error) && prev_attempt_count < SLEEPS.len() {
- let base_delay = SLEEPS[prev_attempt_count];
- let randomized_delay = base_delay * self.rng.lock().await.gen_range(0.5..=2.0);
- log::warn!(
- "retrying transaction after serialization error. delay: {} ms.",
- randomized_delay
- );
- self.executor
- .sleep(Duration::from_millis(randomized_delay as u64))
- .await;
- true
- } else {
- false
- }
- }
-}
-
-fn is_serialization_error(error: &Error) -> bool {
- const SERIALIZATION_FAILURE_CODE: &str = "40001";
- match error {
- Error::Database(
- DbErr::Exec(sea_orm::RuntimeErr::SqlxError(error))
- | DbErr::Query(sea_orm::RuntimeErr::SqlxError(error)),
- ) if error
- .as_database_error()
- .and_then(|error| error.code())
- .as_deref()
- == Some(SERIALIZATION_FAILURE_CODE) =>
- {
- true
- }
- _ => false,
- }
}
/// A handle to a [`DatabaseTransaction`].
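Since the automatic retry-with-backoff above is removed, serialization failures (SQLSTATE 40001) now surface directly to callers of `transaction`. A purely hypothetical sketch, assuming the module's existing `Database`, `TransactionHandle`, and `Result` types plus a check along the lines of the removed `is_serialization_error`, of how a caller could layer a bounded retry back on if it still needed one:

```rust
// Hypothetical helper, not part of this change; it reuses the surrounding
// module's types and assumes an `is_serialization_error`-style check is
// still available.
async fn transaction_with_retry<F, Fut, T>(db: &Database, f: F) -> Result<T>
where
    F: Send + Sync + Fn(TransactionHandle) -> Fut,
    Fut: Send + Future<Output = Result<T>>,
{
    const MAX_ATTEMPTS: usize = 5;
    let mut attempt = 0;
    loop {
        match db.transaction(&f).await {
            // Retry only on serialization failures, up to a fixed bound.
            Err(error) if is_serialization_error(&error) && attempt + 1 < MAX_ATTEMPTS => {
                attempt += 1;
            }
            result => return result,
        }
    }
}
```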
@@ -20,7 +20,7 @@ impl Database {
&self,
params: &CreateBillingCustomerParams,
) -> Result<billing_customer::Model> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let customer = billing_customer::Entity::insert(billing_customer::ActiveModel {
user_id: ActiveValue::set(params.user_id),
stripe_customer_id: ActiveValue::set(params.stripe_customer_id.clone()),
@@ -40,7 +40,7 @@ impl Database {
id: BillingCustomerId,
params: &UpdateBillingCustomerParams,
) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
billing_customer::Entity::update(billing_customer::ActiveModel {
id: ActiveValue::set(id),
user_id: params.user_id.clone(),
@@ -61,7 +61,7 @@ impl Database {
&self,
id: BillingCustomerId,
) -> Result<Option<billing_customer::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(billing_customer::Entity::find()
.filter(billing_customer::Column::Id.eq(id))
.one(&*tx)
@@ -75,7 +75,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<billing_customer::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(billing_customer::Entity::find()
.filter(billing_customer::Column::UserId.eq(user_id))
.one(&*tx)
@@ -89,7 +89,7 @@ impl Database {
&self,
stripe_customer_id: &str,
) -> Result<Option<billing_customer::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(billing_customer::Entity::find()
.filter(billing_customer::Column::StripeCustomerId.eq(stripe_customer_id))
.one(&*tx)
@@ -22,7 +22,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<billing_preference::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(billing_preference::Entity::find()
.filter(billing_preference::Column::UserId.eq(user_id))
.one(&*tx)
@@ -37,7 +37,7 @@ impl Database {
user_id: UserId,
params: &CreateBillingPreferencesParams,
) -> Result<billing_preference::Model> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let preferences = billing_preference::Entity::insert(billing_preference::ActiveModel {
user_id: ActiveValue::set(user_id),
max_monthly_llm_usage_spending_in_cents: ActiveValue::set(
@@ -65,7 +65,7 @@ impl Database {
user_id: UserId,
params: &UpdateBillingPreferencesParams,
) -> Result<billing_preference::Model> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let preferences = billing_preference::Entity::update_many()
.set(billing_preference::ActiveModel {
max_monthly_llm_usage_spending_in_cents: params
@@ -35,7 +35,7 @@ impl Database {
&self,
params: &CreateBillingSubscriptionParams,
) -> Result<billing_subscription::Model> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let id = billing_subscription::Entity::insert(billing_subscription::ActiveModel {
billing_customer_id: ActiveValue::set(params.billing_customer_id),
kind: ActiveValue::set(params.kind),
@@ -64,7 +64,7 @@ impl Database {
id: BillingSubscriptionId,
params: &UpdateBillingSubscriptionParams,
) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
billing_subscription::Entity::update(billing_subscription::ActiveModel {
id: ActiveValue::set(id),
billing_customer_id: params.billing_customer_id.clone(),
@@ -90,7 +90,7 @@ impl Database {
&self,
id: BillingSubscriptionId,
) -> Result<Option<billing_subscription::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(billing_subscription::Entity::find_by_id(id)
.one(&*tx)
.await?)
@@ -103,7 +103,7 @@ impl Database {
&self,
stripe_subscription_id: &str,
) -> Result<Option<billing_subscription::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(billing_subscription::Entity::find()
.filter(
billing_subscription::Column::StripeSubscriptionId.eq(stripe_subscription_id),
@@ -118,7 +118,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<billing_subscription::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(billing_subscription::Entity::find()
.inner_join(billing_customer::Entity)
.filter(billing_customer::Column::UserId.eq(user_id))
@@ -152,7 +152,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Vec<billing_subscription::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let subscriptions = billing_subscription::Entity::find()
.inner_join(billing_customer::Entity)
.filter(billing_customer::Column::UserId.eq(user_id))
@@ -169,7 +169,7 @@ impl Database {
&self,
user_ids: HashSet<UserId>,
) -> Result<HashMap<UserId, (billing_customer::Model, billing_subscription::Model)>> {
- self.weak_transaction(|tx| {
+ self.transaction(|tx| {
let user_ids = user_ids.clone();
async move {
let mut rows = billing_subscription::Entity::find()
@@ -201,7 +201,7 @@ impl Database {
&self,
user_ids: HashSet<UserId>,
) -> Result<HashMap<UserId, (billing_customer::Model, billing_subscription::Model)>> {
- self.weak_transaction(|tx| {
+ self.transaction(|tx| {
let user_ids = user_ids.clone();
async move {
let mut rows = billing_subscription::Entity::find()
@@ -236,7 +236,7 @@ impl Database {
/// Returns the count of the active billing subscriptions for the user with the specified ID.
pub async fn count_active_billing_subscriptions(&self, user_id: UserId) -> Result<usize> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let count = billing_subscription::Entity::find()
.inner_join(billing_customer::Entity)
.filter(
@@ -501,10 +501,8 @@ impl Database {
/// Returns all channels for the user with the given ID.
pub async fn get_channels_for_user(&self, user_id: UserId) -> Result<ChannelsForUser> {
- self.weak_transaction(
- |tx| async move { self.get_user_channels(user_id, None, true, &tx).await },
- )
- .await
+ self.transaction(|tx| async move { self.get_user_channels(user_id, None, true, &tx).await })
+ .await
}
/// Returns all channels for the user with the given ID that are descendants
@@ -15,7 +15,7 @@ impl Database {
user_b_busy: bool,
}
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let user_a_participant = Alias::new("user_a_participant");
let user_b_participant = Alias::new("user_b_participant");
let mut db_contacts = contact::Entity::find()
@@ -91,7 +91,7 @@ impl Database {
/// Returns whether the given user is busy (on a call).
pub async fn is_user_busy(&self, user_id: UserId) -> Result<bool> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let participant = room_participant::Entity::find()
.filter(room_participant::Column::UserId.eq(user_id))
.one(&*tx)
@@ -9,7 +9,7 @@ pub enum ContributorSelector {
impl Database {
/// Retrieves the GitHub logins of all users who have signed the CLA.
pub async fn get_contributors(&self) -> Result<Vec<String>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryGithubLogin {
GithubLogin,
@@ -32,7 +32,7 @@ impl Database {
&self,
selector: &ContributorSelector,
) -> Result<Option<DateTime>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let condition = match selector {
ContributorSelector::GitHubUserId { github_user_id } => {
user::Column::GithubUserId.eq(*github_user_id)
@@ -69,7 +69,7 @@ impl Database {
github_user_created_at: DateTimeUtc,
initial_channel_id: Option<ChannelId>,
) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let user = self
.update_or_create_user_by_github_account_tx(
github_login,
@@ -8,7 +8,7 @@ impl Database {
model: &str,
digests: &[Vec<u8>],
) -> Result<HashMap<Vec<u8>, Vec<f32>>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let embeddings = {
let mut db_embeddings = embedding::Entity::find()
.filter(
@@ -52,7 +52,7 @@ impl Database {
model: &str,
embeddings: &HashMap<Vec<u8>, Vec<f32>>,
) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| {
let now_offset_datetime = OffsetDateTime::now_utc();
let retrieved_at =
@@ -78,7 +78,7 @@ impl Database {
}
pub async fn purge_old_embeddings(&self) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
embedding::Entity::delete_many()
.filter(
embedding::Column::RetrievedAt
@@ -15,7 +15,7 @@ impl Database {
max_schema_version: i32,
limit: usize,
) -> Result<Vec<ExtensionMetadata>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let mut condition = Condition::all()
.add(
extension::Column::LatestVersion
@@ -43,7 +43,7 @@ impl Database {
ids: &[&str],
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Vec<ExtensionMetadata>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let extensions = extension::Entity::find()
.filter(extension::Column::ExternalId.is_in(ids.iter().copied()))
.all(&*tx)
@@ -123,7 +123,7 @@ impl Database {
&self,
extension_id: &str,
) -> Result<Vec<ExtensionMetadata>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let condition = extension::Column::ExternalId
.eq(extension_id)
.into_condition();
@@ -162,7 +162,7 @@ impl Database {
extension_id: &str,
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Option<ExtensionMetadata>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let extension = extension::Entity::find()
.filter(extension::Column::ExternalId.eq(extension_id))
.one(&*tx)
@@ -187,7 +187,7 @@ impl Database {
extension_id: &str,
version: &str,
) -> Result<Option<ExtensionMetadata>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let extension = extension::Entity::find()
.filter(extension::Column::ExternalId.eq(extension_id))
.filter(extension_version::Column::Version.eq(version))
@@ -204,7 +204,7 @@ impl Database {
}
pub async fn get_known_extension_versions(&self) -> Result<HashMap<String, Vec<String>>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let mut extension_external_ids_by_id = HashMap::default();
let mut rows = extension::Entity::find().stream(&*tx).await?;
@@ -242,7 +242,7 @@ impl Database {
&self,
versions_by_extension_id: &HashMap<&str, Vec<NewExtensionVersion>>,
) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
for (external_id, versions) in versions_by_extension_id {
if versions.is_empty() {
continue;
@@ -349,7 +349,7 @@ impl Database {
}
pub async fn record_extension_download(&self, extension: &str, version: &str) -> Result<bool> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryId {
Id,
@@ -13,7 +13,7 @@ impl Database {
&self,
params: &CreateProcessedStripeEventParams,
) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
processed_stripe_event::Entity::insert(processed_stripe_event::ActiveModel {
stripe_event_id: ActiveValue::set(params.stripe_event_id.clone()),
stripe_event_type: ActiveValue::set(params.stripe_event_type.clone()),
@@ -35,7 +35,7 @@ impl Database {
&self,
event_id: &str,
) -> Result<Option<processed_stripe_event::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(processed_stripe_event::Entity::find_by_id(event_id)
.one(&*tx)
.await?)
@@ -48,7 +48,7 @@ impl Database {
&self,
event_ids: &[&str],
) -> Result<Vec<processed_stripe_event::Model>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
Ok(processed_stripe_event::Entity::find()
.filter(
processed_stripe_event::Column::StripeEventId.is_in(event_ids.iter().copied()),
@@ -112,7 +112,7 @@ impl Database {
}
pub async fn delete_project(&self, project_id: ProjectId) -> Result<()> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
project::Entity::delete_by_id(project_id).exec(&*tx).await?;
Ok(())
})
@@ -80,7 +80,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<proto::IncomingCall>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
let pending_participant = room_participant::Entity::find()
.filter(
room_participant::Column::UserId
@@ -142,6 +142,50 @@ impl Database {
}
}
+ loop {
+ let delete_query = Query::delete()
+ .from_table(project_repository_statuses::Entity)
+ .and_where(
+ Expr::tuple([Expr::col((
+ project_repository_statuses::Entity,
+ project_repository_statuses::Column::ProjectId,
+ ))
+ .into()])
+ .in_subquery(
+ Query::select()
+ .columns([(
+ project_repository_statuses::Entity,
+ project_repository_statuses::Column::ProjectId,
+ )])
+ .from(project_repository_statuses::Entity)
+ .inner_join(
+ project::Entity,
+ Expr::col((project::Entity, project::Column::Id)).equals((
+ project_repository_statuses::Entity,
+ project_repository_statuses::Column::ProjectId,
+ )),
+ )
+ .and_where(project::Column::HostConnectionServerId.ne(server_id))
+ .limit(10000)
+ .to_owned(),
+ ),
+ )
+ .to_owned();
+
+ let statement = Statement::from_sql_and_values(
+ tx.get_database_backend(),
+ delete_query
+ .to_string(sea_orm::sea_query::PostgresQueryBuilder)
+ .as_str(),
+ vec![],
+ );
+
+ let result = tx.execute(statement).await?;
+ if result.rows_affected() == 0 {
+ break;
+ }
+ }
+
Ok(())
})
.await
@@ -382,7 +382,7 @@ impl Database {
/// Returns the active flags for the user.
pub async fn get_user_flags(&self, user: UserId) -> Result<Vec<String>> {
- self.weak_transaction(|tx| async move {
+ self.transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryAs {
Flag,
@@ -17,11 +17,15 @@ use crate::migrations::run_database_migrations;
use super::*;
use gpui::BackgroundExecutor;
use parking_lot::Mutex;
+use rand::prelude::*;
use sea_orm::ConnectionTrait;
use sqlx::migrate::MigrateDatabase;
-use std::sync::{
- Arc,
- atomic::{AtomicI32, AtomicU32, Ordering::SeqCst},
+use std::{
+ sync::{
+ Arc,
+ atomic::{AtomicI32, AtomicU32, Ordering::SeqCst},
+ },
+ time::Duration,
};
pub struct TestDb {
@@ -41,9 +45,7 @@ impl TestDb {
let mut db = runtime.block_on(async {
let mut options = ConnectOptions::new(url);
options.max_connections(5);
- let mut db = Database::new(options, Executor::Deterministic(executor.clone()))
- .await
- .unwrap();
+ let mut db = Database::new(options).await.unwrap();
let sql = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/migrations.sqlite/20221109000000_test_schema.sql"
@@ -60,6 +62,7 @@ impl TestDb {
});
db.test_options = Some(DatabaseTestOptions {
+ executor,
runtime,
query_failure_probability: parking_lot::Mutex::new(0.0),
});
@@ -93,9 +96,7 @@ impl TestDb {
options
.max_connections(5)
.idle_timeout(Duration::from_secs(0));
- let mut db = Database::new(options, Executor::Deterministic(executor.clone()))
- .await
- .unwrap();
+ let mut db = Database::new(options).await.unwrap();
let migrations_path = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations");
run_database_migrations(db.options(), migrations_path)
.await
@@ -105,6 +106,7 @@ impl TestDb {
});
db.test_options = Some(DatabaseTestOptions {
+ executor,
runtime,
query_failure_probability: parking_lot::Mutex::new(0.0),
});
@@ -49,7 +49,7 @@ async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) {
db.save_embeddings(model, &embeddings).await.unwrap();
// Reach into the DB and change the retrieved at to be > 60 days
- db.weak_transaction(|tx| {
+ db.transaction(|tx| {
let digest = digest.clone();
async move {
let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61));
@@ -285,7 +285,7 @@ impl AppState {
pub async fn new(config: Config, executor: Executor) -> Result<Arc<Self>> {
let mut db_options = db::ConnectOptions::new(config.database_url.clone());
db_options.max_connections(config.database_max_connections);
- let mut db = Database::new(db_options, Executor::Production).await?;
+ let mut db = Database::new(db_options).await?;
db.initialize_notification_kinds().await?;
let llm_db = if let Some((llm_database_url, llm_database_max_connections)) = config
@@ -59,7 +59,7 @@ async fn main() -> Result<()> {
let config = envy::from_env::<Config>().expect("error loading config");
let db_options = db::ConnectOptions::new(config.database_url.clone());
- let mut db = Database::new(db_options, Executor::Production).await?;
+ let mut db = Database::new(db_options).await?;
db.initialize_notification_kinds().await?;
collab::seed::seed(&config, &db, false).await?;
@@ -253,7 +253,7 @@ async fn main() -> Result<()> {
async fn setup_app_database(config: &Config) -> Result<()> {
let db_options = db::ConnectOptions::new(config.database_url.clone());
- let mut db = Database::new(db_options, Executor::Production).await?;
+ let mut db = Database::new(db_options).await?;
let migrations_path = config.migrations_path.as_deref().unwrap_or_else(|| {
#[cfg(feature = "sqlite")]
@@ -4591,14 +4591,13 @@ async fn test_formatting_buffer(
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
- file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
- vec![Formatter::External {
+ file.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
+ Formatter::External {
command: "awk".into(),
arguments: Some(
vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
),
- }]
- .into(),
+ },
)));
});
});
@@ -4699,8 +4698,8 @@ async fn test_prettier_formatting_buffer(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
- file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
- vec![Formatter::LanguageServer { name: None }].into(),
+ file.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
+ Formatter::LanguageServer { name: None },
)));
file.defaults.prettier = Some(PrettierSettings {
allowed: true,
@@ -4822,7 +4821,7 @@ async fn test_definition(
);
let definitions_1 = project_b
- .update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx))
+ .update(cx_b, |p, cx| p.definitions(&buffer_b, 23, cx))
.await
.unwrap();
cx_b.read(|cx| {
@@ -4853,7 +4852,7 @@ async fn test_definition(
);
let definitions_2 = project_b
- .update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx))
+ .update(cx_b, |p, cx| p.definitions(&buffer_b, 33, cx))
.await
.unwrap();
cx_b.read(|cx| {
@@ -4890,7 +4889,7 @@ async fn test_definition(
);
let type_definitions = project_b
- .update(cx_b, |p, cx| p.type_definition(&buffer_b, 7, cx))
+ .update(cx_b, |p, cx| p.type_definitions(&buffer_b, 7, cx))
.await
.unwrap();
cx_b.read(|cx| {
@@ -5058,7 +5057,7 @@ async fn test_references(
lsp_response_tx
.unbounded_send(Err(anyhow!("can't find references")))
.unwrap();
- references.await.unwrap_err();
+ assert_eq!(references.await.unwrap(), []);
// User is informed that the request is no longer pending.
executor.run_until_parked();
@@ -5642,7 +5641,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
let definitions;
let buffer_b2;
if rng.r#gen() {
- definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx));
+ definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx));
(buffer_b2, _) = project_b
.update(cx_b, |p, cx| {
p.open_buffer_with_lsp((worktree_id, "b.rs"), cx)
@@ -5656,7 +5655,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
})
.await
.unwrap();
- definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx));
+ definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx));
}
let definitions = definitions.await.unwrap();
@@ -838,7 +838,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.map(|_| Ok(()))
.boxed(),
LspRequestKind::Definition => project
- .definition(&buffer, offset, cx)
+ .definitions(&buffer, offset, cx)
.map_ok(|_| ())
.boxed(),
LspRequestKind::Highlights => project
@@ -505,8 +505,8 @@ async fn test_ssh_collaboration_formatting_with_prettier(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
- file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
- vec![Formatter::LanguageServer { name: None }].into(),
+ file.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
+ Formatter::LanguageServer { name: None },
)));
file.defaults.prettier = Some(PrettierSettings {
allowed: true,
@@ -30,7 +30,13 @@ use workspace::{
};
use workspace::{item::Dedup, notifications::NotificationId};
-actions!(collab, [CopyLink]);
+actions!(
+ collab,
+ [
+ /// Copies a link to the current position in the channel buffer.
+ CopyLink
+ ]
+);
pub fn init(cx: &mut App) {
workspace::FollowableViewRegistry::register::<ChannelView>(cx)
@@ -71,7 +71,13 @@ struct SerializedChatPanel {
width: Option<Pixels>,
}
-actions!(chat_panel, [ToggleFocus]);
+actions!(
+ chat_panel,
+ [
+ /// Toggles focus on the chat panel.
+ ToggleFocus
+ ]
+);
impl ChatPanel {
pub fn new(
@@ -44,15 +44,25 @@ use workspace::{
actions!(
collab_panel,
[
+ /// Toggles focus on the collaboration panel.
ToggleFocus,
+ /// Removes the selected channel or contact.
Remove,
+ /// Opens the context menu for the selected item.
Secondary,
+ /// Collapses the selected channel in the tree view.
CollapseSelectedChannel,
+ /// Expands the selected channel in the tree view.
ExpandSelectedChannel,
+ /// Starts moving a channel to a new location.
StartMoveChannel,
+ /// Moves the selected item to the current location.
MoveSelected,
+ /// Inserts a space character in the filter input.
InsertSpace,
+ /// Moves the selected channel up in the list.
MoveChannelUp,
+ /// Moves the selected channel down in the list.
MoveChannelDown,
]
);
@@ -17,9 +17,13 @@ use workspace::{ModalView, notifications::DetachAndPromptErr};
actions!(
channel_modal,
[
+ /// Selects the next control in the channel modal.
SelectNextControl,
+ /// Toggles between invite members and manage members mode.
ToggleMode,
+ /// Toggles admin status for the selected member.
ToggleMemberAdmin,
+ /// Removes the selected member from the channel.
RemoveMember
]
);
@@ -74,7 +74,13 @@ pub struct NotificationPresenter {
pub can_navigate: bool,
}
-actions!(notification_panel, [ToggleFocus]);
+actions!(
+ notification_panel,
+ [
+ /// Toggles focus on the notification panel.
+ ToggleFocus
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
@@ -28,7 +28,6 @@ pub struct ChatPanelSettings {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
-#[schemars(deny_unknown_fields)]
pub struct ChatPanelSettingsContent {
/// When to show the panel button in the status bar.
///
@@ -52,7 +51,6 @@ pub struct NotificationPanelSettings {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
-#[schemars(deny_unknown_fields)]
pub struct PanelSettingsContent {
/// Whether to show the panel button in the status bar.
///
@@ -69,7 +67,6 @@ pub struct PanelSettingsContent {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
-#[schemars(deny_unknown_fields)]
pub struct MessageEditorSettings {
/// Whether to automatically replace emoji shortcodes with emoji characters.
/// For example: typing `:wave:` gets replaced with `👋`.
@@ -41,7 +41,7 @@ pub struct CommandPalette {
/// Removes subsequent whitespace characters and double colons from the query.
///
/// This improves the likelihood of a match by either humanized name or keymap-style name.
-fn normalize_query(input: &str) -> String {
+pub fn normalize_action_query(input: &str) -> String {
let mut result = String::with_capacity(input.len());
let mut last_char = None;
@@ -297,7 +297,7 @@ impl PickerDelegate for CommandPaletteDelegate {
let mut commands = self.all_commands.clone();
let hit_counts = self.hit_counts();
let executor = cx.background_executor().clone();
- let query = normalize_query(query.as_str());
+ let query = normalize_action_query(query.as_str());
async move {
commands.sort_by_key(|action| {
(
@@ -311,29 +311,17 @@ impl PickerDelegate for CommandPaletteDelegate {
.enumerate()
.map(|(ix, command)| StringMatchCandidate::new(ix, &command.name))
.collect::<Vec<_>>();
- let matches = if query.is_empty() {
- candidates
- .into_iter()
- .enumerate()
- .map(|(index, candidate)| StringMatch {
- candidate_id: index,
- string: candidate.string,
- positions: Vec::new(),
- score: 0.0,
- })
- .collect()
- } else {
- fuzzy::match_strings(
- &candidates,
- &query,
- true,
- true,
- 10000,
- &Default::default(),
- executor,
- )
- .await
- };
+
+ let matches = fuzzy::match_strings(
+ &candidates,
+ &query,
+ true,
+ true,
+ 10000,
+ &Default::default(),
+ executor,
+ )
+ .await;
tx.send((commands, matches)).await.log_err();
}
@@ -422,8 +410,8 @@ impl PickerDelegate for CommandPaletteDelegate {
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
- let r#match = self.matches.get(ix)?;
- let command = self.commands.get(r#match.candidate_id)?;
+ let matching_command = self.matches.get(ix)?;
+ let command = self.commands.get(matching_command.candidate_id)?;
Some(
ListItem::new(ix)
.inset(true)
@@ -436,7 +424,7 @@ impl PickerDelegate for CommandPaletteDelegate {
.justify_between()
.child(HighlightedLabel::new(
command.name.clone(),
- r#match.positions.clone(),
+ matching_command.positions.clone(),
))
.children(KeyBinding::for_action_in(
&*command.action,
@@ -512,19 +500,28 @@ mod tests {
#[test]
fn test_normalize_query() {
- assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
- assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
- assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
assert_eq!(
- normalize_query("editor::GoToDefinition"),
+ normalize_action_query("editor: backspace"),
+ "editor: backspace"
+ );
+ assert_eq!(
+ normalize_action_query("editor: backspace"),
+ "editor: backspace"
+ );
+ assert_eq!(
+ normalize_action_query("editor: backspace"),
+ "editor: backspace"
+ );
+ assert_eq!(
+ normalize_action_query("editor::GoToDefinition"),
"editor:GoToDefinition"
);
assert_eq!(
- normalize_query("editor::::GoToDefinition"),
+ normalize_action_query("editor::::GoToDefinition"),
"editor:GoToDefinition"
);
assert_eq!(
- normalize_query("editor: :GoToDefinition"),
+ normalize_action_query("editor: :GoToDefinition"),
"editor: :GoToDefinition"
);
}
@@ -61,7 +61,7 @@ impl RenderOnce for ComponentExample {
12.0,
12.0,
))
- .shadow_sm()
+ .shadow_xs()
.child(self.element),
)
.into_any_element()
@@ -46,11 +46,17 @@ pub use crate::sign_in::{CopilotCodeVerification, initiate_sign_in, reinstall_an
actions!(
copilot,
[
+ /// Requests a code completion suggestion from Copilot.
Suggest,
+ /// Cycles to the next Copilot suggestion.
NextSuggestion,
+ /// Cycles to the previous Copilot suggestion.
PreviousSuggestion,
+ /// Reinstalls the Copilot language server.
Reinstall,
+ /// Signs in to GitHub Copilot.
SignIn,
+ /// Signs out of GitHub Copilot.
SignOut
]
);
@@ -528,6 +528,7 @@ impl CopilotChat {
pub async fn stream_completion(
request: Request,
+ is_user_initiated: bool,
mut cx: AsyncApp,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let this = cx
@@ -562,7 +563,14 @@ impl CopilotChat {
};
let api_url = configuration.api_url_from_endpoint(&token.api_endpoint);
- stream_completion(client.clone(), token.api_key, api_url.into(), request).await
+ stream_completion(
+ client.clone(),
+ token.api_key,
+ api_url.into(),
+ request,
+ is_user_initiated,
+ )
+ .await
}
pub fn set_configuration(
@@ -697,6 +705,7 @@ async fn stream_completion(
api_key: String,
completion_url: Arc<str>,
request: Request,
+ is_user_initiated: bool,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let is_vision_request = request.messages.iter().any(|message| match message {
ChatMessage::User { content }
@@ -707,6 +716,8 @@ async fn stream_completion(
_ => false,
});
+ let request_initiator = if is_user_initiated { "user" } else { "agent" };
+
let mut request_builder = HttpRequest::builder()
.method(Method::POST)
.uri(completion_url.as_ref())
@@ -719,7 +730,8 @@ async fn stream_completion(
)
.header("Authorization", format!("Bearer {}", api_key))
.header("Content-Type", "application/json")
- .header("Copilot-Integration-Id", "vscode-chat");
+ .header("Copilot-Integration-Id", "vscode-chat")
+ .header("X-Initiator", request_initiator);
if is_vision_request {
request_builder =
@@ -10,6 +10,7 @@ use gpui::{AsyncApp, SharedString};
pub use http_client::{HttpClient, github::latest_github_release};
use language::{LanguageName, LanguageToolchainStore};
use node_runtime::NodeRuntime;
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::WorktreeId;
use smol::fs::File;
@@ -47,7 +48,10 @@ pub trait DapDelegate: Send + Sync + 'static {
async fn shell_env(&self) -> collections::HashMap<String, String>;
}
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
+#[derive(
+ Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, JsonSchema,
+)]
+#[serde(transparent)]
pub struct DebugAdapterName(pub SharedString);
impl Deref for DebugAdapterName {
@@ -25,7 +25,9 @@ anyhow.workspace = true
async-trait.workspace = true
collections.workspace = true
dap.workspace = true
+dotenvy.workspace = true
futures.workspace = true
+fs.workspace = true
gpui.workspace = true
json_dotpath.workspace = true
language.workspace = true
@@ -22,17 +22,16 @@ impl CodeLldbDebugAdapter {
async fn request_args(
&self,
delegate: &Arc<dyn DapDelegate>,
- task_definition: &DebugTaskDefinition,
+ mut configuration: Value,
+ label: &str,
) -> Result<dap::StartDebuggingRequestArguments> {
- // CodeLLDB uses `name` for a terminal label.
- let mut configuration = task_definition.config.clone();
-
let obj = configuration
.as_object_mut()
.context("CodeLLDB is not a valid json object")?;
+ // CodeLLDB uses `name` for a terminal label.
obj.entry("name")
- .or_insert(Value::String(String::from(task_definition.label.as_ref())));
+ .or_insert(Value::String(String::from(label)));
obj.entry("cwd")
.or_insert(delegate.worktree_root_path().to_string_lossy().into());
@@ -361,17 +360,31 @@ impl DebugAdapter for CodeLldbDebugAdapter {
self.path_to_codelldb.set(path.clone()).ok();
command = Some(path);
};
-
+ let mut json_config = config.config.clone();
Ok(DebugAdapterBinary {
command: Some(command.unwrap()),
cwd: Some(delegate.worktree_root_path().to_path_buf()),
arguments: user_args.unwrap_or_else(|| {
- vec![
- "--settings".into(),
- json!({"sourceLanguages": ["cpp", "rust"]}).to_string(),
- ]
+ if let Some(config) = json_config.as_object_mut()
+ && let Some(source_languages) = config.get("sourceLanguages").filter(|value| {
+ value
+ .as_array()
+ .map_or(false, |array| array.iter().all(Value::is_string))
+ })
+ {
+ let ret = vec![
+ "--settings".into(),
+ json!({"sourceLanguages": source_languages}).to_string(),
+ ];
+ config.remove("sourceLanguages");
+ ret
+ } else {
+ vec![]
+ }
}),
- request_args: self.request_args(delegate, &config).await?,
+ request_args: self
+ .request_args(delegate, json_config, &config.label)
+ .await?,
envs: HashMap::default(),
connection: None,
})
@@ -4,7 +4,6 @@ mod go;
mod javascript;
mod php;
mod python;
-mod ruby;
use std::sync::Arc;
@@ -25,7 +24,6 @@ use gpui::{App, BorrowAppContext};
use javascript::JsDebugAdapter;
use php::PhpDebugAdapter;
use python::PythonDebugAdapter;
-use ruby::RubyDebugAdapter;
use serde_json::json;
use task::{DebugScenario, ZedDebugConfig};
@@ -35,7 +33,6 @@ pub fn init(cx: &mut App) {
registry.add_adapter(Arc::from(PythonDebugAdapter::default()));
registry.add_adapter(Arc::from(PhpDebugAdapter::default()));
registry.add_adapter(Arc::from(JsDebugAdapter::default()));
- registry.add_adapter(Arc::from(RubyDebugAdapter));
registry.add_adapter(Arc::from(GoDebugAdapter::default()));
registry.add_adapter(Arc::from(GdbDebugAdapter));
@@ -7,13 +7,22 @@ use dap::{
latest_github_release,
},
};
-
+use fs::Fs;
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
-use std::{env::consts, ffi::OsStr, path::PathBuf, sync::OnceLock};
+use log::warn;
+use serde_json::{Map, Value};
use task::TcpArgumentsTemplate;
use util;
+use std::{
+ env::consts,
+ ffi::OsStr,
+ path::{Path, PathBuf},
+ str::FromStr,
+ sync::OnceLock,
+};
+
use crate::*;
#[derive(Default, Debug)]
@@ -437,22 +446,34 @@ impl DebugAdapter for GoDebugAdapter {
adapter_path.join("dlv").to_string_lossy().to_string()
};
- let cwd = task_definition
- .config
- .get("cwd")
- .and_then(|s| s.as_str())
- .map(PathBuf::from)
- .unwrap_or_else(|| delegate.worktree_root_path().to_path_buf());
+ let cwd = Some(
+ task_definition
+ .config
+ .get("cwd")
+ .and_then(|s| s.as_str())
+ .map(PathBuf::from)
+ .unwrap_or_else(|| delegate.worktree_root_path().to_path_buf()),
+ );
let arguments;
let command;
let connection;
let mut configuration = task_definition.config.clone();
+ let mut envs = HashMap::default();
+
if let Some(configuration) = configuration.as_object_mut() {
configuration
.entry("cwd")
.or_insert_with(|| delegate.worktree_root_path().to_string_lossy().into());
+
+ handle_envs(
+ configuration,
+ &mut envs,
+ cwd.as_deref(),
+ delegate.fs().clone(),
+ )
+ .await;
}
if let Some(connection_options) = &task_definition.tcp_connection {
@@ -494,8 +515,8 @@ impl DebugAdapter for GoDebugAdapter {
Ok(DebugAdapterBinary {
command,
arguments,
- cwd: Some(cwd),
- envs: HashMap::default(),
+ cwd,
+ envs,
connection,
request_args: StartDebuggingRequestArguments {
configuration,
@@ -504,3 +525,44 @@ impl DebugAdapter for GoDebugAdapter {
})
}
}
+
+// delve doesn't do anything with the envFile setting, so we intercept it
+async fn handle_envs(
+ config: &mut Map<String, Value>,
+ envs: &mut HashMap<String, String>,
+ cwd: Option<&Path>,
+ fs: Arc<dyn Fs>,
+) -> Option<()> {
+ let env_files = match config.get("envFile")? {
+ Value::Array(arr) => arr.iter().map(|v| v.as_str()).collect::<Vec<_>>(),
+ Value::String(s) => vec![Some(s.as_str())],
+ _ => return None,
+ };
+
+ let rebase_path = |path: PathBuf| {
+ if path.is_absolute() {
+ Some(path)
+ } else {
+ cwd.map(|p| p.join(path))
+ }
+ };
+
+ for path in env_files {
+ let Some(path) = path
+ .and_then(|s| PathBuf::from_str(s).ok())
+ .and_then(rebase_path)
+ else {
+ continue;
+ };
+
+ if let Ok(file) = fs.open_sync(&path).await {
+ envs.extend(dotenvy::from_read_iter(file).filter_map(Result::ok))
+ } else {
+ warn!("While starting Go debug session: failed to read env file {path:?}");
+ };
+ }
+
+ // remove envFile now that it's been handled
+    config.remove("envFile");
+ Some(())
+}
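For reference, a minimal, self-contained sketch (helper name and values assumed, not taken from the diff) of the `dotenvy` step that `handle_envs` performs above: each `KEY=VALUE` line of an env file becomes an entry in the `envs` map passed to delve.

```rust
use std::collections::HashMap;

// `&[u8]` implements `std::io::Read`, so it can stand in here for the file
// handle that `handle_envs` opens through the `Fs` trait.
fn env_map_from_bytes(bytes: &[u8]) -> HashMap<String, String> {
    dotenvy::from_read_iter(bytes)
        .filter_map(Result::ok)
        .collect()
}

fn main() {
    let envs = env_map_from_bytes(b"PORT=8080\nDEBUG=true\n");
    assert_eq!(envs.get("PORT").map(String::as_str), Some("8080"));
    assert_eq!(envs.get("DEBUG").map(String::as_str), Some("true"));
}
```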
@@ -1,9 +1,10 @@
use adapters::latest_github_release;
use anyhow::Context as _;
+use collections::HashMap;
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::AsyncApp;
use serde_json::Value;
-use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
+use std::{path::PathBuf, sync::OnceLock};
use task::DebugRequest;
use util::{ResultExt, maybe};
@@ -70,6 +71,8 @@ impl JsDebugAdapter {
let tcp_connection = task_definition.tcp_connection.clone().unwrap_or_default();
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
+ let mut envs = HashMap::default();
+
let mut configuration = task_definition.config.clone();
if let Some(configuration) = configuration.as_object_mut() {
maybe!({
@@ -79,9 +82,9 @@ impl JsDebugAdapter {
let command = configuration.get("command")?.as_str()?.to_owned();
let mut args = shlex::split(&command)?.into_iter();
let program = args.next()?;
- configuration.insert("program".to_owned(), program.into());
+ configuration.insert("runtimeExecutable".to_owned(), program.into());
configuration.insert(
- "args".to_owned(),
+ "runtimeArgs".to_owned(),
args.map(Value::from).collect::<Vec<_>>().into(),
);
configuration.insert("console".to_owned(), "externalTerminal".into());
@@ -110,6 +113,12 @@ impl JsDebugAdapter {
}
}
+ if let Some(env) = configuration.get("env").cloned() {
+ if let Ok(env) = serde_json::from_value(env) {
+ envs = env;
+ }
+ }
+
configuration
.entry("cwd")
.or_insert(delegate.worktree_root_path().to_string_lossy().into());
@@ -158,7 +167,7 @@ impl JsDebugAdapter {
),
arguments,
cwd: Some(delegate.worktree_root_path().to_path_buf()),
- envs: HashMap::default(),
+ envs,
connection: Some(adapters::TcpArguments {
host,
port,
@@ -245,7 +254,7 @@ impl DebugAdapter for JsDebugAdapter {
"properties": {
"type": {
"type": "string",
- "enum": ["pwa-node", "node", "chrome", "pwa-chrome", "msedge", "pwa-msedge"],
+ "enum": ["pwa-node", "node", "chrome", "pwa-chrome", "msedge", "pwa-msedge", "node-terminal"],
"description": "The type of debug session",
"default": "pwa-node"
},
@@ -282,6 +291,10 @@ impl DebugAdapter for JsDebugAdapter {
"description": "Automatically stop program after launch",
"default": false
},
+ "attachSimplePort": {
+ "type": "number",
+ "description": "If set, attaches to the process via the given port. This is generally no longer necessary for Node.js programs and loses the ability to debug child processes, but can be useful in more esoteric scenarios such as with Deno and Docker launches. If set to 0, a random port will be chosen and --inspect-brk added to the launch arguments automatically."
+ },
"runtimeExecutable": {
"type": ["string", "null"],
"description": "Runtime to use, an absolute path or the name of a runtime available on PATH",
@@ -375,10 +388,6 @@ impl DebugAdapter for JsDebugAdapter {
}
}
},
- "oneOf": [
- { "required": ["program"] },
- { "required": ["url"] }
- ]
}
]
},
@@ -518,7 +527,11 @@ impl DebugAdapter for JsDebugAdapter {
}
fn label_for_child_session(&self, args: &StartDebuggingRequestArguments) -> Option<String> {
- let label = args.configuration.get("name")?.as_str()?;
+ let label = args
+ .configuration
+ .get("name")?
+ .as_str()
+ .filter(|name| !name.is_empty())?;
Some(label.to_owned())
}
}
@@ -660,6 +660,15 @@ impl DebugAdapter for PythonDebugAdapter {
self.get_installed_binary(delegate, &config, None, user_args, toolchain, false)
.await
}
+
+ fn label_for_child_session(&self, args: &StartDebuggingRequestArguments) -> Option<String> {
+ let label = args
+ .configuration
+ .get("name")?
+ .as_str()
+ .filter(|label| !label.is_empty())?;
+ Some(label.to_owned())
+ }
}
async fn fetch_latest_adapter_version_from_github(
@@ -1,208 +0,0 @@
-use anyhow::{Result, bail};
-use async_trait::async_trait;
-use collections::FxHashMap;
-use dap::{
- DebugRequest, StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
- adapters::{
- DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition,
- },
-};
-use gpui::{AsyncApp, SharedString};
-use language::LanguageName;
-use serde::{Deserialize, Serialize};
-use serde_json::json;
-use std::path::PathBuf;
-use std::{ffi::OsStr, sync::Arc};
-use task::{DebugScenario, ZedDebugConfig};
-use util::command::new_smol_command;
-
-#[derive(Default)]
-pub(crate) struct RubyDebugAdapter;
-
-impl RubyDebugAdapter {
- const ADAPTER_NAME: &'static str = "Ruby";
-}
-
-#[derive(Serialize, Deserialize)]
-struct RubyDebugConfig {
- script_or_command: Option<String>,
- script: Option<String>,
- command: Option<String>,
- #[serde(default)]
- args: Vec<String>,
- #[serde(default)]
- env: FxHashMap<String, String>,
- cwd: Option<PathBuf>,
-}
-
-#[async_trait(?Send)]
-impl DebugAdapter for RubyDebugAdapter {
- fn name(&self) -> DebugAdapterName {
- DebugAdapterName(Self::ADAPTER_NAME.into())
- }
-
- fn adapter_language_name(&self) -> Option<LanguageName> {
- Some(SharedString::new_static("Ruby").into())
- }
-
- async fn request_kind(
- &self,
- _: &serde_json::Value,
- ) -> Result<StartDebuggingRequestArgumentsRequest> {
- Ok(StartDebuggingRequestArgumentsRequest::Launch)
- }
-
- fn dap_schema(&self) -> serde_json::Value {
- json!({
- "type": "object",
- "properties": {
- "command": {
- "type": "string",
- "description": "Command name (ruby, rake, bin/rails, bundle exec ruby, etc)",
- },
- "script": {
- "type": "string",
- "description": "Absolute path to a Ruby file."
- },
- "cwd": {
- "type": "string",
- "description": "Directory to execute the program in",
- "default": "${ZED_WORKTREE_ROOT}"
- },
- "args": {
- "type": "array",
- "description": "Command line arguments passed to the program",
- "items": {
- "type": "string"
- },
- "default": []
- },
- "env": {
- "type": "object",
- "description": "Additional environment variables to pass to the debugging (and debugged) process",
- "default": {}
- },
- }
- })
- }
-
- async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
- match zed_scenario.request {
- DebugRequest::Launch(launch) => {
- let config = RubyDebugConfig {
- script_or_command: Some(launch.program),
- script: None,
- command: None,
- args: launch.args,
- env: launch.env,
- cwd: launch.cwd.clone(),
- };
-
- let config = serde_json::to_value(config)?;
-
- Ok(DebugScenario {
- adapter: zed_scenario.adapter,
- label: zed_scenario.label,
- config,
- tcp_connection: None,
- build: None,
- })
- }
- DebugRequest::Attach(_) => {
- anyhow::bail!("Attach requests are unsupported");
- }
- }
- }
-
- async fn get_binary(
- &self,
- delegate: &Arc<dyn DapDelegate>,
- definition: &DebugTaskDefinition,
- _user_installed_path: Option<PathBuf>,
- _user_args: Option<Vec<String>>,
- _cx: &mut AsyncApp,
- ) -> Result<DebugAdapterBinary> {
- let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
- let mut rdbg_path = adapter_path.join("rdbg");
- if !delegate.fs().is_file(&rdbg_path).await {
- match delegate.which("rdbg".as_ref()).await {
- Some(path) => rdbg_path = path,
- None => {
- delegate.output_to_console(
- "rdbg not found on path, trying `gem install debug`".to_string(),
- );
- let output = new_smol_command("gem")
- .arg("install")
- .arg("--no-document")
- .arg("--bindir")
- .arg(adapter_path)
- .arg("debug")
- .output()
- .await?;
- anyhow::ensure!(
- output.status.success(),
- "Failed to install rdbg:\n{}",
- String::from_utf8_lossy(&output.stderr).to_string()
- );
- }
- }
- }
-
- let tcp_connection = definition.tcp_connection.clone().unwrap_or_default();
- let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
- let ruby_config = serde_json::from_value::<RubyDebugConfig>(definition.config.clone())?;
-
- let mut arguments = vec![
- "--open".to_string(),
- format!("--port={}", port),
- format!("--host={}", host),
- ];
-
- if let Some(script) = &ruby_config.script {
- arguments.push(script.clone());
- } else if let Some(command) = &ruby_config.command {
- arguments.push("--command".to_string());
- arguments.push(command.clone());
- } else if let Some(command_or_script) = &ruby_config.script_or_command {
- if delegate
- .which(OsStr::new(&command_or_script))
- .await
- .is_some()
- {
- arguments.push("--command".to_string());
- }
- arguments.push(command_or_script.clone());
- } else {
- bail!("Ruby debug config must have 'script' or 'command' args");
- }
-
- arguments.extend(ruby_config.args);
-
- let mut configuration = definition.config.clone();
- if let Some(configuration) = configuration.as_object_mut() {
- configuration
- .entry("cwd")
- .or_insert_with(|| delegate.worktree_root_path().to_string_lossy().into());
- }
-
- Ok(DebugAdapterBinary {
- command: Some(rdbg_path.to_string_lossy().to_string()),
- arguments,
- connection: Some(dap::adapters::TcpArguments {
- host,
- port,
- timeout,
- }),
- cwd: Some(
- ruby_config
- .cwd
- .unwrap_or(delegate.worktree_root_path().to_owned()),
- ),
- envs: ruby_config.env.into_iter().collect(),
- request_args: StartDebuggingRequestArguments {
- request: self.request_kind(&definition.config).await?,
- configuration,
- },
- })
- }
-}
@@ -44,7 +44,9 @@ impl DapLocator for ExtensionLocatorAdapter {
.flatten()
}
- async fn run(&self, _build_config: SpawnInTerminal) -> Result<DebugRequest> {
- Err(anyhow::anyhow!("Not implemented"))
+ async fn run(&self, build_config: SpawnInTerminal) -> Result<DebugRequest> {
+ self.extension
+ .run_dap_locator(self.locator_name.as_ref().to_owned(), build_config)
+ .await
}
}
@@ -21,7 +21,7 @@ use project::{
use settings::Settings as _;
use std::{
borrow::Cow,
- collections::{HashMap, VecDeque},
+ collections::{BTreeMap, HashMap, VecDeque},
sync::Arc,
};
use util::maybe;
@@ -32,13 +32,6 @@ use workspace::{
ui::{Button, Clickable, ContextMenu, Label, LabelCommon, PopoverMenu, h_flex},
};
-// TODO:
-// - [x] stop sorting by session ID
-// - [x] pick the most recent session by default (logs if available, RPC messages otherwise)
-// - [ ] dump the launch/attach request somewhere (logs?)
-
-const MAX_SESSIONS: usize = 10;
-
struct DapLogView {
editor: Entity<Editor>,
focus_handle: FocusHandle,
@@ -49,14 +42,34 @@ struct DapLogView {
_subscriptions: Vec<Subscription>,
}
+struct LogStoreEntryIdentifier<'a> {
+ session_id: SessionId,
+ project: Cow<'a, WeakEntity<Project>>,
+}
+impl LogStoreEntryIdentifier<'_> {
+ fn to_owned(&self) -> LogStoreEntryIdentifier<'static> {
+ LogStoreEntryIdentifier {
+ session_id: self.session_id,
+ project: Cow::Owned(self.project.as_ref().clone()),
+ }
+ }
+}
+
+struct LogStoreMessage {
+ id: LogStoreEntryIdentifier<'static>,
+ kind: IoKind,
+ command: Option<SharedString>,
+ message: SharedString,
+}
+
pub struct LogStore {
projects: HashMap<WeakEntity<Project>, ProjectState>,
- debug_sessions: VecDeque<DebugAdapterState>,
- rpc_tx: UnboundedSender<(SessionId, IoKind, Option<SharedString>, SharedString)>,
- adapter_log_tx: UnboundedSender<(SessionId, IoKind, Option<SharedString>, SharedString)>,
+ rpc_tx: UnboundedSender<LogStoreMessage>,
+ adapter_log_tx: UnboundedSender<LogStoreMessage>,
}
struct ProjectState {
+ debug_sessions: BTreeMap<SessionId, DebugAdapterState>,
_subscriptions: [gpui::Subscription; 2],
}
@@ -122,13 +135,12 @@ impl DebugAdapterState {
impl LogStore {
pub fn new(cx: &Context<Self>) -> Self {
- let (rpc_tx, mut rpc_rx) =
- unbounded::<(SessionId, IoKind, Option<SharedString>, SharedString)>();
+ let (rpc_tx, mut rpc_rx) = unbounded::<LogStoreMessage>();
cx.spawn(async move |this, cx| {
- while let Some((session_id, io_kind, command, message)) = rpc_rx.next().await {
+ while let Some(message) = rpc_rx.next().await {
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
- this.add_debug_adapter_message(session_id, io_kind, command, message, cx);
+ this.add_debug_adapter_message(message, cx);
})?;
}
@@ -138,13 +150,12 @@ impl LogStore {
})
.detach_and_log_err(cx);
- let (adapter_log_tx, mut adapter_log_rx) =
- unbounded::<(SessionId, IoKind, Option<SharedString>, SharedString)>();
+ let (adapter_log_tx, mut adapter_log_rx) = unbounded::<LogStoreMessage>();
cx.spawn(async move |this, cx| {
- while let Some((session_id, io_kind, _, message)) = adapter_log_rx.next().await {
+ while let Some(message) = adapter_log_rx.next().await {
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
- this.add_debug_adapter_log(session_id, io_kind, message, cx);
+ this.add_debug_adapter_log(message, cx);
})?;
}
@@ -157,57 +168,76 @@ impl LogStore {
rpc_tx,
adapter_log_tx,
projects: HashMap::new(),
- debug_sessions: Default::default(),
}
}
pub fn add_project(&mut self, project: &Entity<Project>, cx: &mut Context<Self>) {
- let weak_project = project.downgrade();
self.projects.insert(
project.downgrade(),
ProjectState {
_subscriptions: [
- cx.observe_release(project, move |this, _, _| {
- this.projects.remove(&weak_project);
+ cx.observe_release(project, {
+ let weak_project = project.downgrade();
+ move |this, _, _| {
+ this.projects.remove(&weak_project);
+ }
}),
- cx.subscribe(
- &project.read(cx).dap_store(),
- |this, dap_store, event, cx| match event {
+ cx.subscribe(&project.read(cx).dap_store(), {
+ let weak_project = project.downgrade();
+ move |this, dap_store, event, cx| match event {
dap_store::DapStoreEvent::DebugClientStarted(session_id) => {
let session = dap_store.read(cx).session_by_id(session_id);
if let Some(session) = session {
- this.add_debug_session(*session_id, session, cx);
+ this.add_debug_session(
+ LogStoreEntryIdentifier {
+ project: Cow::Owned(weak_project.clone()),
+ session_id: *session_id,
+ },
+ session,
+ cx,
+ );
}
}
dap_store::DapStoreEvent::DebugClientShutdown(session_id) => {
- this.get_debug_adapter_state(*session_id)
- .iter_mut()
- .for_each(|state| state.is_terminated = true);
+ let id = LogStoreEntryIdentifier {
+ project: Cow::Borrowed(&weak_project),
+ session_id: *session_id,
+ };
+ if let Some(state) = this.get_debug_adapter_state(&id) {
+ state.is_terminated = true;
+ }
+
this.clean_sessions(cx);
}
_ => {}
- },
- ),
+ }
+ }),
],
+ debug_sessions: Default::default(),
},
);
}
- fn get_debug_adapter_state(&mut self, id: SessionId) -> Option<&mut DebugAdapterState> {
- self.debug_sessions
- .iter_mut()
- .find(|adapter_state| adapter_state.id == id)
+ fn get_debug_adapter_state(
+ &mut self,
+ id: &LogStoreEntryIdentifier<'_>,
+ ) -> Option<&mut DebugAdapterState> {
+ self.projects
+            .get_mut(&*id.project)
+ .and_then(|state| state.debug_sessions.get_mut(&id.session_id))
}
fn add_debug_adapter_message(
&mut self,
- id: SessionId,
- io_kind: IoKind,
- command: Option<SharedString>,
- message: SharedString,
+ LogStoreMessage {
+ id,
+ kind: io_kind,
+ command,
+ message,
+ }: LogStoreMessage,
cx: &mut Context<Self>,
) {
- let Some(debug_client_state) = self.get_debug_adapter_state(id) else {
+ let Some(debug_client_state) = self.get_debug_adapter_state(&id) else {
return;
};
@@ -229,7 +259,7 @@ impl LogStore {
if rpc_messages.last_message_kind != Some(kind) {
Self::get_debug_adapter_entry(
&mut rpc_messages.messages,
- id,
+ id.to_owned(),
kind.label().into(),
LogKind::Rpc,
cx,
@@ -239,7 +269,7 @@ impl LogStore {
let entry = Self::get_debug_adapter_entry(
&mut rpc_messages.messages,
- id,
+ id.to_owned(),
message,
LogKind::Rpc,
cx,
@@ -260,12 +290,15 @@ impl LogStore {
fn add_debug_adapter_log(
&mut self,
- id: SessionId,
- io_kind: IoKind,
- message: SharedString,
+ LogStoreMessage {
+ id,
+ kind: io_kind,
+ message,
+ ..
+ }: LogStoreMessage,
cx: &mut Context<Self>,
) {
- let Some(debug_adapter_state) = self.get_debug_adapter_state(id) else {
+ let Some(debug_adapter_state) = self.get_debug_adapter_state(&id) else {
return;
};
@@ -276,7 +309,7 @@ impl LogStore {
Self::get_debug_adapter_entry(
&mut debug_adapter_state.log_messages,
- id,
+ id.to_owned(),
message,
LogKind::Adapter,
cx,
@@ -286,13 +319,17 @@ impl LogStore {
fn get_debug_adapter_entry(
log_lines: &mut VecDeque<SharedString>,
- id: SessionId,
+ id: LogStoreEntryIdentifier<'static>,
message: SharedString,
kind: LogKind,
cx: &mut Context<Self>,
) -> SharedString {
- while log_lines.len() >= RpcMessages::MESSAGE_QUEUE_LIMIT {
- log_lines.pop_front();
+ if let Some(excess) = log_lines
+ .len()
+ .checked_sub(RpcMessages::MESSAGE_QUEUE_LIMIT)
+ && excess > 0
+ {
+ log_lines.drain(..excess);
}
let format_messages = DebuggerSettings::get_global(cx).format_dap_log_messages;
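A small illustration of the bounded-log change above (the constant and values are invented for the example): draining the excess in one call keeps the newest `LIMIT` entries instead of popping the front one element at a time.

```rust
use std::collections::VecDeque;

fn main() {
    const LIMIT: usize = 4;
    let mut log: VecDeque<i32> = (1..=6).collect();
    // Same shape as the change above: compute the overflow, then drop the
    // oldest entries in a single drain.
    if let Some(excess) = log.len().checked_sub(LIMIT) {
        if excess > 0 {
            log.drain(..excess);
        }
    }
    assert_eq!(log, VecDeque::from(vec![3, 4, 5, 6]));
}
```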
@@ -322,118 +359,116 @@ impl LogStore {
fn add_debug_session(
&mut self,
- session_id: SessionId,
+ id: LogStoreEntryIdentifier<'static>,
session: Entity<Session>,
cx: &mut Context<Self>,
) {
- if self
- .debug_sessions
- .iter_mut()
- .any(|adapter_state| adapter_state.id == session_id)
- {
- return;
- }
-
- let (adapter_name, has_adapter_logs) = session.read_with(cx, |session, _| {
- (
- session.adapter(),
- session
- .adapter_client()
- .map(|client| client.has_adapter_logs())
- .unwrap_or(false),
- )
- });
-
- self.debug_sessions.push_back(DebugAdapterState::new(
- session_id,
- adapter_name,
- has_adapter_logs,
- ));
-
- self.clean_sessions(cx);
-
- let io_tx = self.rpc_tx.clone();
-
- let Some(client) = session.read(cx).adapter_client() else {
- return;
- };
+ maybe!({
+ let project_entry = self.projects.get_mut(&id.project)?;
+ let std::collections::btree_map::Entry::Vacant(state) =
+ project_entry.debug_sessions.entry(id.session_id)
+ else {
+ return None;
+ };
+
+ let (adapter_name, has_adapter_logs) = session.read_with(cx, |session, _| {
+ (
+ session.adapter(),
+ session
+ .adapter_client()
+ .map_or(false, |client| client.has_adapter_logs()),
+ )
+ });
- client.add_log_handler(
- move |io_kind, command, message| {
- io_tx
- .unbounded_send((
- session_id,
- io_kind,
- command.map(|command| command.to_owned().into()),
- message.to_owned().into(),
- ))
- .ok();
- },
- LogKind::Rpc,
- );
+ state.insert(DebugAdapterState::new(
+ id.session_id,
+ adapter_name,
+ has_adapter_logs,
+ ));
+
+ self.clean_sessions(cx);
+
+ let io_tx = self.rpc_tx.clone();
+
+ let client = session.read(cx).adapter_client()?;
+ let project = id.project.clone();
+ let session_id = id.session_id;
+ client.add_log_handler(
+ move |kind, command, message| {
+ io_tx
+ .unbounded_send(LogStoreMessage {
+ id: LogStoreEntryIdentifier {
+ session_id,
+ project: project.clone(),
+ },
+ kind,
+ command: command.map(|command| command.to_owned().into()),
+ message: message.to_owned().into(),
+ })
+ .ok();
+ },
+ LogKind::Rpc,
+ );
- let log_io_tx = self.adapter_log_tx.clone();
- client.add_log_handler(
- move |io_kind, command, message| {
- log_io_tx
- .unbounded_send((
- session_id,
- io_kind,
- command.map(|command| command.to_owned().into()),
- message.to_owned().into(),
- ))
- .ok();
- },
- LogKind::Adapter,
- );
+ let log_io_tx = self.adapter_log_tx.clone();
+ let project = id.project;
+ client.add_log_handler(
+ move |kind, command, message| {
+ log_io_tx
+ .unbounded_send(LogStoreMessage {
+ id: LogStoreEntryIdentifier {
+ session_id,
+ project: project.clone(),
+ },
+ kind,
+ command: command.map(|command| command.to_owned().into()),
+ message: message.to_owned().into(),
+ })
+ .ok();
+ },
+ LogKind::Adapter,
+ );
+ Some(())
+ });
}
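
`add_debug_session` above guards against registering the same session twice by going through the map's entry API: if the slot is already occupied it bails out before wiring up any log handlers. A sketch of that guard in isolation, with hypothetical names:

```rust
use std::collections::BTreeMap;
use std::collections::btree_map::Entry;

fn add_session(sessions: &mut BTreeMap<u64, String>, session_id: u64, adapter_name: &str) {
    // Only proceed if this session id has never been seen; otherwise we would
    // attach duplicate log handlers for the same adapter client.
    let Entry::Vacant(slot) = sessions.entry(session_id) else {
        return;
    };
    slot.insert(adapter_name.to_owned());
    // ...register RPC and adapter-log handlers here...
}
```
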
fn clean_sessions(&mut self, cx: &mut Context<Self>) {
- let mut to_remove = self.debug_sessions.len().saturating_sub(MAX_SESSIONS);
- self.debug_sessions.retain(|session| {
- if to_remove > 0 && session.is_terminated {
- to_remove -= 1;
- return false;
- }
- true
+ self.projects.values_mut().for_each(|project| {
+ let mut allowed_terminated_sessions = 10u32;
+ project.debug_sessions.retain(|_, session| {
+ if !session.is_terminated {
+ return true;
+ }
+ allowed_terminated_sessions = allowed_terminated_sessions.saturating_sub(1);
+ allowed_terminated_sessions > 0
+ });
});
+
cx.notify();
}
fn log_messages_for_session(
&mut self,
- session_id: SessionId,
+ id: &LogStoreEntryIdentifier<'_>,
) -> Option<&mut VecDeque<SharedString>> {
- self.debug_sessions
- .iter_mut()
- .find(|session| session.id == session_id)
+ self.get_debug_adapter_state(id)
.map(|state| &mut state.log_messages)
}
fn rpc_messages_for_session(
&mut self,
- session_id: SessionId,
+ id: &LogStoreEntryIdentifier<'_>,
) -> Option<&mut VecDeque<SharedString>> {
- self.debug_sessions.iter_mut().find_map(|state| {
- if state.id == session_id {
- Some(&mut state.rpc_messages.messages)
- } else {
- None
- }
- })
+ self.get_debug_adapter_state(id)
+ .map(|state| &mut state.rpc_messages.messages)
}
fn initialization_sequence_for_session(
&mut self,
- session_id: SessionId,
- ) -> Option<&mut Vec<SharedString>> {
- self.debug_sessions.iter_mut().find_map(|state| {
- if state.id == session_id {
- Some(&mut state.rpc_messages.initialization_sequence)
- } else {
- None
- }
- })
+ id: &LogStoreEntryIdentifier<'_>,
+ ) -> Option<&Vec<SharedString>> {
+ self.get_debug_adapter_state(&id)
+ .map(|state| &state.rpc_messages.initialization_sequence)
}
}
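
The reworked `clean_sessions` keeps every live session and only prunes terminated ones once a per-project budget is exceeded. Below is a minimal version of that policy over plain maps; names are illustrative, the diff's budget is 10 per project, and because the real code decrements before checking its effective cap may differ by one from this sketch:

```rust
use std::collections::BTreeMap;

struct DebugAdapterState {
    is_terminated: bool,
    // ...log buffers, adapter name, etc.
}

/// Keep all live sessions; cap how many terminated ones each project retains.
fn clean_sessions(
    projects: &mut BTreeMap<String, BTreeMap<u64, DebugAdapterState>>,
    terminated_budget: u32,
) {
    for sessions in projects.values_mut() {
        let mut remaining = terminated_budget;
        sessions.retain(|_, session| {
            if !session.is_terminated {
                return true;
            }
            if remaining == 0 {
                return false;
            }
            remaining -= 1;
            true
        });
    }
}
```
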
@@ -453,10 +488,11 @@ impl Render for DapLogToolbarItemView {
return Empty.into_any_element();
};
- let (menu_rows, current_session_id) = log_view.update(cx, |log_view, cx| {
+ let (menu_rows, current_session_id, project) = log_view.update(cx, |log_view, cx| {
(
log_view.menu_items(cx),
log_view.current_view.map(|(session_id, _)| session_id),
+ log_view.project.downgrade(),
)
});
@@ -484,6 +520,7 @@ impl Render for DapLogToolbarItemView {
.menu(move |mut window, cx| {
let log_view = log_view.clone();
let menu_rows = menu_rows.clone();
+ let project = project.clone();
ContextMenu::build(&mut window, cx, move |mut menu, window, _cx| {
for row in menu_rows.into_iter() {
menu = menu.custom_row(move |_window, _cx| {
@@ -509,8 +546,15 @@ impl Render for DapLogToolbarItemView {
.child(Label::new(ADAPTER_LOGS))
.into_any_element()
},
- window.handler_for(&log_view, move |view, window, cx| {
- view.show_log_messages_for_adapter(row.session_id, window, cx);
+ window.handler_for(&log_view, {
+ let project = project.clone();
+ let id = LogStoreEntryIdentifier {
+ project: Cow::Owned(project),
+ session_id: row.session_id,
+ };
+ move |view, window, cx| {
+ view.show_log_messages_for_adapter(&id, window, cx);
+ }
}),
);
}
@@ -524,8 +568,15 @@ impl Render for DapLogToolbarItemView {
.child(Label::new(RPC_MESSAGES))
.into_any_element()
},
- window.handler_for(&log_view, move |view, window, cx| {
- view.show_rpc_trace_for_server(row.session_id, window, cx);
+ window.handler_for(&log_view, {
+ let project = project.clone();
+ let id = LogStoreEntryIdentifier {
+ project: Cow::Owned(project),
+ session_id: row.session_id,
+ };
+ move |view, window, cx| {
+ view.show_rpc_trace_for_server(&id, window, cx);
+ }
}),
)
.custom_entry(
@@ -536,12 +587,17 @@ impl Render for DapLogToolbarItemView {
.child(Label::new(INITIALIZATION_SEQUENCE))
.into_any_element()
},
- window.handler_for(&log_view, move |view, window, cx| {
- view.show_initialization_sequence_for_server(
- row.session_id,
- window,
- cx,
- );
+ window.handler_for(&log_view, {
+ let project = project.clone();
+ let id = LogStoreEntryIdentifier {
+ project: Cow::Owned(project),
+ session_id: row.session_id,
+ };
+ move |view, window, cx| {
+ view.show_initialization_sequence_for_server(
+ &id, window, cx,
+ );
+ }
}),
);
}
@@ -613,7 +669,9 @@ impl DapLogView {
let events_subscriptions = cx.subscribe(&log_store, |log_view, _, event, cx| match event {
Event::NewLogEntry { id, entry, kind } => {
- if log_view.current_view == Some((*id, *kind)) {
+ if log_view.current_view == Some((id.session_id, *kind))
+ && log_view.project == *id.project
+ {
log_view.editor.update(cx, |editor, cx| {
editor.set_read_only(false);
let last_point = editor.buffer().read(cx).len(cx);
@@ -629,12 +687,18 @@ impl DapLogView {
}
}
});
-
+ let weak_project = project.downgrade();
let state_info = log_store
.read(cx)
- .debug_sessions
- .back()
- .map(|session| (session.id, session.has_adapter_logs));
+ .projects
+ .get(&weak_project)
+ .and_then(|project| {
+ project
+ .debug_sessions
+ .values()
+ .next_back()
+ .map(|session| (session.id, session.has_adapter_logs))
+ });
let mut this = Self {
editor,
@@ -647,10 +711,14 @@ impl DapLogView {
};
if let Some((session_id, have_adapter_logs)) = state_info {
+ let id = LogStoreEntryIdentifier {
+ session_id,
+ project: Cow::Owned(weak_project),
+ };
if have_adapter_logs {
- this.show_log_messages_for_adapter(session_id, window, cx);
+ this.show_log_messages_for_adapter(&id, window, cx);
} else {
- this.show_rpc_trace_for_server(session_id, window, cx);
+ this.show_rpc_trace_for_server(&id, window, cx);
}
}
@@ -690,31 +758,38 @@ impl DapLogView {
fn menu_items(&self, cx: &App) -> Vec<DapMenuItem> {
self.log_store
.read(cx)
- .debug_sessions
- .iter()
- .rev()
- .map(|state| DapMenuItem {
- session_id: state.id,
- adapter_name: state.adapter_name.clone(),
- has_adapter_logs: state.has_adapter_logs,
- selected_entry: self.current_view.map_or(LogKind::Adapter, |(_, kind)| kind),
+ .projects
+ .get(&self.project.downgrade())
+ .map_or_else(Vec::new, |state| {
+ state
+ .debug_sessions
+ .values()
+ .rev()
+ .map(|state| DapMenuItem {
+ session_id: state.id,
+ adapter_name: state.adapter_name.clone(),
+ has_adapter_logs: state.has_adapter_logs,
+ selected_entry: self
+ .current_view
+ .map_or(LogKind::Adapter, |(_, kind)| kind),
+ })
+ .collect::<Vec<_>>()
})
- .collect::<Vec<_>>()
}
fn show_rpc_trace_for_server(
&mut self,
- session_id: SessionId,
+ id: &LogStoreEntryIdentifier<'_>,
window: &mut Window,
cx: &mut Context<Self>,
) {
let rpc_log = self.log_store.update(cx, |log_store, _| {
log_store
- .rpc_messages_for_session(session_id)
+ .rpc_messages_for_session(id)
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
- self.current_view = Some((session_id, LogKind::Rpc));
+ self.current_view = Some((id.session_id, LogKind::Rpc));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -725,8 +800,7 @@ impl DapLogView {
.expect("log buffer should be a singleton")
.update(cx, |_, cx| {
cx.spawn({
- let buffer = cx.entity();
- async move |_, cx| {
+ async move |buffer, cx| {
let language = language.await.ok();
buffer.update(cx, |buffer, cx| {
buffer.set_language(language, cx);
@@ -746,17 +820,17 @@ impl DapLogView {
fn show_log_messages_for_adapter(
&mut self,
- session_id: SessionId,
+ id: &LogStoreEntryIdentifier<'_>,
window: &mut Window,
cx: &mut Context<Self>,
) {
let message_log = self.log_store.update(cx, |log_store, _| {
log_store
- .log_messages_for_session(session_id)
+ .log_messages_for_session(id)
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(message_log) = message_log {
- self.current_view = Some((session_id, LogKind::Adapter));
+ self.current_view = Some((id.session_id, LogKind::Adapter));
let (editor, editor_subscriptions) = Self::editor_for_logs(message_log, window, cx);
editor
.read(cx)
@@ -775,17 +849,17 @@ impl DapLogView {
fn show_initialization_sequence_for_server(
&mut self,
- session_id: SessionId,
+ id: &LogStoreEntryIdentifier<'_>,
window: &mut Window,
cx: &mut Context<Self>,
) {
let rpc_log = self.log_store.update(cx, |log_store, _| {
log_store
- .initialization_sequence_for_session(session_id)
+ .initialization_sequence_for_session(id)
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
- self.current_view = Some((session_id, LogKind::Rpc));
+ self.current_view = Some((id.session_id, LogKind::Rpc));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -844,7 +918,13 @@ impl Render for DapLogView {
}
}
-actions!(dev, [OpenDebugAdapterLogs]);
+actions!(
+ dev,
+ [
+ /// Opens the debug adapter protocol logs viewer.
+ OpenDebugAdapterLogs
+ ]
+);
pub fn init(cx: &mut App) {
let log_store = cx.new(|cx| LogStore::new(cx));
@@ -993,9 +1073,9 @@ impl Focusable for DapLogView {
}
}
-pub enum Event {
+enum Event {
NewLogEntry {
- id: SessionId,
+ id: LogStoreEntryIdentifier<'static>,
entry: SharedString,
kind: LogKind,
},
@@ -1008,31 +1088,30 @@ impl EventEmitter<SearchEvent> for DapLogView {}
#[cfg(any(test, feature = "test-support"))]
impl LogStore {
- pub fn contained_session_ids(&self) -> Vec<SessionId> {
- self.debug_sessions
- .iter()
- .map(|session| session.id)
- .collect()
+ pub fn has_projects(&self) -> bool {
+ !self.projects.is_empty()
}
- pub fn rpc_messages_for_session_id(&self, session_id: SessionId) -> Vec<SharedString> {
- self.debug_sessions
- .iter()
- .find(|adapter_state| adapter_state.id == session_id)
- .expect("This session should exist if a test is calling")
- .rpc_messages
- .messages
- .clone()
- .into()
+ pub fn contained_session_ids(&self, project: &WeakEntity<Project>) -> Vec<SessionId> {
+ self.projects.get(project).map_or(vec![], |state| {
+ state.debug_sessions.keys().copied().collect()
+ })
}
- pub fn log_messages_for_session_id(&self, session_id: SessionId) -> Vec<SharedString> {
- self.debug_sessions
- .iter()
- .find(|adapter_state| adapter_state.id == session_id)
- .expect("This session should exist if a test is calling")
- .log_messages
- .clone()
- .into()
+ pub fn rpc_messages_for_session_id(
+ &self,
+ project: &WeakEntity<Project>,
+ session_id: SessionId,
+ ) -> Vec<SharedString> {
+ self.projects.get(&project).map_or(vec![], |state| {
+ state
+ .debug_sessions
+ .get(&session_id)
+ .expect("This session should exist if a test is calling")
+ .rpc_messages
+ .messages
+ .clone()
+ .into()
+ })
}
}
@@ -5,7 +5,7 @@ use crate::session::running::breakpoint_list::BreakpointList;
use crate::{
ClearAllBreakpoints, Continue, CopyDebugAdapterArguments, Detach, FocusBreakpointList,
FocusConsole, FocusFrames, FocusLoadedSources, FocusModules, FocusTerminal, FocusVariables,
- NewProcessModal, NewProcessMode, Pause, Restart, StepInto, StepOut, StepOver, Stop,
+ NewProcessModal, NewProcessMode, Pause, RerunSession, StepInto, StepOut, StepOver, Stop,
ToggleExpandItem, ToggleSessionPicker, ToggleThreadPicker, persistence, spawn_task_or_modal,
};
use anyhow::{Context as _, Result, anyhow};
@@ -25,7 +25,7 @@ use gpui::{
use itertools::Itertools as _;
use language::Buffer;
use project::debugger::session::{Session, SessionStateEvent};
-use project::{Fs, ProjectPath, WorktreeId};
+use project::{DebugScenarioContext, Fs, ProjectPath, WorktreeId};
use project::{Project, debugger::session::ThreadStatus};
use rpc::proto::{self};
use settings::Settings;
@@ -197,6 +197,7 @@ impl DebugPanel {
.and_then(|buffer| buffer.read(cx).file())
.map(|f| f.worktree_id(cx))
});
+
let Some(worktree) = worktree
.and_then(|id| self.project.read(cx).worktree_for_id(id, cx))
.or_else(|| self.project.read(cx).visible_worktrees(cx).next())
@@ -204,6 +205,7 @@ impl DebugPanel {
log::debug!("Could not find a worktree to spawn the debug session in");
return;
};
+
self.debug_scenario_scheduled_last = true;
if let Some(inventory) = self
.project
@@ -214,7 +216,15 @@ impl DebugPanel {
.cloned()
{
inventory.update(cx, |inventory, _| {
- inventory.scenario_scheduled(scenario.clone());
+ inventory.scenario_scheduled(
+ scenario.clone(),
+ // todo(debugger): Task context is cloned three times
+ // once in Session, inventory, and in resolve_scenario;
+ // we should wrap it in an Rc instead to save some memory
+ task_context.clone(),
+ worktree_id,
+ active_buffer.as_ref().map(|buffer| buffer.downgrade()),
+ );
})
}
let task = cx.spawn_in(window, {
@@ -225,6 +235,16 @@ impl DebugPanel {
let definition = debug_session
.update_in(cx, |debug_session, window, cx| {
debug_session.running_state().update(cx, |running, cx| {
+ if scenario.build.is_some() {
+ running.scenario = Some(scenario.clone());
+ running.scenario_context = Some(DebugScenarioContext {
+ active_buffer: active_buffer
+ .as_ref()
+ .map(|entity| entity.downgrade()),
+ task_context: task_context.clone(),
+ worktree_id: worktree_id,
+ });
+ };
running.resolve_scenario(
scenario,
task_context,
@@ -273,7 +293,8 @@ impl DebugPanel {
return;
};
let workspace = self.workspace.clone();
- let Some(scenario) = task_inventory.read(cx).last_scheduled_scenario().cloned() else {
+ let Some((scenario, context)) = task_inventory.read(cx).last_scheduled_scenario().cloned()
+ else {
window.defer(cx, move |window, cx| {
workspace
.update(cx, |workspace, cx| {
@@ -284,28 +305,22 @@ impl DebugPanel {
return;
};
- cx.spawn_in(window, async move |this, cx| {
- let task_contexts = workspace
- .update_in(cx, |workspace, window, cx| {
- tasks_ui::task_contexts(workspace, window, cx)
- })?
- .await;
+ let DebugScenarioContext {
+ task_context,
+ worktree_id,
+ active_buffer,
+ } = context;
- let task_context = task_contexts.active_context().cloned().unwrap_or_default();
- let worktree_id = task_contexts.worktree();
+ let active_buffer = active_buffer.and_then(|buffer| buffer.upgrade());
- this.update_in(cx, |this, window, cx| {
- this.start_session(
- scenario.clone(),
- task_context,
- None,
- worktree_id,
- window,
- cx,
- );
- })
- })
- .detach();
+ self.start_session(
+ scenario,
+ task_context,
+ active_buffer,
+ worktree_id,
+ window,
+ cx,
+ );
}
pub(crate) async fn register_session(
@@ -758,16 +773,16 @@ impl DebugPanel {
.icon_size(IconSize::XSmall)
.on_click(window.listener_for(
&running_state,
- |this, _, _window, cx| {
- this.restart_session(cx);
+ |this, _, window, cx| {
+ this.rerun_session(window, cx);
},
))
.tooltip({
let focus_handle = focus_handle.clone();
move |window, cx| {
Tooltip::for_action_in(
- "Restart",
- &Restart,
+ "Rerun Session",
+ &RerunSession,
&focus_handle,
window,
cx,
@@ -1298,11 +1313,13 @@ impl Render for DebugPanel {
}
v_flex()
- .when_else(
- self.position(window, cx) == DockPosition::Bottom,
- |this| this.max_h(self.size),
- |this| this.max_w(self.size),
- )
+ .when(!self.is_zoomed, |this| {
+ this.when_else(
+ self.position(window, cx) == DockPosition::Bottom,
+ |this| this.max_h(self.size),
+ |this| this.max_w(self.size),
+ )
+ })
.size_full()
.key_context("DebugPanel")
.child(h_flex().children(self.top_controls_strip(window, cx)))
@@ -1600,12 +1617,13 @@ impl workspace::DebuggerProvider for DebuggerProvider {
definition: DebugScenario,
context: TaskContext,
buffer: Option<Entity<Buffer>>,
+ worktree_id: Option<WorktreeId>,
window: &mut Window,
cx: &mut App,
) {
self.0.update(cx, |_, cx| {
- cx.defer_in(window, |this, window, cx| {
- this.start_session(definition, context, buffer, None, window, cx);
+ cx.defer_in(window, move |this, window, cx| {
+ this.start_session(definition, context, buffer, worktree_id, window, cx);
})
})
}
@@ -32,34 +32,67 @@ pub mod tests;
actions!(
debugger,
[
+ /// Starts a new debugging session.
Start,
+ /// Continues execution until the next breakpoint.
Continue,
+ /// Detaches the debugger from the running process.
Detach,
+ /// Pauses the currently running program.
Pause,
+ /// Restarts the current debugging session.
Restart,
+ /// Reruns the current debugging session with the same configuration.
+ RerunSession,
+ /// Steps into the next function call.
StepInto,
+ /// Steps over the current line.
StepOver,
+ /// Steps out of the current function.
StepOut,
+ /// Steps back to the previous statement.
StepBack,
+ /// Stops the debugging session.
Stop,
+ /// Toggles whether to ignore all breakpoints.
ToggleIgnoreBreakpoints,
+ /// Clears all breakpoints in the project.
ClearAllBreakpoints,
+ /// Focuses on the debugger console panel.
FocusConsole,
+ /// Focuses on the variables panel.
FocusVariables,
+ /// Focuses on the breakpoint list panel.
FocusBreakpointList,
+ /// Focuses on the call stack frames panel.
FocusFrames,
+ /// Focuses on the loaded modules panel.
FocusModules,
+ /// Focuses on the loaded sources panel.
FocusLoadedSources,
+ /// Focuses on the terminal panel.
FocusTerminal,
+ /// Shows the stack trace for the current thread.
ShowStackTrace,
+ /// Toggles the thread picker dropdown.
ToggleThreadPicker,
+ /// Toggles the session picker dropdown.
ToggleSessionPicker,
- RerunLastSession,
+ /// Reruns the last debugging session.
+ #[action(deprecated_aliases = ["debugger::RerunLastSession"])]
+ Rerun,
+ /// Toggles expansion of the selected item in the debugger UI.
ToggleExpandItem,
]
);
-actions!(dev, [CopyDebugAdapterArguments]);
+actions!(
+ dev,
+ [
+ /// Copies debug adapter launch arguments to clipboard.
+ CopyDebugAdapterArguments
+ ]
+);
pub fn init(cx: &mut App) {
DebuggerSettings::register(cx);
@@ -74,17 +107,15 @@ pub fn init(cx: &mut App) {
.register_action(|workspace: &mut Workspace, _: &Start, window, cx| {
NewProcessModal::show(workspace, window, NewProcessMode::Debug, None, cx);
})
- .register_action(
- |workspace: &mut Workspace, _: &RerunLastSession, window, cx| {
- let Some(debug_panel) = workspace.panel::<DebugPanel>(cx) else {
- return;
- };
+ .register_action(|workspace: &mut Workspace, _: &Rerun, window, cx| {
+ let Some(debug_panel) = workspace.panel::<DebugPanel>(cx) else {
+ return;
+ };
- debug_panel.update(cx, |debug_panel, cx| {
- debug_panel.rerun_last_session(workspace, window, cx);
- })
- },
- )
+ debug_panel.update(cx, |debug_panel, cx| {
+ debug_panel.rerun_last_session(workspace, window, cx);
+ })
+ })
.register_action(
|workspace: &mut Workspace, _: &ShutdownDebugAdapters, _window, cx| {
workspace.project().update(cx, |project, cx| {
@@ -210,6 +241,14 @@ pub fn init(cx: &mut App) {
.ok();
}
})
+ .on_action({
+ let active_item = active_item.clone();
+ move |_: &RerunSession, window, cx| {
+ active_item
+ .update(cx, |item, cx| item.rerun_session(window, cx))
+ .ok();
+ }
+ })
.on_action({
let active_item = active_item.clone();
move |_: &Stop, _, cx| {
@@ -4,6 +4,7 @@ use collections::HashMap;
use gpui::{Animation, AnimationExt as _, Entity, Transformation, percentage};
use project::debugger::session::{ThreadId, ThreadStatus};
use ui::{ContextMenu, DropdownMenu, DropdownStyle, Indicator, prelude::*};
+use util::truncate_and_trailoff;
use crate::{
debugger_panel::DebugPanel,
@@ -12,6 +13,8 @@ use crate::{
impl DebugPanel {
fn dropdown_label(label: impl Into<SharedString>) -> Label {
+ const MAX_LABEL_CHARS: usize = 50;
+ let label = truncate_and_trailoff(&label.into(), MAX_LABEL_CHARS);
Label::new(label).size(LabelSize::Small)
}
@@ -170,6 +173,8 @@ impl DebugPanel {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<DropdownMenu> {
+ const MAX_LABEL_CHARS: usize = 150;
+
let running_state = running_state.clone();
let running_state_read = running_state.read(cx);
let thread_id = running_state_read.thread_id();
@@ -202,6 +207,7 @@ impl DebugPanel {
.is_empty()
.then(|| format!("Tid: {}", thread.id))
.unwrap_or_else(|| thread.name);
+ let entry_name = truncate_and_trailoff(&entry_name, MAX_LABEL_CHARS);
this = this.entry(entry_name, None, move |window, cx| {
running_state.update(cx, |running_state, cx| {
@@ -23,7 +23,9 @@ use gpui::{
};
use itertools::Itertools as _;
use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
-use project::{ProjectPath, TaskContexts, TaskSourceKind, task_store::TaskStore};
+use project::{
+ DebugScenarioContext, ProjectPath, TaskContexts, TaskSourceKind, task_store::TaskStore,
+};
use settings::{Settings, initial_local_debug_tasks_content};
use task::{DebugScenario, RevealTarget, ZedDebugConfig};
use theme::ThemeSettings;
@@ -92,6 +94,7 @@ impl NewProcessModal {
cx.spawn_in(window, async move |workspace, cx| {
let task_contexts = workspace.update_in(cx, |workspace, window, cx| {
+ // todo(debugger): get the buffer here (if the active item is an editor) and store it so we can pass it to start_session later
tasks_ui::task_contexts(workspace, window, cx)
})?;
workspace.update_in(cx, |workspace, window, cx| {
@@ -1110,7 +1113,11 @@ pub(super) struct TaskMode {
pub(super) struct DebugDelegate {
task_store: Entity<TaskStore>,
- candidates: Vec<(Option<TaskSourceKind>, DebugScenario)>,
+ candidates: Vec<(
+ Option<TaskSourceKind>,
+ DebugScenario,
+ Option<DebugScenarioContext>,
+ )>,
selected_index: usize,
matches: Vec<StringMatch>,
prompt: String,
@@ -1208,7 +1215,11 @@ impl DebugDelegate {
this.delegate.candidates = recent
.into_iter()
- .map(|scenario| Self::get_scenario_kind(&languages, &dap_registry, scenario))
+ .map(|(scenario, context)| {
+ let (kind, scenario) =
+ Self::get_scenario_kind(&languages, &dap_registry, scenario);
+ (kind, scenario, Some(context))
+ })
.chain(
scenarios
.into_iter()
@@ -1223,7 +1234,7 @@ impl DebugDelegate {
.map(|(kind, scenario)| {
let (language, scenario) =
Self::get_scenario_kind(&languages, &dap_registry, scenario);
- (language.or(Some(kind)), scenario)
+ (language.or(Some(kind)), scenario, None)
}),
)
.collect();
@@ -1269,7 +1280,7 @@ impl PickerDelegate for DebugDelegate {
let candidates: Vec<_> = candidates
.into_iter()
.enumerate()
- .map(|(index, (_, candidate))| {
+ .map(|(index, (_, candidate, _))| {
StringMatchCandidate::new(index, candidate.label.as_ref())
})
.collect();
@@ -1434,25 +1445,40 @@ impl PickerDelegate for DebugDelegate {
.get(self.selected_index())
.and_then(|match_candidate| self.candidates.get(match_candidate.candidate_id).cloned());
- let Some((_, debug_scenario)) = debug_scenario else {
+ let Some((_, debug_scenario, context)) = debug_scenario else {
return;
};
- let (task_context, worktree_id) = self
- .task_contexts
- .as_ref()
- .and_then(|task_contexts| {
- Some((
- task_contexts.active_context().cloned()?,
- task_contexts.worktree(),
- ))
- })
- .unwrap_or_default();
+ let context = context.unwrap_or_else(|| {
+ self.task_contexts
+ .as_ref()
+ .and_then(|task_contexts| {
+ Some(DebugScenarioContext {
+ task_context: task_contexts.active_context().cloned()?,
+ active_buffer: None,
+ worktree_id: task_contexts.worktree(),
+ })
+ })
+ .unwrap_or_default()
+ });
+ let DebugScenarioContext {
+ task_context,
+ active_buffer,
+ worktree_id,
+ } = context;
+ let active_buffer = active_buffer.and_then(|buffer| buffer.upgrade());
send_telemetry(&debug_scenario, TelemetrySpawnLocation::ScenarioList, cx);
self.debug_panel
.update(cx, |panel, cx| {
- panel.start_session(debug_scenario, task_context, None, worktree_id, window, cx);
+ panel.start_session(
+ debug_scenario,
+ task_context,
+ active_buffer,
+ worktree_id,
+ window,
+ cx,
+ );
})
.ok();
@@ -12,6 +12,7 @@ use rpc::proto;
use running::RunningState;
use std::{cell::OnceCell, sync::OnceLock};
use ui::{Indicator, Tooltip, prelude::*};
+use util::truncate_and_trailoff;
use workspace::{
CollaboratorId, FollowableItem, ViewId, Workspace,
item::{self, Item},
@@ -126,7 +127,10 @@ impl DebugSession {
}
pub(crate) fn label_element(&self, depth: usize, cx: &App) -> AnyElement {
+ const MAX_LABEL_CHARS: usize = 150;
+
let label = self.label(cx);
+ let label = truncate_and_trailoff(&label, MAX_LABEL_CHARS);
let is_terminated = self
.running_state
@@ -33,7 +33,7 @@ use language::Buffer;
use loaded_source_list::LoadedSourceList;
use module_list::ModuleList;
use project::{
- Project, WorktreeId,
+ DebugScenarioContext, Project, WorktreeId,
debugger::session::{Session, SessionEvent, ThreadId, ThreadStatus},
terminals::TerminalKind,
};
@@ -79,6 +79,8 @@ pub struct RunningState {
pane_close_subscriptions: HashMap<EntityId, Subscription>,
dock_axis: Axis,
_schedule_serialize: Option<Task<()>>,
+ pub(crate) scenario: Option<DebugScenario>,
+ pub(crate) scenario_context: Option<DebugScenarioContext>,
}
impl RunningState {
@@ -831,6 +833,8 @@ impl RunningState {
debug_terminal,
dock_axis,
_schedule_serialize: None,
+ scenario: None,
+ scenario_context: None,
}
}
@@ -900,7 +904,7 @@ impl RunningState {
let config_is_valid = request_type.is_ok();
-
+ let mut extra_config = Value::Null;
let build_output = if let Some(build) = build {
let (task_template, locator_name) = match build {
BuildTaskDefinition::Template {
@@ -930,6 +934,7 @@ impl RunningState {
};
let locator_name = if let Some(locator_name) = locator_name {
+ extra_config = config.clone();
debug_assert!(!config_is_valid);
Some(locator_name)
} else if !config_is_valid {
@@ -945,6 +950,7 @@ impl RunningState {
});
if let Ok(t) = task {
t.await.and_then(|scenario| {
+ extra_config = scenario.config;
match scenario.build {
Some(BuildTaskDefinition::Template {
locator_name, ..
@@ -967,7 +973,7 @@ impl RunningState {
let task_with_shell = SpawnInTerminal {
command_label,
- command,
+ command: Some(command),
args,
..task.resolved.clone()
};
@@ -1008,13 +1014,13 @@ impl RunningState {
if !exit_status.success() {
anyhow::bail!("Build failed");
}
- Some((task.resolved.clone(), locator_name))
+ Some((task.resolved.clone(), locator_name, extra_config))
} else {
None
};
if config_is_valid {
- } else if let Some((task, locator_name)) = build_output {
+ } else if let Some((task, locator_name, extra_config)) = build_output {
let locator_name =
locator_name.with_context(|| {
format!("Could not find a valid locator for a build task and configure is invalid with error: {}", request_type.err()
@@ -1037,8 +1043,10 @@ impl RunningState {
let scenario = dap_registry
.adapter(&adapter)
.with_context(|| anyhow!("{}: is not a valid adapter name", &adapter))?.config_from_zed_format(zed_config)
-.await?;
+ .await?;
config = scenario.config;
+ util::merge_non_null_json_value_into(extra_config, &mut config);
+
Self::substitute_variables_in_config(&mut config, &task_context);
} else {
let Err(e) = request_type else {
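
The `extra_config` plumbing above captures whatever configuration the build/locator step produced and folds it into the adapter's config, letting non-null values from the locator fill in gaps without clobbering anything already set. The diff calls `util::merge_non_null_json_value_into`; the sketch below is an illustrative reimplementation of that kind of merge with `serde_json`, not the editor's actual helper:

```rust
use serde_json::Value;

/// Recursively copy `source` into `target`, skipping nulls so existing
/// values are never overwritten by "nothing".
fn merge_non_null(source: Value, target: &mut Value) {
    match (source, target) {
        (Value::Object(source), Value::Object(target)) => {
            for (key, value) in source {
                merge_non_null(value, target.entry(key).or_insert(Value::Null));
            }
        }
        (Value::Null, _) => {}
        (source, target) => *target = source,
    }
}

fn main() {
    let mut config = serde_json::json!({ "request": "launch", "program": null });
    let extra = serde_json::json!({ "program": "target/debug/app", "cwd": null });
    merge_non_null(extra, &mut config);
    assert_eq!(config["program"], "target/debug/app");
    assert_eq!(config["request"], "launch");
}
```
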
@@ -1077,19 +1085,6 @@ impl RunningState {
.map(PathBuf::from)
.or_else(|| session.binary().unwrap().cwd.clone());
- let mut args = request.args.clone();
-
- // Handle special case for NodeJS debug adapter
- // If only the Node binary path is provided, we set the command to None
- // This prevents the NodeJS REPL from appearing, which is not the desired behavior
- // The expected usage is for users to provide their own Node command, e.g., `node test.js`
- // This allows the NodeJS debug client to attach correctly
- let command = if args.len() > 1 {
- Some(args.remove(0))
- } else {
- None
- };
-
let mut envs: HashMap<String, String> =
self.session.read(cx).task_context().project_env.clone();
if let Some(Value::Object(env)) = &request.env {
@@ -1103,32 +1098,58 @@ impl RunningState {
}
}
- let shell = project.read(cx).terminal_settings(&cwd, cx).shell.clone();
- let kind = if let Some(command) = command {
- let title = request.title.clone().unwrap_or(command.clone());
- TerminalKind::Task(task::SpawnInTerminal {
- id: task::TaskId("debug".to_string()),
- full_label: title.clone(),
- label: title.clone(),
- command: command.clone(),
- args,
- command_label: title.clone(),
- cwd,
- env: envs,
- use_new_terminal: true,
- allow_concurrent_runs: true,
- reveal: task::RevealStrategy::NoFocus,
- reveal_target: task::RevealTarget::Dock,
- hide: task::HideStrategy::Never,
- shell,
- show_summary: false,
- show_command: false,
- show_rerun: false,
- })
+ let mut args = request.args.clone();
+ let command = if envs.contains_key("VSCODE_INSPECTOR_OPTIONS") {
+ // Handle special case for NodeJS debug adapter
+ // If only the Node binary path is provided (possibly with flags like --experimental-network-inspection),
+ // we set the command to None
+ // This prevents the NodeJS REPL from appearing, which is not the desired behavior
+ // The expected usage is for users to provide their own Node command, e.g., `node test.js`
+ // This allows the NodeJS debug client to attach correctly
+ if args
+ .iter()
+ .filter(|arg| !arg.starts_with("--"))
+ .collect::<Vec<_>>()
+ .len()
+ > 1
+ {
+ Some(args.remove(0))
+ } else {
+ None
+ }
+ } else if args.len() > 0 {
+ Some(args.remove(0))
} else {
- TerminalKind::Shell(cwd.map(|c| c.to_path_buf()))
+ None
};
+ let shell = project.read(cx).terminal_settings(&cwd, cx).shell.clone();
+ let title = request
+ .title
+ .clone()
+ .filter(|title| !title.is_empty())
+ .or_else(|| command.clone())
+ .unwrap_or_else(|| "Debug terminal".to_string());
+ let kind = TerminalKind::Task(task::SpawnInTerminal {
+ id: task::TaskId("debug".to_string()),
+ full_label: title.clone(),
+ label: title.clone(),
+ command: command.clone(),
+ args,
+ command_label: title.clone(),
+ cwd,
+ env: envs,
+ use_new_terminal: true,
+ allow_concurrent_runs: true,
+ reveal: task::RevealStrategy::NoFocus,
+ reveal_target: task::RevealTarget::Dock,
+ hide: task::HideStrategy::Never,
+ shell,
+ show_summary: false,
+ show_command: false,
+ show_rerun: false,
+ });
+
let workspace = self.workspace.clone();
let weak_project = project.downgrade();
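
The command/args handling above is the subtle part of this hunk: for the js-debug adapter (detected via the `VSCODE_INSPECTOR_OPTIONS` environment variable it injects), the first argument is only promoted to the terminal command when something other than `--` flags follows it, so a bare `node --flag` launch doesn't open an interactive REPL. A pure-function sketch of that decision, assuming the same detection shown in the diff:

```rust
/// Split a runInTerminal request's args into (command, remaining args).
fn split_command(mut args: Vec<String>, is_js_debug: bool) -> (Option<String>, Vec<String>) {
    let command = if is_js_debug {
        // Promote args[0] to the command only if a script (non-flag arg)
        // follows the node binary; otherwise leave the command empty so we
        // don't spawn a node REPL.
        let non_flag_args = args.iter().filter(|arg| !arg.starts_with("--")).count();
        if non_flag_args > 1 {
            Some(args.remove(0))
        } else {
            None
        }
    } else if !args.is_empty() {
        Some(args.remove(0))
    } else {
        None
    };
    (command, args)
}

fn main() {
    let (cmd, rest) = split_command(
        vec!["node".into(), "--inspect".into(), "test.js".into()],
        true,
    );
    assert_eq!(cmd.as_deref(), Some("node"));
    assert_eq!(rest, ["--inspect", "test.js"]);
}
```
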
@@ -1521,6 +1542,34 @@ impl RunningState {
});
}
+ pub fn rerun_session(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ if let Some((scenario, context)) = self.scenario.take().zip(self.scenario_context.take())
+ && scenario.build.is_some()
+ {
+ let DebugScenarioContext {
+ task_context,
+ active_buffer,
+ worktree_id,
+ } = context;
+ let active_buffer = active_buffer.and_then(|buffer| buffer.upgrade());
+
+ self.workspace
+ .update(cx, |workspace, cx| {
+ workspace.start_debug_session(
+ scenario,
+ task_context,
+ active_buffer,
+ worktree_id,
+ window,
+ cx,
+ )
+ })
+ .ok();
+ } else {
+ self.restart_session(cx);
+ }
+ }
+
pub fn restart_session(&self, cx: &mut Context<Self>) {
self.session().update(cx, |state, cx| {
state.restart(None, cx);
@@ -33,7 +33,12 @@ use zed_actions::{ToggleEnableBreakpoint, UnsetBreakpoint};
actions!(
debugger,
- [PreviousBreakpointProperty, NextBreakpointProperty]
+ [
+ /// Navigates to the previous breakpoint property in the list.
+ PreviousBreakpointProperty,
+ /// Navigates to the next breakpoint property in the list.
+ NextBreakpointProperty
+ ]
);
#[derive(Clone, Copy, PartialEq)]
pub(crate) enum SelectedBreakpointKind {
@@ -5,7 +5,7 @@ use super::{
use alacritty_terminal::vte::ansi;
use anyhow::Result;
use collections::HashMap;
-use dap::OutputEvent;
+use dap::{CompletionItem, CompletionItemType, OutputEvent};
use editor::{Bias, CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId};
use fuzzy::StringMatchCandidate;
use gpui::{
@@ -13,17 +13,27 @@ use gpui::{
Render, Subscription, Task, TextStyle, WeakEntity, actions,
};
use language::{Buffer, CodeLabel, ToOffset};
-use menu::Confirm;
+use menu::{Confirm, SelectNext, SelectPrevious};
use project::{
Completion, CompletionResponse,
- debugger::session::{CompletionsQuery, OutputToken, Session, SessionEvent},
+ debugger::session::{CompletionsQuery, OutputToken, Session},
+ lsp_store::CompletionDocumentation,
+ search_history::{SearchHistory, SearchHistoryCursor},
};
use settings::Settings;
+use std::fmt::Write;
use std::{cell::RefCell, ops::Range, rc::Rc, usize};
use theme::{Theme, ThemeSettings};
use ui::{ContextMenu, Divider, PopoverMenu, SplitButton, Tooltip, prelude::*};
+use util::ResultExt;
-actions!(console, [WatchExpression]);
+actions!(
+ console,
+ [
+ /// Adds an expression to the watch list.
+ WatchExpression
+ ]
+);
pub struct Console {
console: Entity<Editor>,
@@ -33,8 +43,10 @@ pub struct Console {
variable_list: Entity<VariableList>,
stack_frame_list: Entity<StackFrameList>,
last_token: OutputToken,
- update_output_task: Task<()>,
+ update_output_task: Option<Task<()>>,
focus_handle: FocusHandle,
+ history: SearchHistory,
+ cursor: SearchHistoryCursor,
}
impl Console {
@@ -83,11 +95,6 @@ impl Console {
let _subscriptions = vec![
cx.subscribe(&stack_frame_list, Self::handle_stack_frame_list_events),
- cx.subscribe_in(&session, window, |this, _, event, window, cx| {
- if let SessionEvent::ConsoleOutput = event {
- this.update_output(window, cx)
- }
- }),
cx.on_focus(&focus_handle, window, |console, window, cx| {
if console.is_running(cx) {
console.query_bar.focus_handle(cx).focus(window);
@@ -102,9 +109,14 @@ impl Console {
variable_list,
_subscriptions,
stack_frame_list,
- update_output_task: Task::ready(()),
+ update_output_task: None,
last_token: OutputToken(0),
focus_handle,
+ history: SearchHistory::new(
+ None,
+ project::search_history::QueryInsertionBehavior::ReplacePreviousIfContains,
+ ),
+ cursor: Default::default(),
}
}
@@ -133,202 +145,116 @@ impl Console {
self.session.read(cx).has_new_output(self.last_token)
}
- pub fn add_messages<'a>(
+ fn add_messages(
&mut self,
- events: impl Iterator<Item = &'a OutputEvent>,
+ events: Vec<OutputEvent>,
window: &mut Window,
cx: &mut App,
- ) {
- self.console.update(cx, |console, cx| {
- console.set_read_only(false);
-
- for event in events {
- let to_insert = format!("{}\n", event.output.trim_end());
-
- let mut ansi_handler = ConsoleHandler::default();
- let mut ansi_processor = ansi::Processor::<ansi::StdSyncHandler>::default();
-
- let len = console.buffer().read(cx).len(cx);
- ansi_processor.advance(&mut ansi_handler, to_insert.as_bytes());
- let output = std::mem::take(&mut ansi_handler.output);
- let mut spans = std::mem::take(&mut ansi_handler.spans);
- let mut background_spans = std::mem::take(&mut ansi_handler.background_spans);
- if ansi_handler.current_range_start < output.len() {
- spans.push((
- ansi_handler.current_range_start..output.len(),
- ansi_handler.current_color,
- ));
- }
- if ansi_handler.current_background_range_start < output.len() {
- background_spans.push((
- ansi_handler.current_background_range_start..output.len(),
- ansi_handler.current_background_color,
- ));
- }
- console.move_to_end(&editor::actions::MoveToEnd, window, cx);
- console.insert(&output, window, cx);
- let buffer = console.buffer().read(cx).snapshot(cx);
-
- struct ConsoleAnsiHighlight;
-
- for (range, color) in spans {
- let Some(color) = color else { continue };
- let start_offset = len + range.start;
- let range = start_offset..len + range.end;
- let range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
- let style = HighlightStyle {
- color: Some(terminal_view::terminal_element::convert_color(
- &color,
- cx.theme(),
- )),
- ..Default::default()
- };
- console.highlight_text_key::<ConsoleAnsiHighlight>(
- start_offset,
- vec![range],
- style,
- cx,
- );
- }
-
- for (range, color) in background_spans {
- let Some(color) = color else { continue };
- let start_offset = len + range.start;
- let range = start_offset..len + range.end;
- let range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
-
- let color_fetcher: fn(&Theme) -> Hsla = match color {
- // Named and theme defined colors
- ansi::Color::Named(n) => match n {
- ansi::NamedColor::Black => |theme| theme.colors().terminal_ansi_black,
- ansi::NamedColor::Red => |theme| theme.colors().terminal_ansi_red,
- ansi::NamedColor::Green => |theme| theme.colors().terminal_ansi_green,
- ansi::NamedColor::Yellow => |theme| theme.colors().terminal_ansi_yellow,
- ansi::NamedColor::Blue => |theme| theme.colors().terminal_ansi_blue,
- ansi::NamedColor::Magenta => {
- |theme| theme.colors().terminal_ansi_magenta
- }
- ansi::NamedColor::Cyan => |theme| theme.colors().terminal_ansi_cyan,
- ansi::NamedColor::White => |theme| theme.colors().terminal_ansi_white,
- ansi::NamedColor::BrightBlack => {
- |theme| theme.colors().terminal_ansi_bright_black
- }
- ansi::NamedColor::BrightRed => {
- |theme| theme.colors().terminal_ansi_bright_red
- }
- ansi::NamedColor::BrightGreen => {
- |theme| theme.colors().terminal_ansi_bright_green
- }
- ansi::NamedColor::BrightYellow => {
- |theme| theme.colors().terminal_ansi_bright_yellow
- }
- ansi::NamedColor::BrightBlue => {
- |theme| theme.colors().terminal_ansi_bright_blue
- }
- ansi::NamedColor::BrightMagenta => {
- |theme| theme.colors().terminal_ansi_bright_magenta
- }
- ansi::NamedColor::BrightCyan => {
- |theme| theme.colors().terminal_ansi_bright_cyan
- }
- ansi::NamedColor::BrightWhite => {
- |theme| theme.colors().terminal_ansi_bright_white
- }
- ansi::NamedColor::Foreground => {
- |theme| theme.colors().terminal_foreground
- }
- ansi::NamedColor::Background => {
- |theme| theme.colors().terminal_background
- }
- ansi::NamedColor::Cursor => |theme| theme.players().local().cursor,
- ansi::NamedColor::DimBlack => {
- |theme| theme.colors().terminal_ansi_dim_black
- }
- ansi::NamedColor::DimRed => {
- |theme| theme.colors().terminal_ansi_dim_red
- }
- ansi::NamedColor::DimGreen => {
- |theme| theme.colors().terminal_ansi_dim_green
- }
- ansi::NamedColor::DimYellow => {
- |theme| theme.colors().terminal_ansi_dim_yellow
+ ) -> Task<Result<()>> {
+ self.console.update(cx, |_, cx| {
+ cx.spawn_in(window, async move |console, cx| {
+ let mut len = console.update(cx, |this, cx| this.buffer().read(cx).len(cx))?;
+ let (output, spans, background_spans) = cx
+ .background_spawn(async move {
+ let mut all_spans = Vec::new();
+ let mut all_background_spans = Vec::new();
+ let mut to_insert = String::new();
+ let mut scratch = String::new();
+
+ for event in &events {
+ scratch.clear();
+ let mut ansi_handler = ConsoleHandler::default();
+ let mut ansi_processor =
+ ansi::Processor::<ansi::StdSyncHandler>::default();
+
+ let trimmed_output = event.output.trim_end();
+ let _ = writeln!(&mut scratch, "{trimmed_output}");
+ ansi_processor.advance(&mut ansi_handler, scratch.as_bytes());
+ let output = std::mem::take(&mut ansi_handler.output);
+ to_insert.extend(output.chars());
+ let mut spans = std::mem::take(&mut ansi_handler.spans);
+ let mut background_spans =
+ std::mem::take(&mut ansi_handler.background_spans);
+ if ansi_handler.current_range_start < output.len() {
+ spans.push((
+ ansi_handler.current_range_start..output.len(),
+ ansi_handler.current_color,
+ ));
}
- ansi::NamedColor::DimBlue => {
- |theme| theme.colors().terminal_ansi_dim_blue
+ if ansi_handler.current_background_range_start < output.len() {
+ background_spans.push((
+ ansi_handler.current_background_range_start..output.len(),
+ ansi_handler.current_background_color,
+ ));
}
- ansi::NamedColor::DimMagenta => {
- |theme| theme.colors().terminal_ansi_dim_magenta
- }
- ansi::NamedColor::DimCyan => {
- |theme| theme.colors().terminal_ansi_dim_cyan
- }
- ansi::NamedColor::DimWhite => {
- |theme| theme.colors().terminal_ansi_dim_white
- }
- ansi::NamedColor::BrightForeground => {
- |theme| theme.colors().terminal_bright_foreground
- }
- ansi::NamedColor::DimForeground => {
- |theme| theme.colors().terminal_dim_foreground
+
+ for (range, _) in spans.iter_mut() {
+ let start_offset = len + range.start;
+ *range = start_offset..len + range.end;
}
- },
- // 'True' colors
- ansi::Color::Spec(_) => |theme| theme.colors().editor_background,
- // 8 bit, indexed colors
- ansi::Color::Indexed(i) => {
- match i {
- // 0-15 are the same as the named colors above
- 0 => |theme| theme.colors().terminal_ansi_black,
- 1 => |theme| theme.colors().terminal_ansi_red,
- 2 => |theme| theme.colors().terminal_ansi_green,
- 3 => |theme| theme.colors().terminal_ansi_yellow,
- 4 => |theme| theme.colors().terminal_ansi_blue,
- 5 => |theme| theme.colors().terminal_ansi_magenta,
- 6 => |theme| theme.colors().terminal_ansi_cyan,
- 7 => |theme| theme.colors().terminal_ansi_white,
- 8 => |theme| theme.colors().terminal_ansi_bright_black,
- 9 => |theme| theme.colors().terminal_ansi_bright_red,
- 10 => |theme| theme.colors().terminal_ansi_bright_green,
- 11 => |theme| theme.colors().terminal_ansi_bright_yellow,
- 12 => |theme| theme.colors().terminal_ansi_bright_blue,
- 13 => |theme| theme.colors().terminal_ansi_bright_magenta,
- 14 => |theme| theme.colors().terminal_ansi_bright_cyan,
- 15 => |theme| theme.colors().terminal_ansi_bright_white,
- // 16-231 are a 6x6x6 RGB color cube, mapped to 0-255 using steps defined by XTerm.
- // See: https://github.com/xterm-x11/xterm-snapshots/blob/master/256colres.pl
- // 16..=231 => {
- // let (r, g, b) = rgb_for_index(index as u8);
- // rgba_color(
- // if r == 0 { 0 } else { r * 40 + 55 },
- // if g == 0 { 0 } else { g * 40 + 55 },
- // if b == 0 { 0 } else { b * 40 + 55 },
- // )
- // }
- // 232-255 are a 24-step grayscale ramp from (8, 8, 8) to (238, 238, 238).
- // 232..=255 => {
- // let i = index as u8 - 232; // Align index to 0..24
- // let value = i * 10 + 8;
- // rgba_color(value, value, value)
- // }
- // For compatibility with the alacritty::Colors interface
- // See: https://github.com/alacritty/alacritty/blob/master/alacritty_terminal/src/term/color.rs
- _ => |_| gpui::black(),
+
+ for (range, _) in background_spans.iter_mut() {
+ let start_offset = len + range.start;
+ *range = start_offset..len + range.end;
}
+
+ len += output.len();
+
+ all_spans.extend(spans);
+ all_background_spans.extend(background_spans);
}
- };
-
- console.highlight_background_key::<ConsoleAnsiHighlight>(
- start_offset,
- &[range],
- color_fetcher,
- cx,
- );
- }
- }
+ (to_insert, all_spans, all_background_spans)
+ })
+ .await;
+ console.update_in(cx, |console, window, cx| {
+ console.set_read_only(false);
+ console.move_to_end(&editor::actions::MoveToEnd, window, cx);
+ console.insert(&output, window, cx);
+ console.set_read_only(true);
+
+ struct ConsoleAnsiHighlight;
+
+ let buffer = console.buffer().read(cx).snapshot(cx);
+
+ for (range, color) in spans {
+ let Some(color) = color else { continue };
+ let start_offset = range.start;
+ let range =
+ buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
+ let style = HighlightStyle {
+ color: Some(terminal_view::terminal_element::convert_color(
+ &color,
+ cx.theme(),
+ )),
+ ..Default::default()
+ };
+ console.highlight_text_key::<ConsoleAnsiHighlight>(
+ start_offset,
+ vec![range],
+ style,
+ cx,
+ );
+ }
- console.set_read_only(true);
- cx.notify();
- });
+ for (range, color) in background_spans {
+ let Some(color) = color else { continue };
+ let start_offset = range.start;
+ let range =
+ buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
+ console.highlight_background_key::<ConsoleAnsiHighlight>(
+ start_offset,
+ &[range],
+ color_fetcher(color),
+ cx,
+ );
+ }
+
+ cx.notify();
+ })?;
+
+ Ok(())
+ })
+ })
}
pub fn watch_expression(
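
The console rewrite above moves ANSI processing onto a background task and batches events; the detail worth calling out is the offset bookkeeping: each event's highlight ranges are produced relative to its own output, so they must be shifted by the buffer length accumulated so far before the main thread applies them. A std-only sketch of that bookkeeping, where `parse_event` is a made-up stand-in for the real ANSI processor:

```rust
use std::ops::Range;

/// Stand-in parser: returns the text to append and highlight ranges
/// relative to the start of that text.
fn parse_event(event: &str) -> (String, Vec<Range<usize>>) {
    let text = format!("{}\n", event.trim_end());
    (text.clone(), vec![0..text.len() - 1])
}

/// Turn a batch of events into one string to insert plus spans that are
/// already absolute offsets into the editor buffer.
fn process_batch(mut buffer_len: usize, events: &[&str]) -> (String, Vec<Range<usize>>) {
    let mut to_insert = String::new();
    let mut spans = Vec::new();
    for event in events {
        let (text, local_spans) = parse_event(event);
        spans.extend(
            local_spans
                .into_iter()
                .map(|range| buffer_len + range.start..buffer_len + range.end),
        );
        buffer_len += text.len();
        to_insert.push_str(&text);
    }
    (to_insert, spans)
}

fn main() {
    // Pretend the buffer already holds 5 bytes of output.
    let (text, spans) = process_batch(5, &["hello", "world"]);
    assert_eq!(text, "hello\nworld\n");
    assert_eq!(spans, vec![5..10, 11..16]);
}
```
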
@@ -345,7 +271,8 @@ impl Console {
expression
});
-
+ self.history.add(&mut self.cursor, expression.clone());
+ self.cursor.reset();
self.session.update(cx, |session, cx| {
session
.evaluate(
@@ -365,7 +292,28 @@ impl Console {
});
}
- pub fn evaluate(&mut self, _: &Confirm, window: &mut Window, cx: &mut Context<Self>) {
+ fn previous_query(&mut self, _: &SelectPrevious, window: &mut Window, cx: &mut Context<Self>) {
+ let prev = self.history.previous(&mut self.cursor);
+ if let Some(prev) = prev {
+ self.query_bar.update(cx, |editor, cx| {
+ editor.set_text(prev, window, cx);
+ });
+ }
+ }
+
+ fn next_query(&mut self, _: &SelectNext, window: &mut Window, cx: &mut Context<Self>) {
+ let next = self.history.next(&mut self.cursor);
+ let query = next.unwrap_or_else(|| {
+ self.cursor.reset();
+ ""
+ });
+
+ self.query_bar.update(cx, |editor, cx| {
+ editor.set_text(query, window, cx);
+ });
+ }
+
+ fn evaluate(&mut self, _: &Confirm, window: &mut Window, cx: &mut Context<Self>) {
let expression = self.query_bar.update(cx, |editor, cx| {
let expression = editor.text(cx);
cx.defer_in(window, |editor, window, cx| {
@@ -375,6 +323,8 @@ impl Console {
expression
});
+ self.history.add(&mut self.cursor, expression.clone());
+ self.cursor.reset();
self.session.update(cx, |session, cx| {
session
.evaluate(
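
The new `previous_query`/`next_query` handlers give the console's query bar shell-style history: walking up recalls older expressions, and walking past the newest one clears the input and resets the cursor. A tiny sketch of that cursor behaviour — the real code reuses the project's SearchHistory type; this is a simplified stand-in:

```rust
struct QueryHistory {
    entries: Vec<String>,
    cursor: Option<usize>,
}

impl QueryHistory {
    fn previous(&mut self) -> Option<&str> {
        let index = match self.cursor {
            None => self.entries.len().checked_sub(1)?,
            Some(0) => 0,
            Some(i) => i - 1,
        };
        self.cursor = Some(index);
        self.entries.get(index).map(String::as_str)
    }

    fn next(&mut self) -> Option<&str> {
        let next = self.cursor? + 1;
        if next < self.entries.len() {
            self.cursor = Some(next);
            self.entries.get(next).map(String::as_str)
        } else {
            // Walked past the newest entry: reset so the query bar goes blank.
            self.cursor = None;
            None
        }
    }
}
```
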
@@ -458,31 +408,50 @@ impl Console {
EditorElement::new(&self.query_bar, Self::editor_style(&self.query_bar, cx))
}
- fn update_output(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ pub(crate) fn update_output(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ if self.update_output_task.is_some() {
+ return;
+ }
let session = self.session.clone();
let token = self.last_token;
-
- self.update_output_task = cx.spawn_in(window, async move |this, cx| {
- _ = session.update_in(cx, move |session, window, cx| {
- let (output, last_processed_token) = session.output(token);
-
- _ = this.update(cx, |this, cx| {
- if last_processed_token == this.last_token {
- return;
- }
- this.add_messages(output, window, cx);
-
- this.last_token = last_processed_token;
+ self.update_output_task = Some(cx.spawn_in(window, async move |this, cx| {
+ let Some((last_processed_token, task)) = session
+ .update_in(cx, |session, window, cx| {
+ let (output, last_processed_token) = session.output(token);
+
+ this.update(cx, |this, cx| {
+ if last_processed_token == this.last_token {
+ return None;
+ }
+ Some((
+ last_processed_token,
+ this.add_messages(output.cloned().collect(), window, cx),
+ ))
+ })
+ .ok()
+ .flatten()
+ })
+ .ok()
+ .flatten()
+ else {
+ _ = this.update(cx, |this, _| {
+ this.update_output_task.take();
});
+ return;
+ };
+ _ = task.await.log_err();
+ _ = this.update(cx, |this, _| {
+ this.last_token = last_processed_token;
+ this.update_output_task.take();
});
- });
+ }));
}
}
impl Render for Console {
- fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let query_focus_handle = self.query_bar.focus_handle(cx);
-
+ self.update_output(window, cx);
v_flex()
.track_focus(&self.focus_handle)
.key_context("DebugConsole")
@@ -493,6 +462,8 @@ impl Render for Console {
.when(self.is_running(cx), |this| {
this.child(Divider::horizontal()).child(
h_flex()
+ .on_action(cx.listener(Self::previous_query))
+ .on_action(cx.listener(Self::next_query))
.gap_1()
.bg(cx.theme().colors().editor_background)
.child(self.render_query_bar(cx))
@@ -585,15 +556,27 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
buffer: &Entity<Buffer>,
position: language::Anchor,
text: &str,
- _trigger_in_words: bool,
+ trigger_in_words: bool,
menu_is_open: bool,
cx: &mut Context<Editor>,
) -> bool {
+ let mut chars = text.chars();
+ let char = if let Some(char) = chars.next() {
+ char
+ } else {
+ return false;
+ };
+
let snapshot = buffer.read(cx).snapshot();
if !menu_is_open && !snapshot.settings_at(position, cx).show_completions_on_input {
return false;
}
+ let classifier = snapshot.char_classifier_at(position).for_completion(true);
+ if trigger_in_words && classifier.is_word(char) {
+ return true;
+ }
+
self.0
.read_with(cx, |console, cx| {
console
@@ -626,21 +609,28 @@ impl ConsoleQueryBarCompletionProvider {
variable_list.completion_variables(cx)
}) {
if let Some(evaluate_name) = &variable.evaluate_name {
- variables.insert(evaluate_name.clone(), variable.value.clone());
+ if variables
+ .insert(evaluate_name.clone(), variable.value.clone())
+ .is_none()
+ {
+ string_matches.push(StringMatchCandidate {
+ id: 0,
+ string: evaluate_name.clone(),
+ char_bag: evaluate_name.chars().collect(),
+ });
+ }
+ }
+
+ if variables
+ .insert(variable.name.clone(), variable.value.clone())
+ .is_none()
+ {
string_matches.push(StringMatchCandidate {
id: 0,
- string: evaluate_name.clone(),
- char_bag: evaluate_name.chars().collect(),
+ string: variable.name.clone(),
+ char_bag: variable.name.chars().collect(),
});
}
-
- variables.insert(variable.name.clone(), variable.value.clone());
-
- string_matches.push(StringMatchCandidate {
- id: 0,
- string: variable.name.clone(),
- char_bag: variable.name.chars().collect(),
- });
}
(variables, string_matches)
@@ -686,11 +676,13 @@ impl ConsoleQueryBarCompletionProvider {
new_text: string_match.string.clone(),
label: CodeLabel {
filter_range: 0..string_match.string.len(),
- text: format!("{} {}", string_match.string, variable_value),
+ text: string_match.string.clone(),
runs: Vec::new(),
},
icon_path: None,
- documentation: None,
+ documentation: Some(CompletionDocumentation::MultiLineMarkdown(
+ variable_value.into(),
+ )),
confirm: None,
source: project::CompletionSource::Custom,
insert_text_mode: None,
@@ -705,6 +697,32 @@ impl ConsoleQueryBarCompletionProvider {
})
}
+ const fn completion_type_score(completion_type: CompletionItemType) -> usize {
+ match completion_type {
+ CompletionItemType::Field | CompletionItemType::Property => 0,
+ CompletionItemType::Variable | CompletionItemType::Value => 1,
+ CompletionItemType::Method
+ | CompletionItemType::Function
+ | CompletionItemType::Constructor => 2,
+ CompletionItemType::Class
+ | CompletionItemType::Interface
+ | CompletionItemType::Module => 3,
+ _ => 4,
+ }
+ }
+
+ fn completion_item_sort_text(completion_item: &CompletionItem) -> String {
+ completion_item.sort_text.clone().unwrap_or_else(|| {
+ format!(
+ "{:03}_{}",
+ Self::completion_type_score(
+ completion_item.type_.unwrap_or(CompletionItemType::Text)
+ ),
+ completion_item.label.to_ascii_lowercase()
+ )
+ })
+ }
+
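
As a quick check of the fallback sort key generated above (used only when the adapter supplies no `sort_text`): the three-digit score prefix groups completions by kind before falling back to the lowercased label, so plain lexicographic ordering of the keys yields the intended buckets.

```rust
fn main() {
    // Field `id` (score 0), variable `index` (score 1), method `insert` (score 2).
    let keys = ["000_id", "001_index", "002_insert"];
    assert!(keys.windows(2).all(|pair| pair[0] < pair[1]));
}
```
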
fn client_completions(
&self,
console: &Entity<Console>,
@@ -729,6 +747,7 @@ impl ConsoleQueryBarCompletionProvider {
let completions = completions
.into_iter()
.map(|completion| {
+ let sort_text = Self::completion_item_sort_text(&completion);
let new_text = completion
.text
.as_ref()
@@ -761,12 +780,11 @@ impl ConsoleQueryBarCompletionProvider {
runs: Vec::new(),
},
icon_path: None,
- documentation: None,
+ documentation: completion.detail.map(|detail| {
+ CompletionDocumentation::MultiLineMarkdown(detail.into())
+ }),
confirm: None,
- source: project::CompletionSource::BufferWord {
- word_range: buffer_position..language::Anchor::MAX,
- resolved: false,
- },
+ source: project::CompletionSource::Dap { sort_text },
insert_text_mode: None,
}
})
@@ -845,3 +863,84 @@ impl ansi::Handler for ConsoleHandler {
}
}
}
+
+fn color_fetcher(color: ansi::Color) -> fn(&Theme) -> Hsla {
+ let color_fetcher: fn(&Theme) -> Hsla = match color {
+ // Named and theme defined colors
+ ansi::Color::Named(n) => match n {
+ ansi::NamedColor::Black => |theme| theme.colors().terminal_ansi_black,
+ ansi::NamedColor::Red => |theme| theme.colors().terminal_ansi_red,
+ ansi::NamedColor::Green => |theme| theme.colors().terminal_ansi_green,
+ ansi::NamedColor::Yellow => |theme| theme.colors().terminal_ansi_yellow,
+ ansi::NamedColor::Blue => |theme| theme.colors().terminal_ansi_blue,
+ ansi::NamedColor::Magenta => |theme| theme.colors().terminal_ansi_magenta,
+ ansi::NamedColor::Cyan => |theme| theme.colors().terminal_ansi_cyan,
+ ansi::NamedColor::White => |theme| theme.colors().terminal_ansi_white,
+ ansi::NamedColor::BrightBlack => |theme| theme.colors().terminal_ansi_bright_black,
+ ansi::NamedColor::BrightRed => |theme| theme.colors().terminal_ansi_bright_red,
+ ansi::NamedColor::BrightGreen => |theme| theme.colors().terminal_ansi_bright_green,
+ ansi::NamedColor::BrightYellow => |theme| theme.colors().terminal_ansi_bright_yellow,
+ ansi::NamedColor::BrightBlue => |theme| theme.colors().terminal_ansi_bright_blue,
+ ansi::NamedColor::BrightMagenta => |theme| theme.colors().terminal_ansi_bright_magenta,
+ ansi::NamedColor::BrightCyan => |theme| theme.colors().terminal_ansi_bright_cyan,
+ ansi::NamedColor::BrightWhite => |theme| theme.colors().terminal_ansi_bright_white,
+ ansi::NamedColor::Foreground => |theme| theme.colors().terminal_foreground,
+ ansi::NamedColor::Background => |theme| theme.colors().terminal_background,
+ ansi::NamedColor::Cursor => |theme| theme.players().local().cursor,
+ ansi::NamedColor::DimBlack => |theme| theme.colors().terminal_ansi_dim_black,
+ ansi::NamedColor::DimRed => |theme| theme.colors().terminal_ansi_dim_red,
+ ansi::NamedColor::DimGreen => |theme| theme.colors().terminal_ansi_dim_green,
+ ansi::NamedColor::DimYellow => |theme| theme.colors().terminal_ansi_dim_yellow,
+ ansi::NamedColor::DimBlue => |theme| theme.colors().terminal_ansi_dim_blue,
+ ansi::NamedColor::DimMagenta => |theme| theme.colors().terminal_ansi_dim_magenta,
+ ansi::NamedColor::DimCyan => |theme| theme.colors().terminal_ansi_dim_cyan,
+ ansi::NamedColor::DimWhite => |theme| theme.colors().terminal_ansi_dim_white,
+ ansi::NamedColor::BrightForeground => |theme| theme.colors().terminal_bright_foreground,
+ ansi::NamedColor::DimForeground => |theme| theme.colors().terminal_dim_foreground,
+ },
+ // 'True' colors
+ ansi::Color::Spec(_) => |theme| theme.colors().editor_background,
+ // 8 bit, indexed colors
+ ansi::Color::Indexed(i) => {
+ match i {
+ // 0-15 are the same as the named colors above
+ 0 => |theme| theme.colors().terminal_ansi_black,
+ 1 => |theme| theme.colors().terminal_ansi_red,
+ 2 => |theme| theme.colors().terminal_ansi_green,
+ 3 => |theme| theme.colors().terminal_ansi_yellow,
+ 4 => |theme| theme.colors().terminal_ansi_blue,
+ 5 => |theme| theme.colors().terminal_ansi_magenta,
+ 6 => |theme| theme.colors().terminal_ansi_cyan,
+ 7 => |theme| theme.colors().terminal_ansi_white,
+ 8 => |theme| theme.colors().terminal_ansi_bright_black,
+ 9 => |theme| theme.colors().terminal_ansi_bright_red,
+ 10 => |theme| theme.colors().terminal_ansi_bright_green,
+ 11 => |theme| theme.colors().terminal_ansi_bright_yellow,
+ 12 => |theme| theme.colors().terminal_ansi_bright_blue,
+ 13 => |theme| theme.colors().terminal_ansi_bright_magenta,
+ 14 => |theme| theme.colors().terminal_ansi_bright_cyan,
+ 15 => |theme| theme.colors().terminal_ansi_bright_white,
+ // 16-231 are a 6x6x6 RGB color cube, mapped to 0-255 using steps defined by XTerm.
+ // See: https://github.com/xterm-x11/xterm-snapshots/blob/master/256colres.pl
+ // 16..=231 => {
+ // let (r, g, b) = rgb_for_index(index as u8);
+ // rgba_color(
+ // if r == 0 { 0 } else { r * 40 + 55 },
+ // if g == 0 { 0 } else { g * 40 + 55 },
+ // if b == 0 { 0 } else { b * 40 + 55 },
+ // )
+ // }
+ // 232-255 are a 24-step grayscale ramp from (8, 8, 8) to (238, 238, 238).
+ // 232..=255 => {
+ // let i = index as u8 - 232; // Align index to 0..24
+ // let value = i * 10 + 8;
+ // rgba_color(value, value, value)
+ // }
+ // For compatibility with the alacritty::Colors interface
+ // See: https://github.com/alacritty/alacritty/blob/master/alacritty_terminal/src/term/color.rs
+ _ => |_| gpui::black(),
+ }
+ }
+ };
+ color_fetcher
+}
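For reference, the indexed-color branches left commented out above currently fall through to gpui::black(); the standard xterm mapping they describe looks roughly like this (illustrative sketch, helper names hypothetical, not part of this change):

// Illustrative sketch of the xterm 256-color mapping described in the
// comments above; this is not the crate's own helper.
fn xterm_indexed_to_rgb(index: u8) -> (u8, u8, u8) {
    match index {
        // 16-231: 6x6x6 color cube; each component is 0 or 55 + 40 * n.
        16..=231 => {
            let i = index - 16;
            let (r, g, b) = (i / 36, (i / 6) % 6, i % 6);
            let scale = |c: u8| if c == 0 { 0 } else { c * 40 + 55 };
            (scale(r), scale(g), scale(b))
        }
        // 232-255: 24-step grayscale ramp from (8, 8, 8) to (238, 238, 238).
        232..=255 => {
            let v = (index - 232) * 10 + 8;
            (v, v, v)
        }
        // 0-15 are the named ANSI colors handled by the match above.
        _ => (0, 0, 0),
    }
}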
@@ -18,12 +18,19 @@ use util::debug_panic;
actions!(
variable_list,
[
+ /// Expands the selected variable entry to show its children.
ExpandSelectedEntry,
+ /// Collapses the selected variable entry to hide its children.
CollapseSelectedEntry,
+ /// Copies the variable name to the clipboard.
CopyVariableName,
+ /// Copies the variable value to the clipboard.
CopyVariableValue,
+ /// Edits the value of the selected variable.
EditVariable,
+ /// Adds the selected variable to the watch list.
AddWatch,
+ /// Removes the selected variable from the watch list.
RemoveWatch,
]
);
@@ -115,6 +115,7 @@ pub fn start_debug_session_with<T: Fn(&Arc<DebugAdapterClient>) + 'static>(
config.to_scenario(),
TaskContext::default(),
None,
+ None,
window,
cx,
)
@@ -232,7 +232,6 @@ async fn test_escape_code_processing(executor: BackgroundExecutor, cx: &mut Test
location_reference: None,
}))
.await;
- // [crates/debugger_ui/src/session/running/console.rs:147:9] &to_insert = "Could not read source map for file:///Users/cole/roles-at/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/lib/typescript.js: ENOENT: no such file or directory, open '/Users/cole/roles-at/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/lib/typescript.js.map'\n"
client
.fake_event(dap::messages::Events::Output(dap::OutputEvent {
category: None,
@@ -260,7 +259,6 @@ async fn test_escape_code_processing(executor: BackgroundExecutor, cx: &mut Test
}))
.await;
- // introduce some background highlight
client
.fake_event(dap::messages::Events::Output(dap::OutputEvent {
category: None,
@@ -274,7 +272,6 @@ async fn test_escape_code_processing(executor: BackgroundExecutor, cx: &mut Test
location_reference: None,
}))
.await;
- // another random line
client
.fake_event(dap::messages::Events::Output(dap::OutputEvent {
category: None,
@@ -294,6 +291,11 @@ async fn test_escape_code_processing(executor: BackgroundExecutor, cx: &mut Test
let _running_state =
active_debug_session_panel(workspace, cx).update_in(cx, |item, window, cx| {
cx.focus_self(window);
+ item.running_state().update(cx, |this, cx| {
+ this.console()
+ .update(cx, |this, cx| this.update_output(window, cx));
+ });
+
item.running_state().clone()
});
@@ -37,15 +37,23 @@ async fn test_dap_logger_captures_all_session_rpc_messages(
.await;
assert!(
- log_store.read_with(cx, |log_store, _| log_store
- .contained_session_ids()
- .is_empty()),
- "log_store shouldn't contain any session IDs before any sessions were created"
+ log_store.read_with(cx, |log_store, _| !log_store.has_projects()),
+ "log_store shouldn't contain any projects before any projects were created"
);
let project = Project::test(fs, [path!("/project").as_ref()], cx).await;
let workspace = init_test_workspace(&project, cx).await;
+ assert!(
+ log_store.read_with(cx, |log_store, _| log_store.has_projects()),
+ "log_store shouldn't contain any projects before any projects were created"
+ );
+ assert!(
+ log_store.read_with(cx, |log_store, _| log_store
+ .contained_session_ids(&project.downgrade())
+ .is_empty()),
+ "log_store shouldn't contain any projects before any projects were created"
+ );
let cx = &mut VisualTestContext::from_window(*workspace, cx);
// Start a debug session
@@ -54,20 +62,22 @@ async fn test_dap_logger_captures_all_session_rpc_messages(
let client = session.update(cx, |session, _| session.adapter_client().unwrap());
assert_eq!(
- log_store.read_with(cx, |log_store, _| log_store.contained_session_ids().len()),
+ log_store.read_with(cx, |log_store, _| log_store
+ .contained_session_ids(&project.downgrade())
+ .len()),
1,
);
assert!(
log_store.read_with(cx, |log_store, _| log_store
- .contained_session_ids()
+ .contained_session_ids(&project.downgrade())
.contains(&session_id)),
"log_store should contain the session IDs of the started session"
);
assert!(
!log_store.read_with(cx, |log_store, _| log_store
- .rpc_messages_for_session_id(session_id)
+ .rpc_messages_for_session_id(&project.downgrade(), session_id)
.is_empty()),
"We should have the initialization sequence in the log store"
);
@@ -141,7 +141,14 @@ async fn test_debug_session_substitutes_variables_and_relativizes_paths(
workspace
.update(cx, |workspace, window, cx| {
- workspace.start_debug_session(scenario, task_context.clone(), None, window, cx)
+ workspace.start_debug_session(
+ scenario,
+ task_context.clone(),
+ None,
+ None,
+ window,
+ cx,
+ )
})
.unwrap();
@@ -267,7 +274,6 @@ async fn test_dap_adapter_config_conversion_and_validation(cx: &mut TestAppConte
"Debugpy",
"PHP",
"JavaScript",
- "Ruby",
"Delve",
"GDB",
"fake-adapter",
@@ -48,7 +48,14 @@ use workspace::{
actions!(
diagnostics,
- [Deploy, ToggleWarnings, ToggleDiagnosticsRefresh]
+ [
+ /// Opens the project diagnostics view.
+ Deploy,
+ /// Toggles the display of warning-level diagnostics.
+ ToggleWarnings,
+ /// Toggles automatic refresh of diagnostics.
+ ToggleDiagnosticsRefresh
+ ]
);
#[derive(Default)]
@@ -61,6 +61,7 @@ parking_lot.workspace = true
pretty_assertions.workspace = true
project.workspace = true
rand.workspace = true
+regex.workspace = true
rpc.workspace = true
schemars.workspace = true
serde.workspace = true
@@ -108,6 +109,7 @@ theme = { workspace = true, features = ["test-support"] }
tree-sitter-html.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-typescript.workspace = true
+tree-sitter-yaml.workspace = true
unindent.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -4,6 +4,7 @@ use gpui::{Action, actions};
use schemars::JsonSchema;
use util::serde::default_true;
+/// Selects the next occurrence of the current selection.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -12,6 +13,7 @@ pub struct SelectNext {
pub replace_newest: bool,
}
+/// Selects the previous occurrence of the current selection.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -20,6 +22,7 @@ pub struct SelectPrevious {
pub replace_newest: bool,
}
+/// Moves the cursor to the beginning of the current line.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -30,6 +33,7 @@ pub struct MoveToBeginningOfLine {
pub stop_at_indent: bool,
}
+/// Selects from the cursor to the beginning of the current line.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -40,6 +44,7 @@ pub struct SelectToBeginningOfLine {
pub stop_at_indent: bool,
}
+/// Deletes from the cursor to the beginning of the current line.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -48,6 +53,7 @@ pub struct DeleteToBeginningOfLine {
pub(super) stop_at_indent: bool,
}
+/// Moves the cursor up by one page.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -56,6 +62,7 @@ pub struct MovePageUp {
pub(super) center_cursor: bool,
}
+/// Moves the cursor down by one page.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -64,6 +71,7 @@ pub struct MovePageDown {
pub(super) center_cursor: bool,
}
+/// Moves the cursor to the end of the current line.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -72,6 +80,7 @@ pub struct MoveToEndOfLine {
pub stop_at_soft_wraps: bool,
}
+/// Selects from the cursor to the end of the current line.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -80,6 +89,7 @@ pub struct SelectToEndOfLine {
pub(super) stop_at_soft_wraps: bool,
}
+/// Toggles the display of available code actions at the cursor position.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -101,6 +111,7 @@ pub enum CodeActionSource {
QuickActionBar,
}
+/// Confirms and accepts the currently selected completion suggestion.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -109,6 +120,7 @@ pub struct ConfirmCompletion {
pub item_ix: Option<usize>,
}
+/// Composes multiple completion suggestions into a single completion.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -117,6 +129,7 @@ pub struct ComposeCompletion {
pub item_ix: Option<usize>,
}
+/// Confirms and applies the currently selected code action.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -125,6 +138,7 @@ pub struct ConfirmCodeAction {
pub item_ix: Option<usize>,
}
+/// Toggles comment markers for the selected lines.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -135,6 +149,7 @@ pub struct ToggleComments {
pub ignore_indent: bool,
}
+/// Moves the cursor up by a specified number of lines.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -143,6 +158,7 @@ pub struct MoveUpByLines {
pub(super) lines: u32,
}
+/// Moves the cursor down by a specified number of lines.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -151,6 +167,7 @@ pub struct MoveDownByLines {
pub(super) lines: u32,
}
+/// Extends selection up by a specified number of lines.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -159,6 +176,7 @@ pub struct SelectUpByLines {
pub(super) lines: u32,
}
+/// Extends selection down by a specified number of lines.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -167,6 +185,7 @@ pub struct SelectDownByLines {
pub(super) lines: u32,
}
+/// Expands all excerpts in the editor.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -175,6 +194,7 @@ pub struct ExpandExcerpts {
pub(super) lines: u32,
}
+/// Expands excerpts above the current position.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -183,6 +203,7 @@ pub struct ExpandExcerptsUp {
pub(super) lines: u32,
}
+/// Expands excerpts below the current position.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -191,6 +212,7 @@ pub struct ExpandExcerptsDown {
pub(super) lines: u32,
}
+/// Shows code completion suggestions at the cursor position.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -199,10 +221,12 @@ pub struct ShowCompletions {
pub(super) trigger: Option<String>,
}
+/// Handles text input in the editor.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
pub struct HandleInput(pub String);
+/// Deletes from the cursor to the end of the next word.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -211,6 +235,7 @@ pub struct DeleteToNextWordEnd {
pub ignore_newlines: bool,
}
+/// Deletes from the cursor to the start of the previous word.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -219,10 +244,12 @@ pub struct DeleteToPreviousWordStart {
pub ignore_newlines: bool,
}
+/// Folds all code blocks at the specified indentation level.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
pub struct FoldAtLevel(pub u32);
+/// Spawns the nearest available task from the current cursor position.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
@@ -238,11 +265,20 @@ pub enum UuidVersion {
V7,
}
-actions!(debugger, [RunToCursor, EvaluateSelectedText]);
+actions!(
+ debugger,
+ [
+ /// Runs program execution to the current cursor position.
+ RunToCursor,
+ /// Evaluates the selected text in the debugger context.
+ EvaluateSelectedText
+ ]
+);
actions!(
go_to_line,
[
+ /// Toggles the go to line dialog.
#[action(name = "Toggle")]
ToggleGoToLine
]
@@ -251,217 +287,430 @@ actions!(
actions!(
editor,
[
+ /// Accepts the full edit prediction.
AcceptEditPrediction,
+ /// Accepts a partial Copilot suggestion.
AcceptPartialCopilotSuggestion,
+ /// Accepts a partial edit prediction.
AcceptPartialEditPrediction,
+ /// Adds a cursor above the current selection.
AddSelectionAbove,
+ /// Adds a cursor below the current selection.
AddSelectionBelow,
+ /// Applies all diff hunks in the editor.
ApplyAllDiffHunks,
+ /// Applies the diff hunk at the current position.
ApplyDiffHunk,
+ /// Deletes the character before the cursor.
Backspace,
+ /// Cancels the current operation.
Cancel,
+ /// Cancels the running flycheck operation.
CancelFlycheck,
+ /// Cancels pending language server work.
CancelLanguageServerWork,
+ /// Clears flycheck results.
ClearFlycheck,
+ /// Confirms the rename operation.
ConfirmRename,
+ /// Confirms completion by inserting at cursor.
ConfirmCompletionInsert,
+ /// Confirms completion by replacing existing text.
ConfirmCompletionReplace,
+ /// Navigates to the first item in the context menu.
ContextMenuFirst,
+ /// Navigates to the last item in the context menu.
ContextMenuLast,
+ /// Navigates to the next item in the context menu.
ContextMenuNext,
+ /// Navigates to the previous item in the context menu.
ContextMenuPrevious,
+ /// Converts indentation from tabs to spaces.
ConvertIndentationToSpaces,
+ /// Converts indentation from spaces to tabs.
ConvertIndentationToTabs,
+ /// Converts selected text to kebab-case.
ConvertToKebabCase,
+ /// Converts selected text to lowerCamelCase.
ConvertToLowerCamelCase,
+ /// Converts selected text to lowercase.
ConvertToLowerCase,
+ /// Toggles the case of selected text.
ConvertToOppositeCase,
+ /// Converts selected text to snake_case.
ConvertToSnakeCase,
+ /// Converts selected text to Title Case.
ConvertToTitleCase,
+ /// Converts selected text to UpperCamelCase.
ConvertToUpperCamelCase,
+ /// Converts selected text to UPPERCASE.
ConvertToUpperCase,
+ /// Applies ROT13 cipher to selected text.
ConvertToRot13,
+ /// Applies ROT47 cipher to selected text.
ConvertToRot47,
+ /// Copies selected text to the clipboard.
Copy,
+ /// Copies selected text to the clipboard with leading/trailing whitespace trimmed.
CopyAndTrim,
+ /// Copies the current file location to the clipboard.
CopyFileLocation,
+ /// Copies the highlighted text as JSON.
CopyHighlightJson,
+ /// Copies the current file name to the clipboard.
CopyFileName,
+ /// Copies the file name without extension to the clipboard.
CopyFileNameWithoutExtension,
+ /// Copies a permalink to the current line.
CopyPermalinkToLine,
+ /// Cuts selected text to the clipboard.
Cut,
+ /// Cuts from cursor to end of line.
CutToEndOfLine,
+ /// Deletes the character after the cursor.
Delete,
+ /// Deletes the current line.
DeleteLine,
+ /// Deletes from cursor to end of line.
DeleteToEndOfLine,
+ /// Deletes to the end of the next subword.
DeleteToNextSubwordEnd,
+ /// Deletes to the start of the previous subword.
DeleteToPreviousSubwordStart,
+ /// Displays names of all active cursors.
DisplayCursorNames,
+ /// Duplicates the current line below.
DuplicateLineDown,
+ /// Duplicates the current line above.
DuplicateLineUp,
+ /// Duplicates the current selection.
DuplicateSelection,
+ /// Expands all diff hunks in the editor.
#[action(deprecated_aliases = ["editor::ExpandAllHunkDiffs"])]
ExpandAllDiffHunks,
+ /// Expands macros recursively at cursor position.
ExpandMacroRecursively,
+ /// Finds all references to the symbol at cursor.
FindAllReferences,
+ /// Finds the next match in the search.
FindNextMatch,
+ /// Finds the previous match in the search.
FindPreviousMatch,
+ /// Folds the current code block.
Fold,
+ /// Folds all foldable regions in the editor.
FoldAll,
+ /// Folds all function bodies in the editor.
FoldFunctionBodies,
+ /// Folds the current code block and all its children.
FoldRecursive,
+ /// Folds the selected ranges.
FoldSelectedRanges,
+ /// Toggles folding at the current position.
ToggleFold,
+ /// Toggles recursive folding at the current position.
ToggleFoldRecursive,
+ /// Formats the entire document.
Format,
+ /// Formats only the selected text.
FormatSelections,
+ /// Goes to the declaration of the symbol at cursor.
GoToDeclaration,
+ /// Goes to declaration in a split pane.
GoToDeclarationSplit,
+ /// Goes to the definition of the symbol at cursor.
GoToDefinition,
+ /// Goes to definition in a split pane.
GoToDefinitionSplit,
+ /// Goes to the next diagnostic in the file.
GoToDiagnostic,
+ /// Goes to the next diff hunk.
GoToHunk,
+ /// Goes to the previous diff hunk.
GoToPreviousHunk,
+ /// Goes to the implementation of the symbol at cursor.
GoToImplementation,
+ /// Goes to implementation in a split pane.
GoToImplementationSplit,
+ /// Goes to the next change in the file.
GoToNextChange,
+ /// Goes to the parent module of the current file.
GoToParentModule,
+ /// Goes to the previous change in the file.
GoToPreviousChange,
+ /// Goes to the previous diagnostic in the file.
GoToPreviousDiagnostic,
+ /// Goes to the type definition of the symbol at cursor.
GoToTypeDefinition,
+ /// Goes to type definition in a split pane.
GoToTypeDefinitionSplit,
+ /// Scrolls down by half a page.
HalfPageDown,
+ /// Scrolls up by half a page.
HalfPageUp,
+ /// Shows hover information for the symbol at cursor.
Hover,
+ /// Increases indentation of selected lines.
Indent,
+ /// Inserts a UUID v4 at cursor position.
InsertUuidV4,
+ /// Inserts a UUID v7 at cursor position.
InsertUuidV7,
+ /// Joins the current line with the next line.
JoinLines,
+ /// Cuts to kill ring (Emacs-style).
KillRingCut,
+ /// Yanks from kill ring (Emacs-style).
KillRingYank,
+ /// Moves cursor down one line.
LineDown,
+ /// Moves cursor up one line.
LineUp,
+ /// Moves cursor down.
MoveDown,
+ /// Moves cursor left.
MoveLeft,
+ /// Moves the current line down.
MoveLineDown,
+ /// Moves the current line up.
MoveLineUp,
+ /// Moves cursor right.
MoveRight,
+ /// Moves cursor to the beginning of the document.
MoveToBeginning,
+ /// Moves cursor to the enclosing bracket.
MoveToEnclosingBracket,
+ /// Moves cursor to the end of the document.
MoveToEnd,
+ /// Moves cursor to the end of the paragraph.
MoveToEndOfParagraph,
+ /// Moves cursor to the end of the next subword.
MoveToNextSubwordEnd,
+ /// Moves cursor to the end of the next word.
MoveToNextWordEnd,
+ /// Moves cursor to the start of the previous subword.
MoveToPreviousSubwordStart,
+ /// Moves cursor to the start of the previous word.
MoveToPreviousWordStart,
+ /// Moves cursor to the start of the paragraph.
MoveToStartOfParagraph,
+ /// Moves cursor to the start of the current excerpt.
MoveToStartOfExcerpt,
+ /// Moves cursor to the start of the next excerpt.
MoveToStartOfNextExcerpt,
+ /// Moves cursor to the end of the current excerpt.
MoveToEndOfExcerpt,
+ /// Moves cursor to the end of the previous excerpt.
MoveToEndOfPreviousExcerpt,
+ /// Moves cursor up.
MoveUp,
+ /// Inserts a new line and moves cursor to it.
Newline,
+ /// Inserts a new line above the current line.
NewlineAbove,
+ /// Inserts a new line below the current line.
NewlineBelow,
+ /// Navigates to the next edit prediction.
NextEditPrediction,
+ /// Scrolls to the next screen.
NextScreen,
+ /// Opens the context menu at cursor position.
OpenContextMenu,
+ /// Opens excerpts from the current file.
OpenExcerpts,
+ /// Opens excerpts in a split pane.
OpenExcerptsSplit,
+ /// Opens the proposed changes editor.
OpenProposedChangesEditor,
+ /// Opens documentation for the symbol at cursor.
OpenDocs,
+ /// Opens a permalink to the current line.
OpenPermalinkToLine,
+ /// Opens the file whose name is selected in the editor.
#[action(deprecated_aliases = ["editor::OpenFile"])]
OpenSelectedFilename,
+ /// Opens all selections in a multibuffer.
OpenSelectionsInMultibuffer,
+ /// Opens the URL at cursor position.
OpenUrl,
+ /// Organizes import statements.
OrganizeImports,
+ /// Decreases indentation of selected lines.
Outdent,
+ /// Automatically adjusts indentation based on context.
AutoIndent,
+ /// Scrolls down by one page.
PageDown,
+ /// Scrolls up by one page.
PageUp,
+ /// Pastes from clipboard.
Paste,
+ /// Navigates to the previous edit prediction.
PreviousEditPrediction,
+ /// Redoes the last undone edit.
Redo,
+ /// Redoes the last selection change.
RedoSelection,
+ /// Renames the symbol at cursor.
Rename,
+ /// Restarts the language server for the current file.
RestartLanguageServer,
+ /// Reveals the current file in the system file manager.
RevealInFileManager,
+ /// Reverses the order of selected lines.
ReverseLines,
+ /// Reloads the file from disk.
ReloadFile,
+ /// Rewraps text to fit within the preferred line length.
Rewrap,
+ /// Runs flycheck diagnostics.
RunFlycheck,
+ /// Scrolls the cursor to the bottom of the viewport.
ScrollCursorBottom,
+ /// Scrolls the cursor to the center of the viewport.
ScrollCursorCenter,
+ /// Cycles cursor position between center, top, and bottom.
ScrollCursorCenterTopBottom,
+ /// Scrolls the cursor to the top of the viewport.
ScrollCursorTop,
+ /// Selects all text in the editor.
SelectAll,
+ /// Selects all matches of the current selection.
SelectAllMatches,
+ /// Selects to the start of the current excerpt.
SelectToStartOfExcerpt,
+ /// Selects to the start of the next excerpt.
SelectToStartOfNextExcerpt,
+ /// Selects to the end of the current excerpt.
SelectToEndOfExcerpt,
+ /// Selects to the end of the previous excerpt.
SelectToEndOfPreviousExcerpt,
+ /// Extends selection down.
SelectDown,
+ /// Selects the enclosing symbol.
SelectEnclosingSymbol,
+ /// Selects the next larger syntax node.
SelectLargerSyntaxNode,
+ /// Extends selection left.
SelectLeft,
+ /// Selects the current line.
SelectLine,
+ /// Extends selection down by one page.
SelectPageDown,
+ /// Extends selection up by one page.
SelectPageUp,
+ /// Extends selection right.
SelectRight,
+ /// Selects the next smaller syntax node.
SelectSmallerSyntaxNode,
+ /// Selects to the beginning of the document.
SelectToBeginning,
+ /// Selects to the end of the document.
SelectToEnd,
+ /// Selects to the end of the paragraph.
SelectToEndOfParagraph,
+ /// Selects to the end of the next subword.
SelectToNextSubwordEnd,
+ /// Selects to the end of the next word.
SelectToNextWordEnd,
+ /// Selects to the start of the previous subword.
SelectToPreviousSubwordStart,
+ /// Selects to the start of the previous word.
SelectToPreviousWordStart,
+ /// Selects to the start of the paragraph.
SelectToStartOfParagraph,
+ /// Extends selection up.
SelectUp,
+ /// Shows the system character palette.
ShowCharacterPalette,
+ /// Shows edit prediction at cursor.
ShowEditPrediction,
+ /// Shows signature help for the current function.
ShowSignatureHelp,
+ /// Shows word completions.
ShowWordCompletions,
+ /// Randomly shuffles selected lines.
ShuffleLines,
+ /// Navigates to the next signature in the signature help popup.
+ SignatureHelpNext,
+ /// Navigates to the previous signature in the signature help popup.
+ SignatureHelpPrevious,
+ /// Sorts selected lines case-insensitively.
SortLinesCaseInsensitive,
+ /// Sorts selected lines case-sensitively.
SortLinesCaseSensitive,
+ /// Splits selection into individual lines.
SplitSelectionIntoLines,
+ /// Stops the language server for the current file.
StopLanguageServer,
+ /// Switches between source and header files.
SwitchSourceHeader,
+ /// Inserts a tab character or indents.
Tab,
+ /// Removes a tab character or outdents.
Backtab,
+ /// Toggles a breakpoint at the current line.
ToggleBreakpoint,
+ /// Toggles the case of selected text.
ToggleCase,
+ /// Disables the breakpoint at the current line.
DisableBreakpoint,
+ /// Enables the breakpoint at the current line.
EnableBreakpoint,
+ /// Edits the log message for a breakpoint.
EditLogBreakpoint,
+ /// Toggles automatic signature help.
ToggleAutoSignatureHelp,
+ /// Toggles inline git blame display.
ToggleGitBlameInline,
+ /// Opens the git commit for the blame at cursor.
OpenGitBlameCommit,
+ /// Toggles the diagnostics panel.
ToggleDiagnostics,
+ /// Toggles indent guides display.
ToggleIndentGuides,
+ /// Toggles inlay hints display.
ToggleInlayHints,
+ /// Toggles inline values display.
ToggleInlineValues,
+ /// Toggles inline diagnostics display.
ToggleInlineDiagnostics,
+ /// Toggles edit prediction feature.
ToggleEditPrediction,
+ /// Toggles line numbers display.
ToggleLineNumbers,
+ /// Toggles the minimap display.
ToggleMinimap,
+ /// Swaps the start and end of the current selection.
SwapSelectionEnds,
+ /// Sets a mark at the current position.
SetMark,
+ /// Toggles relative line numbers display.
ToggleRelativeLineNumbers,
+ /// Toggles diff display for selected hunks.
#[action(deprecated_aliases = ["editor::ToggleHunkDiff"])]
ToggleSelectedDiffHunks,
+ /// Toggles the selection menu.
ToggleSelectionMenu,
+ /// Toggles soft wrap mode.
ToggleSoftWrap,
+ /// Toggles the tab bar display.
ToggleTabBar,
+ /// Transposes characters around cursor.
Transpose,
+ /// Undoes the last edit.
Undo,
+ /// Undoes the last selection change.
UndoSelection,
+ /// Unfolds all folded regions.
UnfoldAll,
+ /// Unfolds lines at cursor.
UnfoldLines,
+ /// Unfolds recursively at cursor.
UnfoldRecursive,
+ /// Removes duplicate lines (case-insensitive).
UniqueLinesCaseInsensitive,
+ /// Removes duplicate lines (case-sensitive).
UniqueLinesCaseSensitive,
]
);
@@ -1083,11 +1083,10 @@ impl CompletionsMenu {
if lsp_completion.kind == Some(CompletionItemKind::SNIPPET)
);
- let sort_text = if let CompletionSource::Lsp { lsp_completion, .. } = &completion.source
- {
- lsp_completion.sort_text.as_deref()
- } else {
- None
+ let sort_text = match &completion.source {
+ CompletionSource::Lsp { lsp_completion, .. } => lsp_completion.sort_text.as_deref(),
+ CompletionSource::Dap { sort_text } => Some(sort_text.as_str()),
+ _ => None,
};
let (sort_kind, sort_label) = completion.sort_key();
@@ -296,12 +296,25 @@ impl<'a> Iterator for InlayChunks<'a> {
*chunk = self.buffer_chunks.next().unwrap();
}
- let (prefix, suffix) = chunk.text.split_at(
- chunk
- .text
- .len()
- .min(self.transforms.end(&()).0.0 - self.output_offset.0),
- );
+ let desired_bytes = self.transforms.end(&()).0.0 - self.output_offset.0;
+
+ // If we're already at the transform boundary, skip to the next transform
+ if desired_bytes == 0 {
+ self.inlay_chunks = None;
+ self.transforms.next(&());
+ return self.next();
+ }
+
+ // Determine split index handling edge cases
+ let split_index = if desired_bytes >= chunk.text.len() {
+ chunk.text.len()
+ } else if chunk.text.is_char_boundary(desired_bytes) {
+ desired_bytes
+ } else {
+ find_next_utf8_boundary(chunk.text, desired_bytes)
+ };
+
+ let (prefix, suffix) = chunk.text.split_at(split_index);
chunk.text = suffix;
self.output_offset.0 += prefix.len();
@@ -344,16 +357,14 @@ impl<'a> Iterator for InlayChunks<'a> {
id: ChunkRendererId::Inlay(inlay.id),
render: Arc::new(move |cx| {
div()
- .w_4()
- .h_4()
.relative()
+ .size_3p5()
.child(
div()
.absolute()
.right_1()
- .w_3p5()
- .h_3p5()
- .border_2()
+ .size_3()
+ .border_1()
.border_color(cx.theme().colors().border)
.bg(color),
)
@@ -393,8 +404,24 @@ impl<'a> Iterator for InlayChunks<'a> {
let inlay_chunk = self
.inlay_chunk
.get_or_insert_with(|| inlay_chunks.next().unwrap());
- let (chunk, remainder) =
- inlay_chunk.split_at(inlay_chunk.len().min(next_inlay_highlight_endpoint));
+
+ // Determine split index handling edge cases
+ let split_index = if next_inlay_highlight_endpoint >= inlay_chunk.len() {
+ inlay_chunk.len()
+ } else if next_inlay_highlight_endpoint == 0 {
+ // Need to take at least one character to make progress
+ inlay_chunk
+ .chars()
+ .next()
+ .map(|c| c.len_utf8())
+ .unwrap_or(1)
+ } else if inlay_chunk.is_char_boundary(next_inlay_highlight_endpoint) {
+ next_inlay_highlight_endpoint
+ } else {
+ find_next_utf8_boundary(inlay_chunk, next_inlay_highlight_endpoint)
+ };
+
+ let (chunk, remainder) = inlay_chunk.split_at(split_index);
*inlay_chunk = remainder;
if inlay_chunk.is_empty() {
self.inlay_chunk = None;
@@ -414,7 +441,7 @@ impl<'a> Iterator for InlayChunks<'a> {
}
};
- if self.output_offset == self.transforms.end(&()).0 {
+ if self.output_offset >= self.transforms.end(&()).0 {
self.inlay_chunks = None;
self.transforms.next(&());
}
@@ -1145,6 +1172,31 @@ fn push_isomorphic(sum_tree: &mut SumTree<Transform>, summary: TextSummary) {
}
}
+/// Given a byte index that is NOT a UTF-8 boundary, find the next one.
+/// Assumes: 0 < byte_index < text.len() and !text.is_char_boundary(byte_index)
+#[inline(always)]
+fn find_next_utf8_boundary(text: &str, byte_index: usize) -> usize {
+ let bytes = text.as_bytes();
+ let mut idx = byte_index + 1;
+
+ // Scan forward until we find a boundary
+ while idx < text.len() {
+ if is_utf8_char_boundary(bytes[idx]) {
+ return idx;
+ }
+ idx += 1;
+ }
+
+ // Hit the end, return the full length
+ text.len()
+}
+
+// Private helper function taken from Rust's core::num module (which is both Apache2 and MIT licensed)
+const fn is_utf8_char_boundary(byte: u8) -> bool {
+ // This is bit magic equivalent to: b < 128 || b >= 192
+ (byte as i8) >= -0x40
+}
+
#[cfg(test)]
mod tests {
use super::*;
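A minimal standalone sketch of how the boundary scan above behaves on multi-byte input (it uses std's is_char_boundary for brevity, which is equivalent to the bit test):

// Sketch only: same contract as the helper above, assuming the caller passes
// a byte index that is not already a char boundary.
fn next_boundary(text: &str, byte_index: usize) -> usize {
    (byte_index + 1..text.len())
        .find(|&i| text.is_char_boundary(i))
        .unwrap_or(text.len())
}

fn main() {
    let text = "ab…cd"; // '…' occupies bytes 2..5
    assert!(!text.is_char_boundary(3));
    // A requested split at byte 3 rounds up to the next boundary, byte 5,
    // so the multi-byte character is never split.
    assert_eq!(next_boundary(text, 3), 5);
    assert_eq!(text.split_at(next_boundary(text, 3)), ("ab…", "cd"));
}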
@@ -1884,4 +1936,210 @@ mod tests {
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
}
+
+ /// Helper to create test highlights for an inlay
+ fn create_inlay_highlights(
+ inlay_id: InlayId,
+ highlight_range: Range<usize>,
+ position: Anchor,
+ ) -> TreeMap<TypeId, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>> {
+ let mut inlay_highlights = TreeMap::default();
+ let mut type_highlights = TreeMap::default();
+ type_highlights.insert(
+ inlay_id,
+ (
+ HighlightStyle::default(),
+ InlayHighlight {
+ inlay: inlay_id,
+ range: highlight_range,
+ inlay_position: position,
+ },
+ ),
+ );
+ inlay_highlights.insert(TypeId::of::<()>(), type_highlights);
+ inlay_highlights
+ }
+
+ #[gpui::test]
+ fn test_inlay_utf8_boundary_panic_fix(cx: &mut App) {
+ init_test(cx);
+
+ // This test verifies that we handle UTF-8 character boundaries correctly
+ // when splitting inlay text for highlighting. Previously, this would panic
+ // when trying to split at byte 13, which is in the middle of the '…' character.
+ //
+ // See https://github.com/zed-industries/zed/issues/33641
+ let buffer = MultiBuffer::build_simple("fn main() {}\n", cx);
+ let (mut inlay_map, _) = InlayMap::new(buffer.read(cx).snapshot(cx));
+
+ // Create an inlay with text that contains a multi-byte character
+ // The string "SortingDirec…" contains an ellipsis character '…' which is 3 bytes (E2 80 A6)
+ let inlay_text = "SortingDirec…";
+ let position = buffer.read(cx).snapshot(cx).anchor_before(Point::new(0, 5));
+
+ let inlay = Inlay {
+ id: InlayId::Hint(0),
+ position,
+ text: text::Rope::from(inlay_text),
+ color: None,
+ };
+
+ let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
+
+ // Create highlights that request a split at byte 13, which is in the middle
+ // of the '…' character (bytes 12..15). We include the full character.
+ let inlay_highlights = create_inlay_highlights(InlayId::Hint(0), 0..13, position);
+
+ let highlights = crate::display_map::Highlights {
+ text_highlights: None,
+ inlay_highlights: Some(&inlay_highlights),
+ styles: crate::display_map::HighlightStyles::default(),
+ };
+
+ // Collect chunks - this previously would panic
+ let chunks: Vec<_> = inlay_snapshot
+ .chunks(
+ InlayOffset(0)..InlayOffset(inlay_snapshot.len().0),
+ false,
+ highlights,
+ )
+ .collect();
+
+ // Verify the chunks are correct
+ let full_text: String = chunks.iter().map(|c| c.chunk.text).collect();
+ assert_eq!(full_text, "fn maSortingDirec…in() {}\n");
+
+ // Verify the highlighted portion includes the complete ellipsis character
+ let highlighted_chunks: Vec<_> = chunks
+ .iter()
+ .filter(|c| c.chunk.highlight_style.is_some() && c.chunk.is_inlay)
+ .collect();
+
+ assert_eq!(highlighted_chunks.len(), 1);
+ assert_eq!(highlighted_chunks[0].chunk.text, "SortingDirec…");
+ }
+
+ #[gpui::test]
+ fn test_inlay_utf8_boundaries(cx: &mut App) {
+ init_test(cx);
+
+ struct TestCase {
+ inlay_text: &'static str,
+ highlight_range: Range<usize>,
+ expected_highlighted: &'static str,
+ description: &'static str,
+ }
+
+ let test_cases = vec![
+ TestCase {
+ inlay_text: "Hello👋World",
+ highlight_range: 0..7,
+ expected_highlighted: "Hello👋",
+ description: "Emoji boundary - rounds up to include full emoji",
+ },
+ TestCase {
+ inlay_text: "Test→End",
+ highlight_range: 0..5,
+ expected_highlighted: "Test→",
+ description: "Arrow boundary - rounds up to include full arrow",
+ },
+ TestCase {
+ inlay_text: "café",
+ highlight_range: 0..4,
+ expected_highlighted: "café",
+ description: "Accented char boundary - rounds up to include full é",
+ },
+ TestCase {
+ inlay_text: "🎨🎭🎪",
+ highlight_range: 0..5,
+ expected_highlighted: "🎨🎭",
+ description: "Multiple emojis - partial highlight",
+ },
+ TestCase {
+ inlay_text: "普通话",
+ highlight_range: 0..4,
+ expected_highlighted: "普通",
+ description: "Chinese characters - partial highlight",
+ },
+ TestCase {
+ inlay_text: "Hello",
+ highlight_range: 0..2,
+ expected_highlighted: "He",
+ description: "ASCII only - no adjustment needed",
+ },
+ TestCase {
+ inlay_text: "👋",
+ highlight_range: 0..1,
+ expected_highlighted: "👋",
+ description: "Single emoji - partial byte range includes whole char",
+ },
+ TestCase {
+ inlay_text: "Test",
+ highlight_range: 0..0,
+ expected_highlighted: "",
+ description: "Empty range",
+ },
+ TestCase {
+ inlay_text: "🎨ABC",
+ highlight_range: 2..5,
+ expected_highlighted: "A",
+ description: "Range starting mid-emoji skips the emoji",
+ },
+ ];
+
+ for test_case in test_cases {
+ let buffer = MultiBuffer::build_simple("test", cx);
+ let (mut inlay_map, _) = InlayMap::new(buffer.read(cx).snapshot(cx));
+ let position = buffer.read(cx).snapshot(cx).anchor_before(Point::new(0, 2));
+
+ let inlay = Inlay {
+ id: InlayId::Hint(0),
+ position,
+ text: text::Rope::from(test_case.inlay_text),
+ color: None,
+ };
+
+ let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
+ let inlay_highlights = create_inlay_highlights(
+ InlayId::Hint(0),
+ test_case.highlight_range.clone(),
+ position,
+ );
+
+ let highlights = crate::display_map::Highlights {
+ text_highlights: None,
+ inlay_highlights: Some(&inlay_highlights),
+ styles: crate::display_map::HighlightStyles::default(),
+ };
+
+ let chunks: Vec<_> = inlay_snapshot
+ .chunks(
+ InlayOffset(0)..InlayOffset(inlay_snapshot.len().0),
+ false,
+ highlights,
+ )
+ .collect();
+
+ // Verify we got chunks and they total to the expected text
+ let full_text: String = chunks.iter().map(|c| c.chunk.text).collect();
+ assert_eq!(
+ full_text,
+ format!("te{}st", test_case.inlay_text),
+ "Full text mismatch for case: {}",
+ test_case.description
+ );
+
+ // Verify that the highlighted portion matches expectations
+ let highlighted_text: String = chunks
+ .iter()
+ .filter(|c| c.chunk.highlight_style.is_some() && c.chunk.is_inlay)
+ .map(|c| c.chunk.text)
+ .collect();
+ assert_eq!(
+ highlighted_text, test_case.expected_highlighted,
+ "Highlighted text mismatch for case: {} (text: '{}', range: {:?})",
+ test_case.description, test_case.inlay_text, test_case.highlight_range
+ );
+ }
+ }
}
@@ -2362,6 +2362,10 @@ impl Editor {
None => {}
}
+ if self.signature_help_state.has_multiple_signatures() {
+ key_context.add("showing_signature_help");
+ }
+
// Disable vim contexts when a sub-editor (e.g. rename/inline assistant) is focused.
if !self.focus_handle(cx).contains_focused(window, cx)
|| (self.is_focused(window) || self.mouse_menu_is_focused(window, cx))
@@ -6186,7 +6190,14 @@ impl Editor {
workspace.update(cx, |workspace, cx| {
dap::send_telemetry(&scenario, TelemetrySpawnLocation::Gutter, cx);
- workspace.start_debug_session(scenario, context, Some(buffer), window, cx);
+ workspace.start_debug_session(
+ scenario,
+ context,
+ Some(buffer),
+ None,
+ window,
+ cx,
+ );
});
Some(Task::ready(Ok(())))
}
@@ -8704,7 +8715,7 @@ impl Editor {
h_flex()
.bg(cx.theme().colors().editor_background)
.border(BORDER_WIDTH)
- .shadow_sm()
+ .shadow_xs()
.border_color(cx.theme().colors().border)
.rounded_l_lg()
.when(line_count > 1, |el| el.rounded_br_lg())
@@ -8904,7 +8915,7 @@ impl Editor {
.border_1()
.bg(Self::edit_prediction_line_popover_bg_color(cx))
.border_color(Self::edit_prediction_callout_popover_border_color(cx))
- .shadow_sm()
+ .shadow_xs()
.when(!has_keybind, |el| {
let status_colors = cx.theme().status();
@@ -11541,66 +11552,90 @@ impl Editor {
let language_settings = buffer.language_settings_at(selection.head(), cx);
let language_scope = buffer.language_scope_at(selection.head());
+ let indent_and_prefix_for_row =
+ |row: u32| -> (IndentSize, Option<String>, Option<String>) {
+ let indent = buffer.indent_size_for_line(MultiBufferRow(row));
+ let (comment_prefix, rewrap_prefix) =
+ if let Some(language_scope) = &language_scope {
+ let indent_end = Point::new(row, indent.len);
+ let comment_prefix = language_scope
+ .line_comment_prefixes()
+ .iter()
+ .find(|prefix| buffer.contains_str_at(indent_end, prefix))
+ .map(|prefix| prefix.to_string());
+ let line_end = Point::new(row, buffer.line_len(MultiBufferRow(row)));
+ let line_text_after_indent = buffer
+ .text_for_range(indent_end..line_end)
+ .collect::<String>();
+ let rewrap_prefix = language_scope
+ .rewrap_prefixes()
+ .iter()
+ .find_map(|prefix_regex| {
+ prefix_regex.find(&line_text_after_indent).map(|mat| {
+ if mat.start() == 0 {
+ Some(mat.as_str().to_string())
+ } else {
+ None
+ }
+ })
+ })
+ .flatten();
+ (comment_prefix, rewrap_prefix)
+ } else {
+ (None, None)
+ };
+ (indent, comment_prefix, rewrap_prefix)
+ };
+
let mut ranges = Vec::new();
- let mut current_range_start = first_row;
let from_empty_selection = selection.is_empty();
+ let mut current_range_start = first_row;
let mut prev_row = first_row;
- let mut prev_indent = buffer.indent_size_for_line(MultiBufferRow(first_row));
- let mut prev_comment_prefix = if let Some(language_scope) = &language_scope {
- let indent = buffer.indent_size_for_line(MultiBufferRow(first_row));
- let indent_end = Point::new(first_row, indent.len);
- language_scope
- .line_comment_prefixes()
- .iter()
- .find(|prefix| buffer.contains_str_at(indent_end, prefix))
- .cloned()
- } else {
- None
- };
+ let (
+ mut current_range_indent,
+ mut current_range_comment_prefix,
+ mut current_range_rewrap_prefix,
+ ) = indent_and_prefix_for_row(first_row);
for row in non_blank_rows_iter.skip(1) {
let has_paragraph_break = row > prev_row + 1;
- let row_indent = buffer.indent_size_for_line(MultiBufferRow(row));
- let row_comment_prefix = if let Some(language_scope) = &language_scope {
- let indent = buffer.indent_size_for_line(MultiBufferRow(row));
- let indent_end = Point::new(row, indent.len);
- language_scope
- .line_comment_prefixes()
- .iter()
- .find(|prefix| buffer.contains_str_at(indent_end, prefix))
- .cloned()
- } else {
- None
- };
+ let (row_indent, row_comment_prefix, row_rewrap_prefix) =
+ indent_and_prefix_for_row(row);
- let has_boundary_change =
- row_indent != prev_indent || row_comment_prefix != prev_comment_prefix;
+ let has_indent_change = row_indent != current_range_indent;
+ let has_comment_change = row_comment_prefix != current_range_comment_prefix;
+
+ let has_boundary_change = has_comment_change
+ || row_rewrap_prefix.is_some()
+ || (has_indent_change && current_range_comment_prefix.is_some());
if has_paragraph_break || has_boundary_change {
ranges.push((
language_settings.clone(),
Point::new(current_range_start, 0)
..Point::new(prev_row, buffer.line_len(MultiBufferRow(prev_row))),
- prev_indent,
- prev_comment_prefix.clone(),
+ current_range_indent,
+ current_range_comment_prefix.clone(),
+ current_range_rewrap_prefix.clone(),
from_empty_selection,
));
current_range_start = row;
+ current_range_indent = row_indent;
+ current_range_comment_prefix = row_comment_prefix;
+ current_range_rewrap_prefix = row_rewrap_prefix;
}
-
prev_row = row;
- prev_indent = row_indent;
- prev_comment_prefix = row_comment_prefix;
}
ranges.push((
language_settings.clone(),
Point::new(current_range_start, 0)
..Point::new(prev_row, buffer.line_len(MultiBufferRow(prev_row))),
- prev_indent,
- prev_comment_prefix,
+ current_range_indent,
+ current_range_comment_prefix,
+ current_range_rewrap_prefix,
from_empty_selection,
));
@@ -11610,8 +11645,14 @@ impl Editor {
let mut edits = Vec::new();
let mut rewrapped_row_ranges = Vec::<RangeInclusive<u32>>::new();
- for (language_settings, wrap_range, indent_size, comment_prefix, from_empty_selection) in
- wrap_ranges
+ for (
+ language_settings,
+ wrap_range,
+ indent_size,
+ comment_prefix,
+ rewrap_prefix,
+ from_empty_selection,
+ ) in wrap_ranges
{
let mut start_row = wrap_range.start.row;
let mut end_row = wrap_range.end.row;
@@ -11627,12 +11668,16 @@ impl Editor {
let tab_size = language_settings.tab_size;
- let mut line_prefix = indent_size.chars().collect::<String>();
+ let indent_prefix = indent_size.chars().collect::<String>();
+ let mut line_prefix = indent_prefix.clone();
let mut inside_comment = false;
if let Some(prefix) = &comment_prefix {
line_prefix.push_str(prefix);
inside_comment = true;
}
+ if let Some(prefix) = &rewrap_prefix {
+ line_prefix.push_str(prefix);
+ }
let allow_rewrap_based_on_language = match language_settings.allow_rewrap {
RewrapBehavior::InComments => inside_comment,
@@ -11679,12 +11724,18 @@ impl Editor {
let selection_text = buffer.text_for_range(start..end).collect::<String>();
let Some(lines_without_prefixes) = selection_text
.lines()
- .map(|line| {
- line.strip_prefix(&line_prefix)
- .or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start()))
- .with_context(|| {
- format!("line did not start with prefix {line_prefix:?}: {line:?}")
- })
+ .enumerate()
+ .map(|(ix, line)| {
+ let line_trimmed = line.trim_start();
+ if rewrap_prefix.is_some() && ix > 0 {
+ Ok(line_trimmed)
+ } else {
+ line_trimmed
+ .strip_prefix(&line_prefix.trim_start())
+ .with_context(|| {
+ format!("line did not start with prefix {line_prefix:?}: {line:?}")
+ })
+ }
})
.collect::<Result<Vec<_>, _>>()
.log_err()
@@ -11697,8 +11748,16 @@ impl Editor {
.language_settings_at(Point::new(start_row, 0), cx)
.preferred_line_length as usize
});
+
+ let subsequent_lines_prefix = if let Some(rewrap_prefix_str) = &rewrap_prefix {
+ format!("{}{}", indent_prefix, " ".repeat(rewrap_prefix_str.len()))
+ } else {
+ line_prefix.clone()
+ };
+
let wrapped_text = wrap_with_prefix(
line_prefix,
+ subsequent_lines_prefix,
lines_without_prefixes.join("\n"),
wrap_column,
tab_size,
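A worked example of the continuation prefix computed above: the rewrap prefix is replaced by spaces of the same width so wrapped lines align under the first line's text (names here are illustrative, not the editor's actual variables):

// Sketch of the continuation-prefix rule used for rewrap prefixes.
fn continuation_prefix(indent_prefix: &str, rewrap_prefix: Option<&str>, line_prefix: &str) -> String {
    match rewrap_prefix {
        Some(prefix) => format!("{indent_prefix}{}", " ".repeat(prefix.len())),
        None => line_prefix.to_string(),
    }
}

fn main() {
    // "1. This is a numbered list item…" keeps "1. " on the first line and
    // indents wrapped lines by three spaces, matching the rewrap tests below.
    assert_eq!(continuation_prefix("", Some("1. "), "1. "), "   ");
    // "  - item" keeps its two-space indent and swaps "- " for two spaces.
    assert_eq!(continuation_prefix("  ", Some("- "), "  - "), "    ");
}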
@@ -12527,6 +12586,38 @@ impl Editor {
}
}
+ pub fn signature_help_prev(
+ &mut self,
+ _: &SignatureHelpPrevious,
+ _: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(popover) = self.signature_help_state.popover_mut() {
+ if popover.current_signature == 0 {
+ popover.current_signature = popover.signatures.len() - 1;
+ } else {
+ popover.current_signature -= 1;
+ }
+ cx.notify();
+ }
+ }
+
+ pub fn signature_help_next(
+ &mut self,
+ _: &SignatureHelpNext,
+ _: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(popover) = self.signature_help_state.popover_mut() {
+ if popover.current_signature + 1 == popover.signatures.len() {
+ popover.current_signature = 0;
+ } else {
+ popover.current_signature += 1;
+ }
+ cx.notify();
+ }
+ }
+
pub fn move_to_previous_word_start(
&mut self,
_: &MoveToPreviousWordStart,
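The two handlers above cycle through the available signatures with wrap-around at both ends; equivalently, as an illustrative sketch:

// Wrap-around index arithmetic for cycling through n signatures (n > 0).
fn next_signature(current: usize, n: usize) -> usize {
    (current + 1) % n
}

fn previous_signature(current: usize, n: usize) -> usize {
    (current + n - 1) % n
}

fn main() {
    assert_eq!(next_signature(2, 3), 0); // last wraps to first
    assert_eq!(previous_signature(0, 3), 2); // first wraps to last
}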
@@ -19751,6 +19842,7 @@ impl Editor {
self.tasks_update_task = Some(self.refresh_runnables(window, cx));
self.update_edit_prediction_settings(cx);
self.refresh_inline_completion(true, false, window, cx);
+ self.refresh_inline_values(cx);
self.refresh_inlay_hints(
InlayHintRefreshReason::SettingsChange(inlay_hint_settings(
self.selections.newest_anchor().head(),
@@ -21200,18 +21292,22 @@ fn test_word_breaking_tokenizer() {
}
fn wrap_with_prefix(
- line_prefix: String,
+ first_line_prefix: String,
+ subsequent_lines_prefix: String,
unwrapped_text: String,
wrap_column: usize,
tab_size: NonZeroU32,
preserve_existing_whitespace: bool,
) -> String {
- let line_prefix_len = char_len_with_expanded_tabs(0, &line_prefix, tab_size);
+ let first_line_prefix_len = char_len_with_expanded_tabs(0, &first_line_prefix, tab_size);
+ let subsequent_lines_prefix_len =
+ char_len_with_expanded_tabs(0, &subsequent_lines_prefix, tab_size);
let mut wrapped_text = String::new();
- let mut current_line = line_prefix.clone();
+ let mut current_line = first_line_prefix.clone();
+ let mut is_first_line = true;
let tokenizer = WordBreakingTokenizer::new(&unwrapped_text);
- let mut current_line_len = line_prefix_len;
+ let mut current_line_len = first_line_prefix_len;
let mut in_whitespace = false;
for token in tokenizer {
let have_preceding_whitespace = in_whitespace;
@@ -21221,13 +21317,19 @@ fn wrap_with_prefix(
grapheme_len,
} => {
in_whitespace = false;
+ let current_prefix_len = if is_first_line {
+ first_line_prefix_len
+ } else {
+ subsequent_lines_prefix_len
+ };
if current_line_len + grapheme_len > wrap_column
- && current_line_len != line_prefix_len
+ && current_line_len != current_prefix_len
{
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
- current_line.truncate(line_prefix.len());
- current_line_len = line_prefix_len;
+ is_first_line = false;
+ current_line = subsequent_lines_prefix.clone();
+ current_line_len = subsequent_lines_prefix_len;
}
current_line.push_str(token);
current_line_len += grapheme_len;
@@ -21244,32 +21346,46 @@ fn wrap_with_prefix(
token = " ";
grapheme_len = 1;
}
+ let current_prefix_len = if is_first_line {
+ first_line_prefix_len
+ } else {
+ subsequent_lines_prefix_len
+ };
if current_line_len + grapheme_len > wrap_column {
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
- current_line.truncate(line_prefix.len());
- current_line_len = line_prefix_len;
- } else if current_line_len != line_prefix_len || preserve_existing_whitespace {
+ is_first_line = false;
+ current_line = subsequent_lines_prefix.clone();
+ current_line_len = subsequent_lines_prefix_len;
+ } else if current_line_len != current_prefix_len || preserve_existing_whitespace {
current_line.push_str(token);
current_line_len += grapheme_len;
}
}
WordBreakToken::Newline => {
in_whitespace = true;
+ let current_prefix_len = if is_first_line {
+ first_line_prefix_len
+ } else {
+ subsequent_lines_prefix_len
+ };
if preserve_existing_whitespace {
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
- current_line.truncate(line_prefix.len());
- current_line_len = line_prefix_len;
+ is_first_line = false;
+ current_line = subsequent_lines_prefix.clone();
+ current_line_len = subsequent_lines_prefix_len;
} else if have_preceding_whitespace {
continue;
- } else if current_line_len + 1 > wrap_column && current_line_len != line_prefix_len
+ } else if current_line_len + 1 > wrap_column
+ && current_line_len != current_prefix_len
{
wrapped_text.push_str(current_line.trim_end());
wrapped_text.push('\n');
- current_line.truncate(line_prefix.len());
- current_line_len = line_prefix_len;
- } else if current_line_len != line_prefix_len {
+ is_first_line = false;
+ current_line = subsequent_lines_prefix.clone();
+ current_line_len = subsequent_lines_prefix_len;
+ } else if current_line_len != current_prefix_len {
current_line.push(' ');
current_line_len += 1;
}
@@ -21287,6 +21403,7 @@ fn wrap_with_prefix(
fn test_wrap_with_prefix() {
assert_eq!(
wrap_with_prefix(
+ "# ".to_string(),
"# ".to_string(),
"abcdefg".to_string(),
4,
@@ -21297,6 +21414,7 @@ fn test_wrap_with_prefix() {
);
assert_eq!(
wrap_with_prefix(
+ "".to_string(),
"".to_string(),
"\thello world".to_string(),
8,
@@ -21307,6 +21425,7 @@ fn test_wrap_with_prefix() {
);
assert_eq!(
wrap_with_prefix(
+ "// ".to_string(),
"// ".to_string(),
"xx \nyy zz aa bb cc".to_string(),
12,
@@ -21317,6 +21436,7 @@ fn test_wrap_with_prefix() {
);
assert_eq!(
wrap_with_prefix(
+ String::new(),
String::new(),
"这是什么 \n 钢笔".to_string(),
3,
@@ -21825,10 +21945,10 @@ impl SemanticsProvider for Entity<Project> {
cx: &mut App,
) -> Option<Task<Result<Vec<LocationLink>>>> {
Some(self.update(cx, |project, cx| match kind {
- GotoDefinitionKind::Symbol => project.definition(&buffer, position, cx),
- GotoDefinitionKind::Declaration => project.declaration(&buffer, position, cx),
- GotoDefinitionKind::Type => project.type_definition(&buffer, position, cx),
- GotoDefinitionKind::Implementation => project.implementation(&buffer, position, cx),
+ GotoDefinitionKind::Symbol => project.definitions(&buffer, position, cx),
+ GotoDefinitionKind::Declaration => project.declarations(&buffer, position, cx),
+ GotoDefinitionKind::Type => project.type_definitions(&buffer, position, cx),
+ GotoDefinitionKind::Implementation => project.implementations(&buffer, position, cx),
}))
}
@@ -378,7 +378,6 @@ pub enum SnippetSortOrder {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
-#[schemars(deny_unknown_fields)]
pub struct EditorSettingsContent {
/// Whether the cursor blinks in the editor.
///
@@ -3,7 +3,7 @@ use std::sync::Arc;
use gpui::{App, FontFeatures, FontWeight};
use project::project_settings::{InlineBlameSettings, ProjectSettings};
use settings::{EditableSettingControl, Settings};
-use theme::{FontFamilyCache, ThemeSettings};
+use theme::{FontFamilyCache, FontFamilyName, ThemeSettings};
use ui::{
CheckboxWithLabel, ContextMenu, DropdownMenu, NumericStepper, SettingsContainer, SettingsGroup,
prelude::*,
@@ -75,7 +75,7 @@ impl EditableSettingControl for BufferFontFamilyControl {
value: Self::Value,
_cx: &App,
) {
- settings.buffer_font_family = Some(value.to_string());
+ settings.buffer_font_family = Some(FontFamilyName(value.into()));
}
}
@@ -25,12 +25,12 @@ use language::{
DiagnosticSourceKind, FakeLspAdapter, LanguageConfig, LanguageConfigOverride, LanguageMatcher,
LanguageName, Override, Point,
language_settings::{
- AllLanguageSettings, AllLanguageSettingsContent, CompletionSettings,
- LanguageSettingsContent, LspInsertMode, PrettierSettings,
+ AllLanguageSettings, AllLanguageSettingsContent, CompletionSettings, FormatterList,
+ LanguageSettingsContent, LspInsertMode, PrettierSettings, SelectedFormatter,
},
tree_sitter_python,
};
-use language_settings::{Formatter, FormatterList, IndentGuideSettings};
+use language_settings::{Formatter, IndentGuideSettings};
use lsp::CompletionParams;
use multi_buffer::{IndentGuide, PathKey};
use parking_lot::Mutex;
@@ -3468,6 +3468,70 @@ async fn test_indent_outdent(cx: &mut TestAppContext) {
"});
}
+#[gpui::test]
+async fn test_indent_yaml_comments_with_multiple_cursors(cx: &mut TestAppContext) {
+ // This is a regression test for issue #33761
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+ let yaml_language = languages::language("yaml", tree_sitter_yaml::LANGUAGE.into());
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(yaml_language), cx));
+
+ cx.set_state(
+ r#"ˇ# ingress:
+ˇ# api:
+ˇ# enabled: false
+ˇ# pathType: Prefix
+ˇ# console:
+ˇ# enabled: false
+ˇ# pathType: Prefix
+"#,
+ );
+
+ // Press tab to indent all lines
+ cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
+
+ cx.assert_editor_state(
+ r#" ˇ# ingress:
+ ˇ# api:
+ ˇ# enabled: false
+ ˇ# pathType: Prefix
+ ˇ# console:
+ ˇ# enabled: false
+ ˇ# pathType: Prefix
+"#,
+ );
+}
+
+#[gpui::test]
+async fn test_indent_yaml_non_comments_with_multiple_cursors(cx: &mut TestAppContext) {
+ // Ensures the fix for issue #33761 didn't break indentation of non-comment YAML lines
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+ let yaml_language = languages::language("yaml", tree_sitter_yaml::LANGUAGE.into());
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(yaml_language), cx));
+
+ cx.set_state(
+ r#"ˇingress:
+ˇ api:
+ˇ enabled: false
+ˇ pathType: Prefix
+"#,
+ );
+
+ // Press tab to indent all lines
+ cx.update_editor(|e, window, cx| e.tab(&Tab, window, cx));
+
+ cx.assert_editor_state(
+ r#"ˇingress:
+ ˇapi:
+ ˇenabled: false
+ ˇpathType: Prefix
+"#,
+ );
+}
+
#[gpui::test]
async fn test_indent_outdent_with_hard_tabs(cx: &mut TestAppContext) {
init_test(cx, |settings| {
@@ -3567,7 +3631,7 @@ async fn test_indent_outdent_with_hard_tabs(cx: &mut TestAppContext) {
#[gpui::test]
fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
init_test(cx, |settings| {
- settings.languages.extend([
+ settings.languages.0.extend([
(
"TOML".into(),
LanguageSettingsContent {
@@ -5145,7 +5209,7 @@ fn test_transpose(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_rewrap(cx: &mut TestAppContext) {
init_test(cx, |settings| {
- settings.languages.extend([
+ settings.languages.0.extend([
(
"Markdown".into(),
LanguageSettingsContent {
@@ -5210,6 +5274,10 @@ async fn test_rewrap(cx: &mut TestAppContext) {
let markdown_language = Arc::new(Language::new(
LanguageConfig {
name: "Markdown".into(),
+ rewrap_prefixes: vec![
+ regex::Regex::new("\\d+\\.\\s+").unwrap(),
+ regex::Regex::new("[-*+]\\s+").unwrap(),
+ ],
..LanguageConfig::default()
},
None,
@@ -5372,7 +5440,82 @@ async fn test_rewrap(cx: &mut TestAppContext) {
A long long long line of markdown text
to wrap.ˇ
"},
- markdown_language,
+ markdown_language.clone(),
+ &mut cx,
+ );
+
+ // Test that rewrap prefixes act as wrap boundaries and that wrapped lines keep their relative indentation in Markdown documents
+ assert_rewrap(
+ indoc! {"
+ «1. This is a numbered list item that is very long and needs to be wrapped properly.
+ 2. This is a numbered list item that is very long and needs to be wrapped properly.
+ - This is an unordered list item that is also very long and should not merge with the numbered item.ˇ»
+ "},
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge
+ with the numbered item.ˇ»
+ "},
+ markdown_language.clone(),
+ &mut cx,
+ );
+
+ // Test that rewrapping adds continuation-line indentation after a rewrap prefix when it doesn't already exist.
+ assert_rewrap(
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge with
+ the numbered item.ˇ»
+ "},
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge
+ with the numbered item.ˇ»
+ "},
+ markdown_language.clone(),
+ &mut cx,
+ );
+
+ // Test that rewrapping maintains continuation-line indentation when it already exists.
+ assert_rewrap(
+ indoc! {"
+ «1. This is a numbered list
+ item that is very long and needs to be wrapped properly.
+ 2. This is a numbered list
+ item that is very long and needs to be wrapped properly.
+ - This is an unordered list item that is also very long and
+ should not merge with the numbered item.ˇ»
+ "},
+ indoc! {"
+ «1. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ 2. This is a numbered list item that is
+ very long and needs to be wrapped
+ properly.
+ - This is an unordered list item that is
+ also very long and should not merge
+ with the numbered item.ˇ»
+ "},
+ markdown_language.clone(),
&mut cx,
);
@@ -9326,7 +9469,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
// Set rust language override and assert overridden tabsize is sent to language server
update_test_language_settings(cx, |settings| {
- settings.languages.insert(
+ settings.languages.0.insert(
"Rust".into(),
LanguageSettingsContent {
tab_size: NonZeroU32::new(8),
@@ -9890,7 +10033,7 @@ async fn test_range_format_during_save(cx: &mut TestAppContext) {
// Set Rust language override and assert overridden tabsize is sent to language server
update_test_language_settings(cx, |settings| {
- settings.languages.insert(
+ settings.languages.0.insert(
"Rust".into(),
LanguageSettingsContent {
tab_size: NonZeroU32::new(8),
@@ -9933,9 +10076,9 @@ async fn test_range_format_during_save(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
init_test(cx, |settings| {
- settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(
- FormatterList(vec![Formatter::LanguageServer { name: None }].into()),
- ))
+ settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
+ Formatter::LanguageServer { name: None },
+ )))
});
let fs = FakeFs::new(cx.executor());
@@ -10062,21 +10205,17 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
async fn test_multiple_formatters(cx: &mut TestAppContext) {
init_test(cx, |settings| {
settings.defaults.remove_trailing_whitespace_on_save = Some(true);
- settings.defaults.formatter =
- Some(language_settings::SelectedFormatter::List(FormatterList(
- vec![
- Formatter::LanguageServer { name: None },
- Formatter::CodeActions(
- [
- ("code-action-1".into(), true),
- ("code-action-2".into(), true),
- ]
- .into_iter()
- .collect(),
- ),
+ settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Vec(vec![
+ Formatter::LanguageServer { name: None },
+ Formatter::CodeActions(
+ [
+ ("code-action-1".into(), true),
+ ("code-action-2".into(), true),
]
- .into(),
- )))
+ .into_iter()
+ .collect(),
+ ),
+ ])))
});
let fs = FakeFs::new(cx.executor());
@@ -10328,9 +10467,9 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) {
init_test(cx, |settings| {
- settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(
- FormatterList(vec![Formatter::LanguageServer { name: None }].into()),
- ))
+ settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Vec(vec![
+ Formatter::LanguageServer { name: None },
+ ])))
});
let fs = FakeFs::new(cx.executor());
@@ -10536,7 +10675,7 @@ async fn test_concurrent_format_requests(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
init_test(cx, |settings| {
- settings.defaults.formatter = Some(language_settings::SelectedFormatter::Auto)
+ settings.defaults.formatter = Some(SelectedFormatter::Auto)
});
let mut cx = EditorLspTestContext::new_rust(
@@ -10791,9 +10930,10 @@ async fn test_handle_input_for_show_signature_help_auto_signature_help_true(
cx.editor(|editor, _, _| {
let signature_help_state = editor.signature_help_state.popover().cloned();
+ let signature = signature_help_state.unwrap();
assert_eq!(
- signature_help_state.unwrap().label,
- "param1: u8, param2: u8"
+ signature.signatures[signature.current_signature].label,
+ "fn sample(param1: u8, param2: u8)"
);
});
}
@@ -10962,9 +11102,10 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut TestA
cx.update_editor(|editor, _, _| {
let signature_help_state = editor.signature_help_state.popover().cloned();
assert!(signature_help_state.is_some());
+ let signature = signature_help_state.unwrap();
assert_eq!(
- signature_help_state.unwrap().label,
- "param1: u8, param2: u8"
+ signature.signatures[signature.current_signature].label,
+ "fn sample(param1: u8, param2: u8)"
);
editor.signature_help_state = SignatureHelpState::default();
});
@@ -11003,9 +11144,10 @@ async fn test_handle_input_with_different_show_signature_settings(cx: &mut TestA
cx.editor(|editor, _, _| {
let signature_help_state = editor.signature_help_state.popover().cloned();
assert!(signature_help_state.is_some());
+ let signature = signature_help_state.unwrap();
assert_eq!(
- signature_help_state.unwrap().label,
- "param1: u8, param2: u8"
+ signature.signatures[signature.current_signature].label,
+ "fn sample(param1: u8, param2: u8)"
);
});
}
@@ -11064,9 +11206,10 @@ async fn test_signature_help(cx: &mut TestAppContext) {
cx.editor(|editor, _, _| {
let signature_help_state = editor.signature_help_state.popover().cloned();
assert!(signature_help_state.is_some());
+ let signature = signature_help_state.unwrap();
assert_eq!(
- signature_help_state.unwrap().label,
- "param1: u8, param2: u8"
+ signature.signatures[signature.current_signature].label,
+ "fn sample(param1: u8, param2: u8)"
);
});
@@ -11274,6 +11417,132 @@ async fn test_signature_help(cx: &mut TestAppContext) {
.await;
}
+#[gpui::test]
+async fn test_signature_help_multiple_signatures(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ signature_help_provider: Some(lsp::SignatureHelpOptions {
+ ..Default::default()
+ }),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ cx.set_state(indoc! {"
+ fn main() {
+ overloadedˇ
+ }
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("(", window, cx);
+ editor.show_signature_help(&ShowSignatureHelp, window, cx);
+ });
+
+ // Mock response with 3 signatures
+ let mocked_response = lsp::SignatureHelp {
+ signatures: vec![
+ lsp::SignatureInformation {
+ label: "fn overloaded(x: i32)".to_string(),
+ documentation: None,
+ parameters: Some(vec![lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("x: i32".to_string()),
+ documentation: None,
+ }]),
+ active_parameter: None,
+ },
+ lsp::SignatureInformation {
+ label: "fn overloaded(x: i32, y: i32)".to_string(),
+ documentation: None,
+ parameters: Some(vec![
+ lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("x: i32".to_string()),
+ documentation: None,
+ },
+ lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("y: i32".to_string()),
+ documentation: None,
+ },
+ ]),
+ active_parameter: None,
+ },
+ lsp::SignatureInformation {
+ label: "fn overloaded(x: i32, y: i32, z: i32)".to_string(),
+ documentation: None,
+ parameters: Some(vec![
+ lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("x: i32".to_string()),
+ documentation: None,
+ },
+ lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("y: i32".to_string()),
+ documentation: None,
+ },
+ lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("z: i32".to_string()),
+ documentation: None,
+ },
+ ]),
+ active_parameter: None,
+ },
+ ],
+ active_signature: Some(1),
+ active_parameter: Some(0),
+ };
+ handle_signature_help_request(&mut cx, mocked_response).await;
+
+ cx.condition(|editor, _| editor.signature_help_state.is_shown())
+ .await;
+
+ // Verify we have multiple signatures and the right one is selected
+ cx.editor(|editor, _, _| {
+ let popover = editor.signature_help_state.popover().cloned().unwrap();
+ assert_eq!(popover.signatures.len(), 3);
+ // active_signature was 1, so signature 1 should be the current one
+ assert_eq!(popover.current_signature, 1);
+ assert_eq!(popover.signatures[0].label, "fn overloaded(x: i32)");
+ assert_eq!(popover.signatures[1].label, "fn overloaded(x: i32, y: i32)");
+ assert_eq!(
+ popover.signatures[2].label,
+ "fn overloaded(x: i32, y: i32, z: i32)"
+ );
+ });
+
+ // Test navigation functionality
+ cx.update_editor(|editor, window, cx| {
+ editor.signature_help_next(&crate::SignatureHelpNext, window, cx);
+ });
+
+ cx.editor(|editor, _, _| {
+ let popover = editor.signature_help_state.popover().cloned().unwrap();
+ assert_eq!(popover.current_signature, 2);
+ });
+
+ // Test wrap around
+ cx.update_editor(|editor, window, cx| {
+ editor.signature_help_next(&crate::SignatureHelpNext, window, cx);
+ });
+
+ cx.editor(|editor, _, _| {
+ let popover = editor.signature_help_state.popover().cloned().unwrap();
+ assert_eq!(popover.current_signature, 0);
+ });
+
+ // Test previous navigation
+ cx.update_editor(|editor, window, cx| {
+ editor.signature_help_prev(&crate::SignatureHelpPrevious, window, cx);
+ });
+
+ cx.editor(|editor, _, _| {
+ let popover = editor.signature_help_state.popover().cloned().unwrap();
+ assert_eq!(popover.current_signature, 2);
+ });
+}
+
#[gpui::test]
async fn test_completion_mode(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -14905,7 +15174,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
.unwrap();
let _fake_server = fake_servers.next().await.unwrap();
update_test_language_settings(cx, |language_settings| {
- language_settings.languages.insert(
+ language_settings.languages.0.insert(
language_name.clone(),
LanguageSettingsContent {
tab_size: NonZeroU32::new(8),
@@ -15803,9 +16072,9 @@ fn completion_menu_entries(menu: &CompletionsMenu) -> Vec<String> {
#[gpui::test]
async fn test_document_format_with_prettier(cx: &mut TestAppContext) {
init_test(cx, |settings| {
- settings.defaults.formatter = Some(language_settings::SelectedFormatter::List(
- FormatterList(vec![Formatter::Prettier].into()),
- ))
+ settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
+ Formatter::Prettier,
+ )))
});
let fs = FakeFs::new(cx.executor());
@@ -15875,7 +16144,7 @@ async fn test_document_format_with_prettier(cx: &mut TestAppContext) {
);
update_test_language_settings(cx, |settings| {
- settings.defaults.formatter = Some(language_settings::SelectedFormatter::Auto)
+ settings.defaults.formatter = Some(SelectedFormatter::Auto)
});
let format = editor.update_in(cx, |editor, window, cx| {
editor.perform_format(
@@ -546,6 +546,8 @@ impl EditorElement {
}
});
register_action(editor, window, Editor::show_signature_help);
+ register_action(editor, window, Editor::signature_help_prev);
+ register_action(editor, window, Editor::signature_help_next);
register_action(editor, window, Editor::next_edit_prediction);
register_action(editor, window, Editor::previous_edit_prediction);
register_action(editor, window, Editor::show_inline_completion);
@@ -1873,7 +1875,7 @@ impl EditorElement {
let mut minimap = div()
.size_full()
- .shadow_sm()
+ .shadow_xs()
.px(PADDING_OFFSET)
.child(minimap_editor)
.into_any_element();
@@ -2442,7 +2444,7 @@ impl EditorElement {
.git
.inline_blame
.and_then(|settings| settings.min_column)
- .map(|col| self.column_pixels(col as usize, window, cx))
+ .map(|col| self.column_pixels(col as usize, window))
.unwrap_or(px(0.));
let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels;
@@ -2627,7 +2629,7 @@ impl EditorElement {
.enumerate()
.filter_map(|(i, indent_guide)| {
let single_indent_width =
- self.column_pixels(indent_guide.tab_size as usize, window, cx);
+ self.column_pixels(indent_guide.tab_size as usize, window);
let total_width = single_indent_width * indent_guide.depth as f32;
let start_x = content_origin.x + total_width - scroll_pixel_position.x;
if start_x >= text_origin.x {
@@ -2655,6 +2657,39 @@ impl EditorElement {
)
}
+ fn layout_wrap_guides(
+ &self,
+ em_advance: Pixels,
+ scroll_position: gpui::Point<f32>,
+ content_origin: gpui::Point<Pixels>,
+ scrollbar_layout: Option<&EditorScrollbars>,
+ vertical_scrollbar_width: Pixels,
+ hitbox: &Hitbox,
+ window: &Window,
+ cx: &App,
+ ) -> SmallVec<[(Pixels, bool); 2]> {
+ let scroll_left = scroll_position.x * em_advance;
+ let content_origin = content_origin.x;
+ let horizontal_offset = content_origin - scroll_left;
+ let vertical_scrollbar_width = scrollbar_layout
+ .and_then(|layout| layout.visible.then_some(vertical_scrollbar_width))
+ .unwrap_or_default();
+
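+ // Emit only the guides that fall within the visible text area, to the left of the vertical scrollbar.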
+ self.editor
+ .read(cx)
+ .wrap_guides(cx)
+ .into_iter()
+ .flat_map(|(guide, active)| {
+ let wrap_position = self.column_pixels(guide, window);
+ let wrap_guide_x = wrap_position + horizontal_offset;
+ let display_wrap_guide = wrap_guide_x >= content_origin
+ && wrap_guide_x <= hitbox.bounds.right() - vertical_scrollbar_width;
+
+ display_wrap_guide.then_some((wrap_guide_x, active))
+ })
+ .collect()
+ }
+
fn calculate_indent_guide_bounds(
row_range: Range<MultiBufferRow>,
line_height: Pixels,
@@ -4985,7 +5020,7 @@ impl EditorElement {
let maybe_element = self.editor.update(cx, |editor, cx| {
if let Some(popover) = editor.signature_help_state.popover_mut() {
- let element = popover.render(max_size, cx);
+ let element = popover.render(max_size, window, cx);
Some(element)
} else {
None
@@ -5238,26 +5273,7 @@ impl EditorElement {
paint_highlight(range.start, range.end, color, edges);
}
- let scroll_left =
- layout.position_map.snapshot.scroll_position().x * layout.position_map.em_width;
-
- for (wrap_position, active) in layout.wrap_guides.iter() {
- let x = (layout.position_map.text_hitbox.origin.x
- + *wrap_position
- + layout.position_map.em_width / 2.)
- - scroll_left;
-
- let show_scrollbars = layout
- .scrollbars_layout
- .as_ref()
- .map_or(false, |layout| layout.visible);
-
- if x < layout.position_map.text_hitbox.origin.x
- || (show_scrollbars && x > self.scrollbar_left(&layout.hitbox.bounds))
- {
- continue;
- }
-
+ for (guide_x, active) in layout.wrap_guides.iter() {
let color = if *active {
cx.theme().colors().editor_active_wrap_guide
} else {
@@ -5265,7 +5281,7 @@ impl EditorElement {
};
window.paint_quad(fill(
Bounds {
- origin: point(x, layout.position_map.text_hitbox.origin.y),
+ origin: point(*guide_x, layout.position_map.text_hitbox.origin.y),
size: size(px(1.), layout.position_map.text_hitbox.size.height),
},
color,
@@ -6676,7 +6692,7 @@ impl EditorElement {
let position_map: &PositionMap = &position_map;
let line_height = position_map.line_height;
- let max_glyph_width = position_map.em_width;
+ let max_glyph_advance = position_map.em_advance;
let (delta, axis) = match delta {
gpui::ScrollDelta::Pixels(mut pixels) => {
//Trackpad
@@ -6687,15 +6703,15 @@ impl EditorElement {
gpui::ScrollDelta::Lines(lines) => {
//Not trackpad
let pixels =
- point(lines.x * max_glyph_width, lines.y * line_height);
+ point(lines.x * max_glyph_advance, lines.y * line_height);
(pixels, None)
}
};
let current_scroll_position = position_map.snapshot.scroll_position();
- let x = (current_scroll_position.x * max_glyph_width
+ let x = (current_scroll_position.x * max_glyph_advance
- (delta.x * scroll_sensitivity))
- / max_glyph_width;
+ / max_glyph_advance;
let y = (current_scroll_position.y * line_height
- (delta.y * scroll_sensitivity))
/ line_height;
@@ -6856,11 +6872,7 @@ impl EditorElement {
});
}
- fn scrollbar_left(&self, bounds: &Bounds<Pixels>) -> Pixels {
- bounds.top_right().x - self.style.scrollbar_width
- }
-
- fn column_pixels(&self, column: usize, window: &mut Window, _: &mut App) -> Pixels {
+ fn column_pixels(&self, column: usize, window: &Window) -> Pixels {
let style = &self.style;
let font_size = style.text.font_size.to_pixels(window.rem_size());
let layout = window.text_system().shape_line(
@@ -6879,14 +6891,9 @@ impl EditorElement {
layout.width
}
- fn max_line_number_width(
- &self,
- snapshot: &EditorSnapshot,
- window: &mut Window,
- cx: &mut App,
- ) -> Pixels {
+ fn max_line_number_width(&self, snapshot: &EditorSnapshot, window: &mut Window) -> Pixels {
let digit_count = snapshot.widest_line_number().ilog10() + 1;
- self.column_pixels(digit_count as usize, window, cx)
+ self.column_pixels(digit_count as usize, window)
}
fn shape_line_number(
@@ -7787,7 +7794,7 @@ impl Element for EditorElement {
} => {
let editor_handle = cx.entity().clone();
let max_line_number_width =
- self.max_line_number_width(&editor.snapshot(window, cx), window, cx);
+ self.max_line_number_width(&editor.snapshot(window, cx), window);
window.request_measured_layout(
Style::default(),
move |known_dimensions, available_space, window, cx| {
@@ -7877,7 +7884,7 @@ impl Element for EditorElement {
.gutter_dimensions(
font_id,
font_size,
- self.max_line_number_width(&snapshot, window, cx),
+ self.max_line_number_width(&snapshot, window),
cx,
)
.or_else(|| {
@@ -7952,14 +7959,6 @@ impl Element for EditorElement {
}
});
- let wrap_guides = self
- .editor
- .read(cx)
- .wrap_guides(cx)
- .iter()
- .map(|(guide, active)| (self.column_pixels(*guide, window, cx), *active))
- .collect::<SmallVec<[_; 2]>>();
-
let hitbox = window.insert_hitbox(bounds, HitboxBehavior::Normal);
let gutter_hitbox = window.insert_hitbox(
gutter_bounds(bounds, gutter_dimensions),
@@ -8591,7 +8590,7 @@ impl Element for EditorElement {
start_row,
editor_content_width,
scroll_width,
- em_width,
+ em_advance,
&line_layouts,
cx,
)
@@ -8795,6 +8794,17 @@ impl Element for EditorElement {
self.prepaint_expand_toggles(&mut expand_toggles, window, cx)
});
+ let wrap_guides = self.layout_wrap_guides(
+ em_advance,
+ scroll_position,
+ content_origin,
+ scrollbars_layout.as_ref(),
+ vertical_scrollbar_width,
+ &hitbox,
+ window,
+ cx,
+ );
+
let minimap = window.with_element_namespace("minimap", |window| {
self.layout_minimap(
&snapshot,
@@ -233,31 +233,25 @@ pub fn deploy_context_menu(
.action("Copy and Trim", Box::new(CopyAndTrim))
.action("Paste", Box::new(Paste))
.separator()
- .map(|builder| {
- let reveal_in_finder_label = if cfg!(target_os = "macos") {
+ .action_disabled_when(
+ !has_reveal_target,
+ if cfg!(target_os = "macos") {
"Reveal in Finder"
} else {
"Reveal in File Manager"
- };
- const OPEN_IN_TERMINAL_LABEL: &str = "Open in Terminal";
- if has_reveal_target {
- builder
- .action(reveal_in_finder_label, Box::new(RevealInFileManager))
- .action(OPEN_IN_TERMINAL_LABEL, Box::new(OpenInTerminal))
- } else {
- builder
- .disabled_action(reveal_in_finder_label, Box::new(RevealInFileManager))
- .disabled_action(OPEN_IN_TERMINAL_LABEL, Box::new(OpenInTerminal))
- }
- })
- .map(|builder| {
- const COPY_PERMALINK_LABEL: &str = "Copy Permalink";
- if has_git_repo {
- builder.action(COPY_PERMALINK_LABEL, Box::new(CopyPermalinkToLine))
- } else {
- builder.disabled_action(COPY_PERMALINK_LABEL, Box::new(CopyPermalinkToLine))
- }
- });
+ },
+ Box::new(RevealInFileManager),
+ )
+ .action_disabled_when(
+ !has_reveal_target,
+ "Open in Terminal",
+ Box::new(OpenInTerminal),
+ )
+ .action_disabled_when(
+ !has_git_repo,
+ "Copy Permalink",
+ Box::new(CopyPermalinkToLine),
+ );
match focus {
Some(focus) => builder.context(focus),
None => builder,
@@ -1,18 +1,22 @@
use crate::actions::ShowSignatureHelp;
-use crate::{Editor, EditorSettings, ToggleAutoSignatureHelp};
+use crate::hover_popover::open_markdown_url;
+use crate::{Editor, EditorSettings, ToggleAutoSignatureHelp, hover_markdown_style};
use gpui::{
- App, Context, HighlightStyle, MouseButton, Size, StyledText, Task, TextStyle, Window,
- combine_highlights,
+ App, Context, Div, Entity, HighlightStyle, MouseButton, ScrollHandle, Size, Stateful,
+ StyledText, Task, TextStyle, Window, combine_highlights,
};
use language::BufferSnapshot;
+use markdown::{Markdown, MarkdownElement};
use multi_buffer::{Anchor, ToOffset};
use settings::Settings;
use std::ops::Range;
use text::Rope;
use theme::ThemeSettings;
use ui::{
- ActiveTheme, AnyElement, InteractiveElement, IntoElement, ParentElement, Pixels, SharedString,
- StatefulInteractiveElement, Styled, StyledExt, div, relative,
+ ActiveTheme, AnyElement, ButtonCommon, ButtonStyle, Clickable, FluentBuilder, IconButton,
+ IconButtonShape, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon,
+ LabelSize, ParentElement, Pixels, Scrollbar, ScrollbarState, SharedString,
+ StatefulInteractiveElement, Styled, StyledExt, div, px, relative,
};
// Language-specific settings may define quotes as "brackets", so filter them out separately.
@@ -37,15 +41,14 @@ impl Editor {
.map(|auto_signature_help| !auto_signature_help)
.or_else(|| Some(!EditorSettings::get_global(cx).auto_signature_help));
match self.auto_signature_help {
- Some(auto_signature_help) if auto_signature_help => {
+ Some(true) => {
self.show_signature_help(&ShowSignatureHelp, window, cx);
}
- Some(_) => {
+ Some(false) => {
self.hide_signature_help(cx, SignatureHelpHiddenBy::AutoClose);
}
None => {}
}
- cx.notify();
}
pub(super) fn hide_signature_help(
@@ -54,7 +57,7 @@ impl Editor {
signature_help_hidden_by: SignatureHelpHiddenBy,
) -> bool {
if self.signature_help_state.is_shown() {
- self.signature_help_state.kill_task();
+ self.signature_help_state.task = None;
self.signature_help_state.hide(signature_help_hidden_by);
cx.notify();
true
@@ -187,31 +190,62 @@ impl Editor {
};
if let Some(language) = language {
- let text = Rope::from(signature_help.label.clone());
- let highlights = language
- .highlight_text(&text, 0..signature_help.label.len())
- .into_iter()
- .flat_map(|(range, highlight_id)| {
- Some((range, highlight_id.style(&cx.theme().syntax())?))
- });
- signature_help.highlights =
- combine_highlights(signature_help.highlights, highlights).collect()
+ for signature in &mut signature_help.signatures {
+ let text = Rope::from(signature.label.to_string());
+ let highlights = language
+ .highlight_text(&text, 0..signature.label.len())
+ .into_iter()
+ .flat_map(|(range, highlight_id)| {
+ Some((range, highlight_id.style(&cx.theme().syntax())?))
+ });
+ signature.highlights =
+ combine_highlights(signature.highlights.clone(), highlights)
+ .collect();
+ }
}
let settings = ThemeSettings::get_global(cx);
- let text_style = TextStyle {
+ let style = TextStyle {
color: cx.theme().colors().text,
font_family: settings.buffer_font.family.clone(),
font_fallbacks: settings.buffer_font.fallbacks.clone(),
font_size: settings.buffer_font_size(cx).into(),
font_weight: settings.buffer_font.weight,
line_height: relative(settings.buffer_line_height.value()),
- ..Default::default()
+ ..TextStyle::default()
};
+ let scroll_handle = ScrollHandle::new();
+ let signatures = signature_help
+ .signatures
+ .into_iter()
+ .map(|s| SignatureHelp {
+ label: s.label,
+ documentation: s.documentation,
+ highlights: s.highlights,
+ active_parameter: s.active_parameter,
+ parameter_documentation: s
+ .active_parameter
+ .and_then(|idx| s.parameters.get(idx))
+ .and_then(|param| param.documentation.clone()),
+ })
+ .collect::<Vec<_>>();
+
+ if signatures.is_empty() {
+ editor
+ .signature_help_state
+ .hide(SignatureHelpHiddenBy::AutoClose);
+ return;
+ }
+
+ let current_signature = signature_help
+ .active_signature
+ .min(signatures.len().saturating_sub(1));
let signature_help_popover = SignatureHelpPopover {
- label: signature_help.label.into(),
- highlights: signature_help.highlights,
- style: text_style,
+ scrollbar_state: ScrollbarState::new(scroll_handle.clone()),
+ style,
+ signatures,
+ current_signature,
+ scroll_handle,
};
editor
.signature_help_state
@@ -231,15 +265,11 @@ pub struct SignatureHelpState {
}
impl SignatureHelpState {
- pub fn set_task(&mut self, task: Task<()>) {
+ fn set_task(&mut self, task: Task<()>) {
self.task = Some(task);
self.hidden_by = None;
}
- pub fn kill_task(&mut self) {
- self.task = None;
- }
-
#[cfg(test)]
pub fn popover(&self) -> Option<&SignatureHelpPopover> {
self.popover.as_ref()
@@ -249,25 +279,31 @@ impl SignatureHelpState {
self.popover.as_mut()
}
- pub fn set_popover(&mut self, popover: SignatureHelpPopover) {
+ fn set_popover(&mut self, popover: SignatureHelpPopover) {
self.popover = Some(popover);
self.hidden_by = None;
}
- pub fn hide(&mut self, hidden_by: SignatureHelpHiddenBy) {
+ fn hide(&mut self, hidden_by: SignatureHelpHiddenBy) {
if self.hidden_by.is_none() {
self.popover = None;
self.hidden_by = Some(hidden_by);
}
}
- pub fn hidden_by_selection(&self) -> bool {
+ fn hidden_by_selection(&self) -> bool {
self.hidden_by == Some(SignatureHelpHiddenBy::Selection)
}
pub fn is_shown(&self) -> bool {
self.popover.is_some()
}
+
+ pub fn has_multiple_signatures(&self) -> bool {
+ self.popover
+ .as_ref()
+ .is_some_and(|popover| popover.signatures.len() > 1)
+ }
}
#[cfg(test)]
@@ -278,28 +314,170 @@ impl SignatureHelpState {
}
#[derive(Clone, Debug, PartialEq)]
+pub struct SignatureHelp {
+ pub(crate) label: SharedString,
+ documentation: Option<Entity<Markdown>>,
+ highlights: Vec<(Range<usize>, HighlightStyle)>,
+ active_parameter: Option<usize>,
+ parameter_documentation: Option<Entity<Markdown>>,
+}
+
+#[derive(Clone, Debug)]
pub struct SignatureHelpPopover {
- pub label: SharedString,
pub style: TextStyle,
- pub highlights: Vec<(Range<usize>, HighlightStyle)>,
+ pub signatures: Vec<SignatureHelp>,
+ pub current_signature: usize,
+ scroll_handle: ScrollHandle,
+ scrollbar_state: ScrollbarState,
}
impl SignatureHelpPopover {
- pub fn render(&mut self, max_size: Size<Pixels>, cx: &mut Context<Editor>) -> AnyElement {
+ pub fn render(
+ &mut self,
+ max_size: Size<Pixels>,
+ window: &mut Window,
+ cx: &mut Context<Editor>,
+ ) -> AnyElement {
+ let Some(signature) = self.signatures.get(self.current_signature) else {
+ return div().into_any_element();
+ };
+
+ let main_content = div()
+ .occlude()
+ .p_2()
+ .child(
+ div()
+ .id("signature_help_container")
+ .overflow_y_scroll()
+ .max_w(max_size.width)
+ .max_h(max_size.height)
+ .track_scroll(&self.scroll_handle)
+ .child(
+ StyledText::new(signature.label.clone()).with_default_highlights(
+ &self.style,
+ signature.highlights.iter().cloned(),
+ ),
+ )
+ .when_some(
+ signature.parameter_documentation.clone(),
+ |this, param_doc| {
+ this.child(div().h_px().bg(cx.theme().colors().border_variant).my_1())
+ .child(
+ MarkdownElement::new(
+ param_doc,
+ hover_markdown_style(window, cx),
+ )
+ .code_block_renderer(markdown::CodeBlockRenderer::Default {
+ copy_button: false,
+ border: false,
+ copy_button_on_hover: false,
+ })
+ .on_url_click(open_markdown_url),
+ )
+ },
+ )
+ .when_some(signature.documentation.clone(), |this, description| {
+ this.child(div().h_px().bg(cx.theme().colors().border_variant).my_1())
+ .child(
+ MarkdownElement::new(description, hover_markdown_style(window, cx))
+ .code_block_renderer(markdown::CodeBlockRenderer::Default {
+ copy_button: false,
+ border: false,
+ copy_button_on_hover: false,
+ })
+ .on_url_click(open_markdown_url),
+ )
+ }),
+ )
+ .child(self.render_vertical_scrollbar(cx));
+ let controls = if self.signatures.len() > 1 {
+ let prev_button = IconButton::new("signature_help_prev", IconName::ChevronUp)
+ .shape(IconButtonShape::Square)
+ .style(ButtonStyle::Subtle)
+ .icon_size(IconSize::Small)
+ .tooltip(move |window, cx| {
+ ui::Tooltip::for_action(
+ "Previous Signature",
+ &crate::SignatureHelpPrevious,
+ window,
+ cx,
+ )
+ })
+ .on_click(cx.listener(|editor, _, window, cx| {
+ editor.signature_help_prev(&crate::SignatureHelpPrevious, window, cx);
+ }));
+
+ let next_button = IconButton::new("signature_help_next", IconName::ChevronDown)
+ .shape(IconButtonShape::Square)
+ .style(ButtonStyle::Subtle)
+ .icon_size(IconSize::Small)
+ .tooltip(move |window, cx| {
+ ui::Tooltip::for_action("Next Signature", &crate::SignatureHelpNext, window, cx)
+ })
+ .on_click(cx.listener(|editor, _, window, cx| {
+ editor.signature_help_next(&crate::SignatureHelpNext, window, cx);
+ }));
+
+ let page = Label::new(format!(
+ "{}/{}",
+ self.current_signature + 1,
+ self.signatures.len()
+ ))
+ .size(LabelSize::Small);
+
+ Some(
+ div()
+ .flex()
+ .flex_col()
+ .items_center()
+ .gap_0p5()
+ .px_0p5()
+ .py_0p5()
+ .children([
+ prev_button.into_any_element(),
+ div().child(page).into_any_element(),
+ next_button.into_any_element(),
+ ])
+ .into_any_element(),
+ )
+ } else {
+ None
+ };
div()
- .id("signature_help_popover")
.elevation_2(cx)
- .overflow_y_scroll()
- .max_w(max_size.width)
- .max_h(max_size.height)
- .on_mouse_move(|_, _, cx| cx.stop_propagation())
.on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
- .child(
- div().px_2().py_0p5().child(
- StyledText::new(self.label.clone())
- .with_default_highlights(&self.style, self.highlights.iter().cloned()),
- ),
- )
+ .on_mouse_move(|_, _, cx| cx.stop_propagation())
+ .flex()
+ .flex_row()
+ .when_some(controls, |this, controls| {
+ this.children(vec![
+ div().flex().items_end().child(controls),
+ div().w_px().bg(cx.theme().colors().border_variant),
+ ])
+ })
+ .child(main_content)
.into_any_element()
}
+
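+ // The scrollbar layer occludes the popover content and swallows mouse events so they do not reach the editor underneath.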
+ fn render_vertical_scrollbar(&self, cx: &mut Context<Editor>) -> Stateful<Div> {
+ div()
+ .occlude()
+ .id("signature_help_scrollbar")
+ .on_mouse_move(cx.listener(|_, _, _, cx| {
+ cx.notify();
+ cx.stop_propagation()
+ }))
+ .on_hover(|_, _, cx| cx.stop_propagation())
+ .on_any_mouse_down(|_, _, cx| cx.stop_propagation())
+ .on_mouse_up(MouseButton::Left, |_, _, cx| cx.stop_propagation())
+ .on_scroll_wheel(cx.listener(|_, _, _, cx| cx.notify()))
+ .h_full()
+ .absolute()
+ .right_1()
+ .top_1()
+ .bottom_1()
+ .w(px(12.))
+ .cursor_default()
+ .children(Scrollbar::vertical(self.scrollbar_state.clone()))
+ }
}
@@ -32,7 +32,7 @@ client.workspace = true
collections.workspace = true
debug_adapter_extension.workspace = true
dirs.workspace = true
-dotenv.workspace = true
+dotenvy.workspace = true
env_logger.workspace = true
extension.workspace = true
fs.workspace = true
@@ -63,7 +63,7 @@ struct Args {
}
fn main() {
- dotenv::from_filename(CARGO_MANIFEST_DIR.join(".env")).ok();
+ dotenvy::from_filename(CARGO_MANIFEST_DIR.join(".env")).ok();
env_logger::init();
@@ -1054,6 +1054,15 @@ pub fn response_events_to_markdown(
| LanguageModelCompletionEvent::StartMessage { .. }
| LanguageModelCompletionEvent::StatusUpdate { .. },
) => {}
+ Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ json_parse_error, ..
+ }) => {
+ flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer);
+ response.push_str(&format!(
+ "**Error**: parse error in tool use JSON: {}\n\n",
+ json_parse_error
+ ));
+ }
Err(error) => {
flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer);
response.push_str(&format!("**Error**: {}\n\n", error));
@@ -1132,6 +1141,17 @@ impl ThreadDialog {
| Ok(LanguageModelCompletionEvent::StartMessage { .. })
| Ok(LanguageModelCompletionEvent::Stop(_)) => {}
+ Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ json_parse_error,
+ ..
+ }) => {
+ flush_text(&mut current_text, &mut content);
+ content.push(MessageContent::Text(format!(
+ "ERROR: parse error in tool use JSON: {}",
+ json_parse_error
+ )));
+ }
+
Err(error) => {
flush_text(&mut current_text, &mut content);
content.push(MessageContent::Text(format!("ERROR: {}", error)));
@@ -1,5 +1,6 @@
use crate::{
- ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, parse_wasm_extension_version,
+ ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, build_debug_adapter_schema_path,
+ parse_wasm_extension_version,
};
use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
@@ -99,12 +100,8 @@ impl ExtensionBuilder {
}
for (debug_adapter_name, meta) in &mut extension_manifest.debug_adapters {
- let debug_adapter_relative_schema_path =
- meta.schema_path.clone().unwrap_or_else(|| {
- Path::new("debug_adapter_schemas")
- .join(Path::new(debug_adapter_name.as_ref()).with_extension("json"))
- });
- let debug_adapter_schema_path = extension_dir.join(debug_adapter_relative_schema_path);
+ let debug_adapter_schema_path =
+ extension_dir.join(build_debug_adapter_schema_path(debug_adapter_name, meta));
let debug_adapter_schema = fs::read_to_string(&debug_adapter_schema_path)
.with_context(|| {
@@ -130,6 +130,22 @@ impl ExtensionManifest {
Ok(())
}
+
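+ /// Whether this extension contributes language servers, debug adapters, or debug locators,
+ /// and therefore needs to be loaded on remote hosts.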
+ pub fn allow_remote_load(&self) -> bool {
+ !self.language_servers.is_empty()
+ || !self.debug_adapters.is_empty()
+ || !self.debug_locators.is_empty()
+ }
+}
+
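+/// Returns the path to a debug adapter's schema file, defaulting to
+/// `debug_adapter_schemas/<adapter_name>.json` when the manifest entry does not specify one.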
+pub fn build_debug_adapter_schema_path(
+ adapter_name: &Arc<str>,
+ meta: &DebugAdapterManifestEntry,
+) -> PathBuf {
+ meta.schema_path.clone().unwrap_or_else(|| {
+ Path::new("debug_adapter_schemas")
+ .join(Path::new(adapter_name.as_ref()).with_extension("json"))
+ })
}
/// A capability for an extension.
@@ -320,6 +336,29 @@ mod tests {
}
}
+ #[test]
+ fn test_build_adapter_schema_path_with_schema_path() {
+ let adapter_name = Arc::from("my_adapter");
+ let entry = DebugAdapterManifestEntry {
+ schema_path: Some(PathBuf::from("foo/bar")),
+ };
+
+ let path = build_debug_adapter_schema_path(&adapter_name, &entry);
+ assert_eq!(path, PathBuf::from("foo/bar"));
+ }
+
+ #[test]
+ fn test_build_adapter_schema_path_without_schema_path() {
+ let adapter_name = Arc::from("my_adapter");
+ let entry = DebugAdapterManifestEntry { schema_path: None };
+
+ let path = build_debug_adapter_schema_path(&adapter_name, &entry);
+ assert_eq!(
+ path,
+ PathBuf::from("debug_adapter_schemas").join("my_adapter.json")
+ );
+ }
+
#[test]
fn test_allow_exact_match() {
let manifest = ExtensionManifest {
@@ -33,7 +33,7 @@ interface dap {
}
/// Debug Config is the "highest-level" configuration for a debug session.
- /// It comes from a new session modal UI; thus, it is essentially debug-adapter-agnostic.
+ /// It comes from a new process modal UI; thus, it is essentially debug-adapter-agnostic.
/// It is expected of the extension to translate this generic configuration into something that can be debugged by the adapter (debug scenario).
record debug-config {
/// Name of the debug task
@@ -178,7 +178,13 @@ pub struct ExtensionIndexLanguageEntry {
pub grammar: Option<Arc<str>>,
}
-actions!(zed, [ReloadExtensions]);
+actions!(
+ zed,
+ [
+ /// Reloads all installed extensions.
+ ReloadExtensions
+ ]
+);
pub fn init(
extension_host_proxy: Arc<ExtensionHostProxy>,
@@ -1633,6 +1639,23 @@ impl ExtensionStore {
}
}
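+ // Copy any debug adapter schema files into the destination directory, creating parent directories as needed.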
+ for (adapter_name, meta) in loaded_extension.manifest.debug_adapters.iter() {
+ let schema_path = &extension::build_debug_adapter_schema_path(adapter_name, meta);
+
+ if fs.is_file(&src_dir.join(schema_path)).await {
+ match schema_path.parent() {
+ Some(parent) => fs.create_dir(&tmp_dir.join(parent)).await?,
+ None => {}
+ }
+ fs.copy_file(
+ &src_dir.join(schema_path),
+ &tmp_dir.join(schema_path),
+ fs::CopyOptions::default(),
+ )
+ .await?
+ }
+ }
+
Ok(())
})
}
@@ -1647,7 +1670,7 @@ impl ExtensionStore {
.extensions
.iter()
.filter_map(|(id, entry)| {
- if entry.manifest.language_servers.is_empty() {
+ if !entry.manifest.allow_remote_load() {
return None;
}
Some(proto::Extension {
@@ -4,8 +4,8 @@ use anyhow::{Context as _, Result};
use client::{TypedEnvelope, proto};
use collections::{HashMap, HashSet};
use extension::{
- Extension, ExtensionHostProxy, ExtensionLanguageProxy, ExtensionLanguageServerProxy,
- ExtensionManifest,
+ Extension, ExtensionDebugAdapterProviderProxy, ExtensionHostProxy, ExtensionLanguageProxy,
+ ExtensionLanguageServerProxy, ExtensionManifest,
};
use fs::{Fs, RemoveOptions, RenameOptions};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
@@ -125,7 +125,7 @@ impl HeadlessExtensionStore {
let manifest = Arc::new(ExtensionManifest::load(fs.clone(), &extension_dir).await?);
- debug_assert!(!manifest.languages.is_empty() || !manifest.language_servers.is_empty());
+ debug_assert!(!manifest.languages.is_empty() || manifest.allow_remote_load());
if manifest.version.as_ref() != extension.version.as_str() {
anyhow::bail!(
@@ -165,12 +165,13 @@ impl HeadlessExtensionStore {
})?;
}
- if manifest.language_servers.is_empty() {
+ if !manifest.allow_remote_load() {
return Ok(());
}
- let wasm_extension: Arc<dyn Extension> =
- Arc::new(WasmExtension::load(extension_dir, &manifest, wasm_host.clone(), &cx).await?);
+ let wasm_extension: Arc<dyn Extension> = Arc::new(
+ WasmExtension::load(extension_dir.clone(), &manifest, wasm_host.clone(), &cx).await?,
+ );
for (language_server_id, language_server_config) in &manifest.language_servers {
for language in language_server_config.languages() {
@@ -186,6 +187,28 @@ impl HeadlessExtensionStore {
);
})?;
}
+ log::info!("Loaded language server: {}", language_server_id);
+ }
+
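+ // Register the extension's debug adapters and locators with the host proxy so headless hosts can use them.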
+ for (debug_adapter, meta) in &manifest.debug_adapters {
+ let schema_path = extension::build_debug_adapter_schema_path(debug_adapter, meta);
+
+ this.update(cx, |this, _cx| {
+ this.proxy.register_debug_adapter(
+ wasm_extension.clone(),
+ debug_adapter.clone(),
+ &extension_dir.join(schema_path),
+ );
+ })?;
+ log::info!("Loaded debug adapter: {}", debug_adapter);
+ }
+
+ for debug_locator in manifest.debug_locators.keys() {
+ this.update(cx, |this, _cx| {
+ this.proxy
+ .register_debug_locator(wasm_extension.clone(), debug_locator.clone());
+ })?;
+ log::info!("Loaded debug locator: {}", debug_locator);
}
Ok(())
@@ -999,7 +999,7 @@ impl Extension {
) -> Result<Result<DebugRequest, String>> {
match self {
Extension::V0_6_0(ext) => {
- let build_config_template = resolved_build_task.into();
+ let build_config_template = resolved_build_task.try_into()?;
let dap_request = ext
.call_run_dap_locator(store, &locator_name, &build_config_template)
.await?
@@ -299,15 +299,17 @@ impl From<extension::DebugScenario> for DebugScenario {
}
}
-impl From<SpawnInTerminal> for ResolvedTask {
- fn from(value: SpawnInTerminal) -> Self {
- Self {
+impl TryFrom<SpawnInTerminal> for ResolvedTask {
+ type Error = anyhow::Error;
+
+ fn try_from(value: SpawnInTerminal) -> Result<Self, Self::Error> {
+ Ok(Self {
label: value.label,
- command: value.command,
+ command: value.command.context("missing command")?,
args: value.args,
env: value.env.into_iter().collect(),
cwd: value.cwd.map(|s| s.to_string_lossy().into_owned()),
- }
+ })
}
}
@@ -70,6 +70,7 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[
("templ", &["templ"]),
("terraform", &["tf", "tfvars", "hcl"]),
("toml", &["Cargo.lock", "toml"]),
+ ("typst", &["typ"]),
("vue", &["vue"]),
("wgsl", &["wgsl"]),
("wit", &["wit"]),
@@ -38,7 +38,13 @@ use crate::extension_version_selector::{
ExtensionVersionSelector, ExtensionVersionSelectorDelegate,
};
-actions!(zed, [InstallDevExtension]);
+actions!(
+ zed,
+ [
+ /// Installs an extension from a local directory for development.
+ InstallDevExtension
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(move |workspace: &mut Workspace, window, cx| {
@@ -11,9 +11,13 @@ pub mod system_specs;
actions!(
zed,
[
+ /// Copies system specifications to the clipboard for bug reports.
CopySystemSpecsIntoClipboard,
+ /// Opens the email client to send feedback to Zed support.
EmailZed,
+ /// Opens the Zed repository on GitHub.
OpenZedRepo,
+ /// Opens the feature request form.
RequestFeature,
]
);
@@ -47,7 +47,14 @@ use workspace::{
actions!(
file_finder,
- [SelectPrevious, ToggleFilterMenu, ToggleSplitMenu]
+ [
+ /// Selects the previous item in the file finder.
+ SelectPrevious,
+ /// Toggles the file filter menu.
+ ToggleFilterMenu,
+ /// Toggles the split direction menu.
+ ToggleSplitMenu
+ ]
);
impl ModalView for FileFinder {
@@ -31,40 +31,67 @@ actions!(
git,
[
// per-hunk
+ /// Toggles the staged state of the hunk at cursor.
ToggleStaged,
+ /// Stages the current hunk and moves to the next one.
StageAndNext,
+ /// Unstages the current hunk and moves to the next one.
UnstageAndNext,
+ /// Restores the selected hunks to their original state.
#[action(deprecated_aliases = ["editor::RevertSelectedHunks"])]
Restore,
// per-file
+ /// Shows git blame information for the current file.
#[action(deprecated_aliases = ["editor::ToggleGitBlame"])]
Blame,
+ /// Stages the current file.
StageFile,
+ /// Unstages the current file.
UnstageFile,
// repo-wide
+ /// Stages all changes in the repository.
StageAll,
+ /// Unstages all changes in the repository.
UnstageAll,
+ /// Restores all tracked files to their last committed state.
RestoreTrackedFiles,
+ /// Moves all untracked files to trash.
TrashUntrackedFiles,
+ /// Undoes the last commit, keeping changes in the working directory.
Uncommit,
+ /// Pushes commits to the remote repository.
Push,
+ /// Pushes commits to a specific remote branch.
PushTo,
+ /// Force pushes commits to the remote repository.
ForcePush,
+ /// Pulls changes from the remote repository.
Pull,
+ /// Fetches changes from the remote repository.
Fetch,
+ /// Fetches changes from a specific remote.
FetchFrom,
+ /// Creates a new commit with staged changes.
Commit,
+ /// Amends the last commit with staged changes.
Amend,
+ /// Cancels the current git operation.
Cancel,
+ /// Expands the commit message editor.
ExpandCommitEditor,
+ /// Generates a commit message using AI.
GenerateCommitMessage,
+ /// Initializes a new git repository.
Init,
+ /// Opens all modified files in the editor.
OpenModifiedFiles,
]
);
+/// Restores a file to its last committed state, discarding local changes.
#[derive(Clone, Debug, Default, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = git, deprecated_aliases = ["editor::RevertFile"])]
+#[serde(deny_unknown_fields)]
pub struct RestoreFile {
#[serde(default)]
pub skip_prompt: bool,
@@ -77,11 +77,17 @@ use zed_llm_client::CompletionIntent;
actions!(
git_panel,
[
+ /// Closes the git panel.
Close,
+ /// Toggles focus on the git panel.
ToggleFocus,
+ /// Opens the git panel menu.
OpenMenu,
+ /// Focuses on the commit message editor.
FocusEditor,
+ /// Focuses on the changes list.
FocusChanges,
+ /// Toggles automatic co-author suggestions.
ToggleFillCoAuthors,
]
);
@@ -122,40 +128,29 @@ fn git_panel_context_menu(
ContextMenu::build(window, cx, move |context_menu, _, _| {
context_menu
.context(focus_handle)
- .map(|menu| {
- if state.has_unstaged_changes {
- menu.action("Stage All", StageAll.boxed_clone())
- } else {
- menu.disabled_action("Stage All", StageAll.boxed_clone())
- }
- })
- .map(|menu| {
- if state.has_staged_changes {
- menu.action("Unstage All", UnstageAll.boxed_clone())
- } else {
- menu.disabled_action("Unstage All", UnstageAll.boxed_clone())
- }
- })
+ .action_disabled_when(
+ !state.has_unstaged_changes,
+ "Stage All",
+ StageAll.boxed_clone(),
+ )
+ .action_disabled_when(
+ !state.has_staged_changes,
+ "Unstage All",
+ UnstageAll.boxed_clone(),
+ )
.separator()
.action("Open Diff", project_diff::Diff.boxed_clone())
.separator()
- .map(|menu| {
- if state.has_tracked_changes {
- menu.action("Discard Tracked Changes", RestoreTrackedFiles.boxed_clone())
- } else {
- menu.disabled_action(
- "Discard Tracked Changes",
- RestoreTrackedFiles.boxed_clone(),
- )
- }
- })
- .map(|menu| {
- if state.has_new_changes {
- menu.action("Trash Untracked Files", TrashUntrackedFiles.boxed_clone())
- } else {
- menu.disabled_action("Trash Untracked Files", TrashUntrackedFiles.boxed_clone())
- }
- })
+ .action_disabled_when(
+ !state.has_tracked_changes,
+ "Discard Tracked Changes",
+ RestoreTrackedFiles.boxed_clone(),
+ )
+ .action_disabled_when(
+ !state.has_new_changes,
+ "Trash Untracked Files",
+ TrashUntrackedFiles.boxed_clone(),
+ )
})
}
@@ -388,6 +383,7 @@ pub(crate) fn commit_message_editor(
commit_editor.set_collaboration_hub(Box::new(project));
commit_editor.set_use_autoclose(false);
commit_editor.set_show_gutter(false, cx);
+ commit_editor.set_use_modal_editing(true);
commit_editor.set_show_wrap_guides(false, cx);
commit_editor.set_show_indent_guides(false, cx);
let placeholder = placeholder.unwrap_or("Enter commit message".into());
@@ -31,7 +31,13 @@ pub mod project_diff;
pub(crate) mod remote_output;
pub mod repository_selector;
-actions!(git, [ResetOnboarding]);
+actions!(
+ git,
+ [
+ /// Resets the git onboarding state to show the tutorial again.
+ ResetOnboarding
+ ]
+);
pub fn init(cx: &mut App) {
GitPanelSettings::register(cx);
@@ -41,7 +41,15 @@ use workspace::{
searchable::SearchableItemHandle,
};
-actions!(git, [Diff, Add]);
+actions!(
+ git,
+ [
+ /// Shows the diff between the working directory and the index.
+ Diff,
+ /// Adds files to the git staging area.
+ Add
+ ]
+);
pub struct ProjectDiff {
project: Entity<Project>,
@@ -12,7 +12,7 @@ license = "Apache-2.0"
workspace = true
[features]
-default = ["http_client", "font-kit", "wayland", "x11"]
+default = ["http_client", "font-kit", "wayland", "x11", "windows-manifest"]
test-support = [
"leak-detection",
"collections/test-support",
@@ -50,7 +50,6 @@ wayland = [
"filedescriptor",
"xkbcommon",
"open",
- "scap",
]
x11 = [
"blade-graphics",
@@ -67,9 +66,11 @@ x11 = [
"x11-clipboard",
"filedescriptor",
"open",
+]
+screen-capture = [
"scap",
]
-
+windows-manifest = []
[lib]
path = "src/gpui.rs"
@@ -17,7 +17,7 @@ fn main() {
#[cfg(target_os = "macos")]
macos::build();
}
- #[cfg(target_os = "windows")]
+ #[cfg(all(target_os = "windows", feature = "windows-manifest"))]
Ok("windows") => {
let manifest = std::path::Path::new("resources/windows/gpui.manifest.xml");
let rc_file = std::path::Path::new("resources/windows/gpui.rc");
@@ -126,7 +126,7 @@ mod macos {
"ContentMask".into(),
"Uniforms".into(),
"AtlasTile".into(),
- "PathRasterizationInputIndex".into(),
+ "PathInputIndex".into(),
"PathVertex_ScaledPixels".into(),
"ShadowInputIndex".into(),
"Shadow".into(),
@@ -1,9 +1,13 @@
use gpui::{
Application, Background, Bounds, ColorSpace, Context, MouseDownEvent, Path, PathBuilder,
- PathStyle, Pixels, Point, Render, SharedString, StrokeOptions, Window, WindowOptions, canvas,
- div, linear_color_stop, linear_gradient, point, prelude::*, px, rgb, size,
+ PathStyle, Pixels, Point, Render, SharedString, StrokeOptions, Window, WindowBounds,
+ WindowOptions, canvas, div, linear_color_stop, linear_gradient, point, prelude::*, px, rgb,
+ size,
};
+const DEFAULT_WINDOW_WIDTH: Pixels = px(1024.0);
+const DEFAULT_WINDOW_HEIGHT: Pixels = px(768.0);
+
struct PaintingViewer {
default_lines: Vec<(Path<Pixels>, Background)>,
lines: Vec<Vec<Point<Pixels>>>,
@@ -147,8 +151,6 @@ impl PaintingViewer {
px(320.0 + (i as f32 * 10.0).sin() * 40.0),
));
}
- let path = builder.build().unwrap();
- lines.push((path, gpui::green().into()));
Self {
default_lines: lines.clone(),
@@ -183,9 +185,13 @@ fn button(
}
impl Render for PaintingViewer {
- fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ window.request_animation_frame();
+
let default_lines = self.default_lines.clone();
let lines = self.lines.clone();
+ let window_size = window.bounds().size;
+ let scale = window_size.width / DEFAULT_WINDOW_WIDTH;
let dashed = self.dashed;
div()
@@ -222,7 +228,7 @@ impl Render for PaintingViewer {
move |_, _, _| {},
move |_, _, window, _| {
for (path, color) in default_lines {
- window.paint_path(path, color);
+ window.paint_path(path.clone().scale(scale), color);
}
for points in lines {
@@ -298,6 +304,11 @@ fn main() {
cx.open_window(
WindowOptions {
focus: true,
+ window_bounds: Some(WindowBounds::Windowed(Bounds::centered(
+ None,
+ size(DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT),
+ cx,
+ ))),
..Default::default()
},
|window, cx| cx.new(|cx| PaintingViewer::new(window, cx)),
@@ -156,6 +156,10 @@ impl Render for Shadow {
.w_full()
.children(vec![
example("None", Shadow::base()),
+ // 2X small shadow
+ example("2X Small", Shadow::base().shadow_2xs()),
+ // Extra small shadow
+ example("Extra Small", Shadow::base().shadow_xs()),
// Small shadow
example("Small", Shadow::base().shadow_sm()),
// Medium shadow
@@ -125,9 +125,7 @@ pub trait Action: Any + Send {
Self: Sized;
/// Optional JSON schema for the action's input data.
- fn action_json_schema(
- _: &mut schemars::r#gen::SchemaGenerator,
- ) -> Option<schemars::schema::Schema>
+ fn action_json_schema(_: &mut schemars::SchemaGenerator) -> Option<schemars::Schema>
where
Self: Sized,
{
@@ -152,6 +150,15 @@ pub trait Action: Any + Send {
{
None
}
+
+ /// The documentation for this action, if any. When using the derive macro for actions
+ /// this will be automatically generated from the doc comments on the action struct.
+ fn documentation() -> Option<&'static str>
+ where
+ Self: Sized,
+ {
+ None
+ }
}
impl std::fmt::Debug for dyn Action {
@@ -218,6 +225,7 @@ pub(crate) struct ActionRegistry {
all_names: Vec<&'static str>, // So we can return a static slice.
deprecated_aliases: HashMap<&'static str, &'static str>, // deprecated name -> preferred name
deprecation_messages: HashMap<&'static str, &'static str>, // action name -> deprecation message
+ documentation: HashMap<&'static str, &'static str>, // action name -> documentation
}
impl Default for ActionRegistry {
@@ -225,6 +233,7 @@ impl Default for ActionRegistry {
let mut this = ActionRegistry {
by_name: Default::default(),
names_by_type_id: Default::default(),
+ documentation: Default::default(),
all_names: Default::default(),
deprecated_aliases: Default::default(),
deprecation_messages: Default::default(),
@@ -238,7 +247,7 @@ impl Default for ActionRegistry {
struct ActionData {
pub build: ActionBuilder,
- pub json_schema: fn(&mut schemars::r#gen::SchemaGenerator) -> Option<schemars::schema::Schema>,
+ pub json_schema: fn(&mut schemars::SchemaGenerator) -> Option<schemars::Schema>,
}
/// This type must be public so that our macros can build it in other crates.
@@ -253,9 +262,10 @@ pub struct MacroActionData {
pub name: &'static str,
pub type_id: TypeId,
pub build: ActionBuilder,
- pub json_schema: fn(&mut schemars::r#gen::SchemaGenerator) -> Option<schemars::schema::Schema>,
+ pub json_schema: fn(&mut schemars::SchemaGenerator) -> Option<schemars::Schema>,
pub deprecated_aliases: &'static [&'static str],
pub deprecation_message: Option<&'static str>,
+ pub documentation: Option<&'static str>,
}
inventory::collect!(MacroActionBuilder);
@@ -278,6 +288,7 @@ impl ActionRegistry {
json_schema: A::action_json_schema,
deprecated_aliases: A::deprecated_aliases(),
deprecation_message: A::deprecation_message(),
+ documentation: A::documentation(),
});
}
@@ -318,6 +329,9 @@ impl ActionRegistry {
if let Some(deprecation_msg) = action.deprecation_message {
self.deprecation_messages.insert(name, deprecation_msg);
}
+ if let Some(documentation) = action.documentation {
+ self.documentation.insert(name, documentation);
+ }
}
/// Construct an action based on its name and optional JSON parameters sourced from the keymap.
@@ -357,8 +371,8 @@ impl ActionRegistry {
pub fn action_schemas(
&self,
- generator: &mut schemars::r#gen::SchemaGenerator,
- ) -> Vec<(&'static str, Option<schemars::schema::Schema>)> {
+ generator: &mut schemars::SchemaGenerator,
+ ) -> Vec<(&'static str, Option<schemars::Schema>)> {
// Use the order from all_names so that the resulting schema has sensible order.
self.all_names
.iter()
@@ -379,6 +393,10 @@ impl ActionRegistry {
pub fn deprecation_messages(&self) -> &HashMap<&'static str, &'static str> {
&self.deprecation_messages
}
+
+ pub fn documentation(&self) -> &HashMap<&'static str, &'static str> {
+ &self.documentation
+ }
}
/// Generate a list of all the registered actions.
@@ -1334,6 +1334,11 @@ impl App {
self.pending_effects.push_back(Effect::RefreshWindows);
}
+ /// Get all key bindings in the app.
+ pub fn key_bindings(&self) -> Rc<RefCell<Keymap>> {
+ self.keymap.clone()
+ }
+
/// Register a global listener for actions invoked via the keyboard.
pub fn on_action<A: Action>(&mut self, listener: impl Fn(&A, &mut Self) + 'static) {
self.global_action_listeners
@@ -1388,8 +1393,8 @@ impl App {
/// Get all non-internal actions that have been registered, along with their schemas.
pub fn action_schemas(
&self,
- generator: &mut schemars::r#gen::SchemaGenerator,
- ) -> Vec<(&'static str, Option<schemars::schema::Schema>)> {
+ generator: &mut schemars::SchemaGenerator,
+ ) -> Vec<(&'static str, Option<schemars::Schema>)> {
self.actions.action_schemas(generator)
}
@@ -1398,11 +1403,16 @@ impl App {
self.actions.deprecated_aliases()
}
- /// Get a list of all action deprecation messages.
+ /// Get a map from an action name to the deprecation messages.
pub fn action_deprecation_messages(&self) -> &HashMap<&'static str, &'static str> {
self.actions.deprecation_messages()
}
+ /// Get a map from an action name to the documentation.
+ pub fn action_documentation(&self) -> &HashMap<&'static str, &'static str> {
+ self.actions.documentation()
+ }
+
/// Register a callback to be invoked when the application is about to quit.
/// It is not possible to cancel the quit event at this point.
pub fn on_app_quit<Fut>(
@@ -1,9 +1,10 @@
use anyhow::{Context as _, bail};
-use schemars::{JsonSchema, SchemaGenerator, schema::Schema};
+use schemars::{JsonSchema, json_schema};
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{self, Visitor},
};
+use std::borrow::Cow;
use std::{
fmt::{self, Display, Formatter},
hash::{Hash, Hasher},
@@ -99,22 +100,14 @@ impl Visitor<'_> for RgbaVisitor {
}
impl JsonSchema for Rgba {
- fn schema_name() -> String {
- "Rgba".to_string()
+ fn schema_name() -> Cow<'static, str> {
+ "Rgba".into()
}
- fn json_schema(_generator: &mut SchemaGenerator) -> Schema {
- use schemars::schema::{InstanceType, SchemaObject, StringValidation};
-
- Schema::Object(SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- string: Some(Box::new(StringValidation {
- pattern: Some(
- r"^#([0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$".to_string(),
- ),
- ..Default::default()
- })),
- ..Default::default()
+ fn json_schema(_generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "string",
+ "pattern": "^#([0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$"
})
}
}
@@ -629,11 +622,11 @@ impl From<Rgba> for Hsla {
}
impl JsonSchema for Hsla {
- fn schema_name() -> String {
+ fn schema_name() -> Cow<'static, str> {
Rgba::schema_name()
}
- fn json_schema(generator: &mut SchemaGenerator) -> Schema {
+ fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
Rgba::json_schema(generator)
}
}
@@ -613,10 +613,10 @@ pub trait InteractiveElement: Sized {
/// Track the focus state of the given focus handle on this element.
/// If the focus handle is focused by the application, this element will
/// apply its focused styles.
- fn track_focus(mut self, focus_handle: &FocusHandle) -> FocusableWrapper<Self> {
+ fn track_focus(mut self, focus_handle: &FocusHandle) -> Self {
self.interactivity().focusable = true;
self.interactivity().tracked_focus_handle = Some(focus_handle.clone());
- FocusableWrapper { element: self }
+ self
}
/// Set the keymap context for this element. This will be used to determine
@@ -980,15 +980,35 @@ pub trait InteractiveElement: Sized {
self.interactivity().block_mouse_except_scroll();
self
}
+
+ /// Set the given styles to be applied when this element, specifically, is focused.
+ /// Requires that the element is focusable. Elements can be made focusable using [`InteractiveElement::track_focus`].
+ fn focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
+ where
+ Self: Sized,
+ {
+ self.interactivity().focus_style = Some(Box::new(f(StyleRefinement::default())));
+ self
+ }
+
+ /// Set the given styles to be applied when this element is inside another element that is focused.
+ /// Requires that the element is focusable. Elements can be made focusable using [`InteractiveElement::track_focus`].
+ fn in_focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
+ where
+ Self: Sized,
+ {
+ self.interactivity().in_focus_style = Some(Box::new(f(StyleRefinement::default())));
+ self
+ }
}
/// A trait for elements that want to use the standard GPUI interactivity features
/// that require state.
pub trait StatefulInteractiveElement: InteractiveElement {
/// Set this element to focusable.
- fn focusable(mut self) -> FocusableWrapper<Self> {
+ fn focusable(mut self) -> Self {
self.interactivity().focusable = true;
- FocusableWrapper { element: self }
+ self
}
/// Set the overflow x and y to scroll.
@@ -1118,27 +1138,6 @@ pub trait StatefulInteractiveElement: InteractiveElement {
}
}
-/// A trait for providing focus related APIs to interactive elements
-pub trait FocusableElement: InteractiveElement {
- /// Set the given styles to be applied when this element, specifically, is focused.
- fn focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
- where
- Self: Sized,
- {
- self.interactivity().focus_style = Some(Box::new(f(StyleRefinement::default())));
- self
- }
-
- /// Set the given styles to be applied when this element is inside another element that is focused.
- fn in_focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
- where
- Self: Sized,
- {
- self.interactivity().in_focus_style = Some(Box::new(f(StyleRefinement::default())));
- self
- }
-}
-
pub(crate) type MouseDownListener =
Box<dyn Fn(&MouseDownEvent, DispatchPhase, &Hitbox, &mut Window, &mut App) + 'static>;
pub(crate) type MouseUpListener =
@@ -2777,126 +2776,6 @@ impl GroupHitboxes {
}
}
-/// A wrapper around an element that can be focused.
-pub struct FocusableWrapper<E> {
- /// The element that is focusable
- pub element: E,
-}
-
-impl<E: InteractiveElement> FocusableElement for FocusableWrapper<E> {}
-
-impl<E> InteractiveElement for FocusableWrapper<E>
-where
- E: InteractiveElement,
-{
- fn interactivity(&mut self) -> &mut Interactivity {
- self.element.interactivity()
- }
-}
-
-impl<E: StatefulInteractiveElement> StatefulInteractiveElement for FocusableWrapper<E> {}
-
-impl<E> Styled for FocusableWrapper<E>
-where
- E: Styled,
-{
- fn style(&mut self) -> &mut StyleRefinement {
- self.element.style()
- }
-}
-
-impl FocusableWrapper<Div> {
- /// Add a listener to be called when the children of this `Div` are prepainted.
- /// This allows you to store the [`Bounds`] of the children for later use.
- pub fn on_children_prepainted(
- mut self,
- listener: impl Fn(Vec<Bounds<Pixels>>, &mut Window, &mut App) + 'static,
- ) -> Self {
- self.element = self.element.on_children_prepainted(listener);
- self
- }
-}
-
-impl<E> Element for FocusableWrapper<E>
-where
- E: Element,
-{
- type RequestLayoutState = E::RequestLayoutState;
- type PrepaintState = E::PrepaintState;
-
- fn id(&self) -> Option<ElementId> {
- self.element.id()
- }
-
- fn source_location(&self) -> Option<&'static core::panic::Location<'static>> {
- self.element.source_location()
- }
-
- fn request_layout(
- &mut self,
- id: Option<&GlobalElementId>,
- inspector_id: Option<&InspectorElementId>,
- window: &mut Window,
- cx: &mut App,
- ) -> (LayoutId, Self::RequestLayoutState) {
- self.element.request_layout(id, inspector_id, window, cx)
- }
-
- fn prepaint(
- &mut self,
- id: Option<&GlobalElementId>,
- inspector_id: Option<&InspectorElementId>,
- bounds: Bounds<Pixels>,
- state: &mut Self::RequestLayoutState,
- window: &mut Window,
- cx: &mut App,
- ) -> E::PrepaintState {
- self.element
- .prepaint(id, inspector_id, bounds, state, window, cx)
- }
-
- fn paint(
- &mut self,
- id: Option<&GlobalElementId>,
- inspector_id: Option<&InspectorElementId>,
- bounds: Bounds<Pixels>,
- request_layout: &mut Self::RequestLayoutState,
- prepaint: &mut Self::PrepaintState,
- window: &mut Window,
- cx: &mut App,
- ) {
- self.element.paint(
- id,
- inspector_id,
- bounds,
- request_layout,
- prepaint,
- window,
- cx,
- )
- }
-}
-
-impl<E> IntoElement for FocusableWrapper<E>
-where
- E: IntoElement,
-{
- type Element = E::Element;
-
- fn into_element(self) -> Self::Element {
- self.element.into_element()
- }
-}
-
-impl<E> ParentElement for FocusableWrapper<E>
-where
- E: ParentElement,
-{
- fn extend(&mut self, elements: impl IntoIterator<Item = AnyElement>) {
- self.element.extend(elements)
- }
-}
-
/// A wrapper around an element that can store state, produced after assigning an ElementId.
pub struct Stateful<E> {
pub(crate) element: E,
@@ -2927,8 +2806,6 @@ where
}
}
-impl<E: FocusableElement> FocusableElement for Stateful<E> {}
-
impl<E> Element for Stateful<E>
where
E: Element,
@@ -25,7 +25,7 @@ use std::{
use thiserror::Error;
use util::ResultExt;
-use super::{FocusableElement, Stateful, StatefulInteractiveElement};
+use super::{Stateful, StatefulInteractiveElement};
/// The delay before showing the loading state.
pub const LOADING_DELAY: Duration = Duration::from_millis(200);
@@ -509,8 +509,6 @@ impl IntoElement for Img {
}
}
-impl FocusableElement for Img {}
-
impl StatefulInteractiveElement for Img {}
impl ImageSource {
@@ -10,8 +10,8 @@
use crate::{
AnyElement, App, AvailableSpace, Bounds, ContentMask, DispatchPhase, Edges, Element, EntityId,
FocusHandle, GlobalElementId, Hitbox, HitboxBehavior, InspectorElementId, IntoElement,
- Overflow, Pixels, Point, ScrollWheelEvent, Size, Style, StyleRefinement, Styled, Window, point,
- px, size,
+ Overflow, Pixels, Point, ScrollDelta, ScrollWheelEvent, Size, Style, StyleRefinement, Styled,
+ Window, point, px, size,
};
use collections::VecDeque;
use refineable::Refineable as _;
@@ -291,6 +291,31 @@ impl ListState {
self.0.borrow().logical_scroll_top()
}
+ /// Scroll the list by the given distance, in pixels. Positive values scroll toward the end of the list.
+ pub fn scroll_by(&self, distance: Pixels) {
+ if distance == px(0.) {
+ return;
+ }
+
+ let current_offset = self.logical_scroll_top();
+ let state = &mut *self.0.borrow_mut();
+ let mut cursor = state.items.cursor::<ListItemSummary>(&());
+ cursor.seek(&Count(current_offset.item_ix), Bias::Right, &());
+
+ let start_pixel_offset = cursor.start().height + current_offset.offset_in_item;
+ let new_pixel_offset = (start_pixel_offset + distance).max(px(0.));
+ if new_pixel_offset > start_pixel_offset {
+ cursor.seek_forward(&Height(new_pixel_offset), Bias::Right, &());
+ } else {
+ cursor.seek(&Height(new_pixel_offset), Bias::Right, &());
+ }
+
+ state.logical_scroll_top = Some(ListOffset {
+ item_ix: cursor.start().count,
+ offset_in_item: new_pixel_offset - cursor.start().height,
+ });
+ }
+
/// Scroll the list to the given offset
pub fn scroll_to(&self, mut scroll_top: ListOffset) {
let state = &mut *self.0.borrow_mut();
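
`ListState::scroll_by` complements `scroll_to`: instead of computing an absolute `ListOffset`, callers pass a signed pixel distance and the list clamps at the top for them. A small usage sketch (the paging helpers are illustrative):

    use gpui::{ListState, Pixels};

    // Positive distances move toward later items; negative distances move back
    // and are clamped at the top of the list.
    fn page_down(state: &ListState, viewport_height: Pixels) {
        state.scroll_by(viewport_height);
    }

    fn page_up(state: &ListState, viewport_height: Pixels) {
        state.scroll_by(-viewport_height);
    }
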
@@ -962,12 +987,15 @@ impl Element for List {
let height = bounds.size.height;
let scroll_top = prepaint.layout.scroll_top;
let hitbox_id = prepaint.hitbox.id;
+ let mut accumulated_scroll_delta = ScrollDelta::default();
window.on_mouse_event(move |event: &ScrollWheelEvent, phase, window, cx| {
if phase == DispatchPhase::Bubble && hitbox_id.should_handle_scroll(window) {
+ accumulated_scroll_delta = accumulated_scroll_delta.coalesce(event.delta);
+ let pixel_delta = accumulated_scroll_delta.pixel_delta(px(20.));
list_state.0.borrow_mut().scroll(
&scroll_top,
height,
- event.delta.pixel_delta(px(20.)),
+ pixel_delta,
current_view,
window,
cx,
@@ -1116,4 +1144,52 @@ mod test {
assert_eq!(state.logical_scroll_top().item_ix, 0);
assert_eq!(state.logical_scroll_top().offset_in_item, px(0.));
}
+
+ #[gpui::test]
+ fn test_scroll_by_positive_and_negative_distance(cx: &mut TestAppContext) {
+ use crate::{
+ AppContext, Context, Element, IntoElement, ListState, Render, Styled, Window, div,
+ list, point, px, size,
+ };
+
+ let cx = cx.add_empty_window();
+
+ let state = ListState::new(5, crate::ListAlignment::Top, px(10.), |_, _, _| {
+ div().h(px(20.)).w_full().into_any()
+ });
+
+ struct TestView(ListState);
+ impl Render for TestView {
+ fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
+ list(self.0.clone()).w_full().h_full()
+ }
+ }
+
+ // Paint once so the list lays out its items and scroll geometry is valid
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(100.)), |_, cx| {
+ cx.new(|_| TestView(state.clone()))
+ });
+
+ // Scroll down by 30px (items are 20px tall)
+ state.scroll_by(px(30.));
+
+ // Should land 10px into the item at index 1
+ let offset = state.logical_scroll_top();
+ assert_eq!(offset.item_ix, 1);
+ assert_eq!(offset.offset_in_item, px(10.));
+
+ // Scroll back up by 30px
+ state.scroll_by(px(-30.));
+
+ // Should return to the top of the item at index 0
+ let offset = state.logical_scroll_top();
+ assert_eq!(offset.item_ix, 0);
+ assert_eq!(offset.offset_in_item, px(0.));
+
+ // Test zero distance
+ state.scroll_by(px(0.));
+ let offset = state.logical_scroll_top();
+ assert_eq!(offset.item_ix, 0);
+ assert_eq!(offset.offset_in_item, px(0.));
+ }
}
@@ -7,8 +7,8 @@
use crate::{
AnyElement, App, AvailableSpace, Bounds, ContentMask, Element, ElementId, GlobalElementId,
Hitbox, InspectorElementId, InteractiveElement, Interactivity, IntoElement, IsZero, LayoutId,
- ListSizingBehavior, Overflow, Pixels, ScrollHandle, Size, StyleRefinement, Styled, Window,
- point, size,
+ ListSizingBehavior, Overflow, Pixels, Point, ScrollHandle, Size, StyleRefinement, Styled,
+ Window, point, size,
};
use smallvec::SmallVec;
use std::{cell::RefCell, cmp, ops::Range, rc::Rc};
@@ -42,6 +42,7 @@ where
item_count,
item_to_measure_index: 0,
render_items: Box::new(render_range),
+ top_slot: None,
decorations: Vec::new(),
interactivity: Interactivity {
element_id: Some(id),
@@ -61,6 +62,7 @@ pub struct UniformList {
render_items: Box<
dyn for<'a> Fn(Range<usize>, &'a mut Window, &'a mut App) -> SmallVec<[AnyElement; 64]>,
>,
+ top_slot: Option<Box<dyn UniformListTopSlot>>,
decorations: Vec<Box<dyn UniformListDecoration>>,
interactivity: Interactivity,
scroll_handle: Option<UniformListScrollHandle>,
@@ -71,6 +73,7 @@ pub struct UniformList {
/// Frame state used by the [UniformList].
pub struct UniformListFrameState {
items: SmallVec<[AnyElement; 32]>,
+ top_slot_items: SmallVec<[AnyElement; 8]>,
decorations: SmallVec<[AnyElement; 1]>,
}
@@ -88,6 +91,8 @@ pub enum ScrollStrategy {
/// May not be possible if there's not enough list items above the item scrolled to:
/// in this case, the element will be placed at the closest possible position.
Center,
+ /// Scrolls so that the element sits the given number of items below the top of the viewport, clamped to the scrollable range.
+ ToPosition(usize),
}
#[derive(Clone, Debug, Default)]
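
`ScrollStrategy::ToPosition(n)` asks the uniform list to place the target item n rows below the top of the viewport, clamped to the scrollable range. A hedged sketch of a caller, assuming the usual `scroll_to_item(index, strategy)` entry point on `UniformListScrollHandle`:

    use gpui::{ScrollStrategy, UniformListScrollHandle};

    // Keep the selected row pinned two rows below the top of the viewport,
    // for example underneath a sticky group header.
    fn reveal_below_header(handle: &UniformListScrollHandle, selected_ix: usize) {
        handle.scroll_to_item(selected_ix, ScrollStrategy::ToPosition(2));
    }
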
@@ -212,6 +217,7 @@ impl Element for UniformList {
UniformListFrameState {
items: SmallVec::new(),
decorations: SmallVec::new(),
+ top_slot_items: SmallVec::new(),
},
)
}
@@ -345,6 +351,15 @@ impl Element for UniformList {
}
}
}
+ ScrollStrategy::ToPosition(sticky_index) => {
+ let target_y_in_viewport = item_height * sticky_index;
+ let target_scroll_top = item_top - target_y_in_viewport;
+ let max_scroll_top =
+ (content_height - list_height).max(Pixels::ZERO);
+ let new_scroll_top =
+ target_scroll_top.clamp(Pixels::ZERO, max_scroll_top);
+ updated_scroll_offset.y = -new_scroll_top;
+ }
}
scroll_offset = *updated_scroll_offset
}
@@ -354,7 +369,17 @@ impl Element for UniformList {
let last_visible_element_ix = ((-scroll_offset.y + padded_bounds.size.height)
/ item_height)
.ceil() as usize;
- let visible_range = first_visible_element_ix
+ let initial_range = first_visible_element_ix
+ ..cmp::min(last_visible_element_ix, self.item_count);
+
+ let mut top_slot_elements = if let Some(ref mut top_slot) = self.top_slot {
+ top_slot.compute(initial_range, window, cx)
+ } else {
+ SmallVec::new()
+ };
+ let top_slot_offset = top_slot_elements.len();
+
+ let visible_range = (top_slot_offset + first_visible_element_ix)
..cmp::min(last_visible_element_ix, self.item_count);
let items = if y_flipped {
@@ -393,6 +418,20 @@ impl Element for UniformList {
frame_state.items.push(item);
}
+ if let Some(ref top_slot) = self.top_slot {
+ top_slot.prepaint(
+ &mut top_slot_elements,
+ padded_bounds,
+ item_height,
+ scroll_offset,
+ padding,
+ can_scroll_horizontally,
+ window,
+ cx,
+ );
+ }
+ frame_state.top_slot_items = top_slot_elements;
+
let bounds = Bounds::new(
padded_bounds.origin
+ point(
@@ -454,6 +493,9 @@ impl Element for UniformList {
for decoration in &mut request_layout.decorations {
decoration.paint(window, cx);
}
+ if let Some(ref top_slot) = self.top_slot {
+ top_slot.paint(&mut request_layout.top_slot_items, window, cx);
+ }
},
)
}
@@ -483,6 +525,35 @@ pub trait UniformListDecoration {
) -> AnyElement;
}
+/// A trait for implementing top slots in a [`UniformList`].
+/// Top slots are elements that appear at the top of the list and can adjust
+/// the visible range of list items.
+pub trait UniformListTopSlot {
+ /// Returns elements to render at the top slot for the given visible range.
+ fn compute(
+ &mut self,
+ visible_range: Range<usize>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> SmallVec<[AnyElement; 8]>;
+
+ /// Layout and prepaint the top slot elements.
+ fn prepaint(
+ &self,
+ elements: &mut SmallVec<[AnyElement; 8]>,
+ bounds: Bounds<Pixels>,
+ item_height: Pixels,
+ scroll_offset: Point<Pixels>,
+ padding: crate::Edges<Pixels>,
+ can_scroll_horizontally: bool,
+ window: &mut Window,
+ cx: &mut App,
+ );
+
+ /// Paint the top slot elements.
+ fn paint(&self, elements: &mut SmallVec<[AnyElement; 8]>, window: &mut Window, cx: &mut App);
+}
+
impl UniformList {
/// Selects a specific list item for measurement.
pub fn with_width_from_item(mut self, item_index: Option<usize>) -> Self {
@@ -521,6 +592,12 @@ impl UniformList {
self
}
+ /// Sets a top slot for the list.
+ pub fn with_top_slot(mut self, top_slot: impl UniformListTopSlot + 'static) -> Self {
+ self.top_slot = Some(Box::new(top_slot));
+ self
+ }
+
fn measure_item(
&self,
list_width: Option<Pixels>,
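
A top slot is registered with `with_top_slot`; the implementor returns extra elements for the current visible range and then receives prepaint/paint callbacks alongside the regular rows. A minimal do-nothing implementor, shown only to illustrate the shape of the trait and assuming it is re-exported from the crate root like the existing `UniformListDecoration`:

    use gpui::{AnyElement, App, Bounds, Edges, Pixels, Point, UniformListTopSlot, Window};
    use smallvec::SmallVec;
    use std::ops::Range;

    struct EmptyTopSlot;

    impl UniformListTopSlot for EmptyTopSlot {
        fn compute(
            &mut self,
            _visible_range: Range<usize>,
            _window: &mut Window,
            _cx: &mut App,
        ) -> SmallVec<[AnyElement; 8]> {
            SmallVec::new() // contribute no extra elements above the list
        }

        fn prepaint(
            &self,
            _elements: &mut SmallVec<[AnyElement; 8]>,
            _bounds: Bounds<Pixels>,
            _item_height: Pixels,
            _scroll_offset: Point<Pixels>,
            _padding: Edges<Pixels>,
            _can_scroll_horizontally: bool,
            _window: &mut Window,
            _cx: &mut App,
        ) {
        }

        fn paint(
            &self,
            _elements: &mut SmallVec<[AnyElement; 8]>,
            _window: &mut Window,
            _cx: &mut App,
        ) {
        }
    }

    // Wired up when constructing the list, e.g.:
    // uniform_list(...).with_top_slot(EmptyTopSlot)
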
@@ -6,8 +6,9 @@ use anyhow::{Context as _, anyhow};
use core::fmt::Debug;
use derive_more::{Add, AddAssign, Div, DivAssign, Mul, Neg, Sub, SubAssign};
use refineable::Refineable;
-use schemars::{JsonSchema, SchemaGenerator, schema::Schema};
+use schemars::{JsonSchema, json_schema};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
+use std::borrow::Cow;
use std::{
cmp::{self, PartialOrd},
fmt::{self, Display},
@@ -3229,20 +3230,14 @@ impl TryFrom<&'_ str> for AbsoluteLength {
}
impl JsonSchema for AbsoluteLength {
- fn schema_name() -> String {
- "AbsoluteLength".to_string()
+ fn schema_name() -> Cow<'static, str> {
+ "AbsoluteLength".into()
}
- fn json_schema(_generator: &mut SchemaGenerator) -> Schema {
- use schemars::schema::{InstanceType, SchemaObject, StringValidation};
-
- Schema::Object(SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- string: Some(Box::new(StringValidation {
- pattern: Some(r"^-?\d+(\.\d+)?(px|rem)$".to_string()),
- ..Default::default()
- })),
- ..Default::default()
+ fn json_schema(_generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "string",
+ "pattern": r"^-?\d+(\.\d+)?(px|rem)$"
})
}
}
@@ -3366,20 +3361,14 @@ impl TryFrom<&'_ str> for DefiniteLength {
}
impl JsonSchema for DefiniteLength {
- fn schema_name() -> String {
- "DefiniteLength".to_string()
+ fn schema_name() -> Cow<'static, str> {
+ "DefiniteLength".into()
}
- fn json_schema(_generator: &mut SchemaGenerator) -> Schema {
- use schemars::schema::{InstanceType, SchemaObject, StringValidation};
-
- Schema::Object(SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- string: Some(Box::new(StringValidation {
- pattern: Some(r"^-?\d+(\.\d+)?(px|rem|%)$".to_string()),
- ..Default::default()
- })),
- ..Default::default()
+ fn json_schema(_generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "string",
+ "pattern": r"^-?\d+(\.\d+)?(px|rem|%)$"
})
}
}
@@ -3480,20 +3469,14 @@ impl TryFrom<&'_ str> for Length {
}
impl JsonSchema for Length {
- fn schema_name() -> String {
- "Length".to_string()
+ fn schema_name() -> Cow<'static, str> {
+ "Length".into()
}
- fn json_schema(_generator: &mut SchemaGenerator) -> Schema {
- use schemars::schema::{InstanceType, SchemaObject, StringValidation};
-
- Schema::Object(SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- string: Some(Box::new(StringValidation {
- pattern: Some(r"^(auto|-?\d+(\.\d+)?(px|rem|%))$".to_string()),
- ..Default::default()
- })),
- ..Default::default()
+ fn json_schema(_generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "string",
+ "pattern": r"^(auto|-?\d+(\.\d+)?(px|rem|%))$"
})
}
}
@@ -2,7 +2,7 @@ use std::rc::Rc;
use collections::HashMap;
-use crate::{Action, InvalidKeystrokeError, KeyBindingContextPredicate, Keystroke};
+use crate::{Action, InvalidKeystrokeError, KeyBindingContextPredicate, Keystroke, SharedString};
use smallvec::SmallVec;
/// A keybinding and its associated metadata, from the keymap.
@@ -11,6 +11,8 @@ pub struct KeyBinding {
pub(crate) keystrokes: SmallVec<[Keystroke; 2]>,
pub(crate) context_predicate: Option<Rc<KeyBindingContextPredicate>>,
pub(crate) meta: Option<KeyBindingMetaIndex>,
+ /// The JSON input string used when building the keybinding, if any
+ pub(crate) action_input: Option<SharedString>,
}
impl Clone for KeyBinding {
@@ -20,6 +22,7 @@ impl Clone for KeyBinding {
keystrokes: self.keystrokes.clone(),
context_predicate: self.context_predicate.clone(),
meta: self.meta,
+ action_input: self.action_input.clone(),
}
}
}
@@ -32,7 +35,7 @@ impl KeyBinding {
} else {
None
};
- Self::load(keystrokes, Box::new(action), context_predicate, None).unwrap()
+ Self::load(keystrokes, Box::new(action), context_predicate, None, None).unwrap()
}
/// Load a keybinding from the given raw data.
@@ -41,6 +44,7 @@ impl KeyBinding {
action: Box<dyn Action>,
context_predicate: Option<Rc<KeyBindingContextPredicate>>,
key_equivalents: Option<&HashMap<char, char>>,
+ action_input: Option<SharedString>,
) -> std::result::Result<Self, InvalidKeystrokeError> {
let mut keystrokes: SmallVec<[Keystroke; 2]> = keystrokes
.split_whitespace()
@@ -62,6 +66,7 @@ impl KeyBinding {
action,
context_predicate,
meta: None,
+ action_input,
})
}
@@ -110,6 +115,11 @@ impl KeyBinding {
pub fn meta(&self) -> Option<KeyBindingMetaIndex> {
self.meta
}
+
+ /// Get the JSON action input used to build this binding's action, if any
+ pub fn action_input(&self) -> Option<SharedString> {
+ self.action_input.clone()
+ }
}
impl std::fmt::Debug for KeyBinding {
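
`KeyBinding::load` now takes an optional `action_input`, letting a binding carry the raw JSON that built its action so it can be read back later via `action_input()`. A hedged sketch; the keystrokes, action, and JSON payload are illustrative:

    use gpui::{Action, KeyBinding, SharedString};

    fn bind_with_input(action: Box<dyn Action>, input_json: &str) -> KeyBinding {
        let input: SharedString = input_json.to_string().into();
        // keystrokes, action, context predicate, key equivalents, action input
        KeyBinding::load("cmd-k cmd-t", action, None, None, Some(input))
            .expect("keystrokes should parse")
    }

    // Later, for example when surfacing the binding in a keymap UI:
    // let raw_json: Option<SharedString> = binding.action_input();
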
@@ -336,10 +336,7 @@ impl PathBuilder {
let v1 = buf.vertices[i1];
let v2 = buf.vertices[i2];
- path.push_triangle(
- (v0.into(), v1.into(), v2.into()),
- (point(0., 1.), point(0., 1.), point(0., 1.)),
- );
+ path.push_triangle((v0.into(), v1.into(), v2.into()));
}
path
@@ -25,6 +25,7 @@ mod test;
mod windows;
#[cfg(all(
+ feature = "screen-capture",
any(target_os = "linux", target_os = "freebsd"),
any(feature = "wayland", feature = "x11"),
))]
@@ -151,7 +152,7 @@ pub fn guess_compositor() -> &'static str {
pub(crate) fn current_platform(_headless: bool) -> Rc<dyn Platform> {
Rc::new(
WindowsPlatform::new()
- .inspect_err(|err| show_error("Error: Zed failed to launch", err.to_string()))
+ .inspect_err(|err| show_error("Failed to launch", err.to_string()))
.unwrap(),
)
}
@@ -176,10 +177,28 @@ pub(crate) trait Platform: 'static {
None
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool;
+ #[cfg(not(feature = "screen-capture"))]
+ fn is_screen_capture_supported(&self) -> bool {
+ false
+ }
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>>;
+ #[cfg(not(feature = "screen-capture"))]
+ fn screen_capture_sources(
+ &self,
+ ) -> oneshot::Receiver<anyhow::Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ let (sources_tx, sources_rx) = oneshot::channel();
+ sources_tx
+ .send(Err(anyhow::anyhow!(
+ "gpui was compiled without the screen-capture feature"
+ )))
+ .ok();
+ sources_rx
+ }
fn open_window(
&self,
@@ -770,7 +789,6 @@ pub(crate) struct AtlasTextureId {
pub(crate) enum AtlasTextureKind {
Monochrome = 0,
Polychrome = 1,
- Path = 2,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
@@ -10,8 +10,6 @@ use etagere::BucketedAtlasAllocator;
use parking_lot::Mutex;
use std::{borrow::Cow, ops, sync::Arc};
-pub(crate) const PATH_TEXTURE_FORMAT: gpu::TextureFormat = gpu::TextureFormat::R16Float;
-
pub(crate) struct BladeAtlas(Mutex<BladeAtlasState>);
struct PendingUpload {
@@ -27,7 +25,6 @@ struct BladeAtlasState {
tiles_by_key: FxHashMap<AtlasKey, AtlasTile>,
initializations: Vec<AtlasTextureId>,
uploads: Vec<PendingUpload>,
- path_sample_count: u32,
}
#[cfg(gles)]
@@ -41,13 +38,13 @@ impl BladeAtlasState {
}
pub struct BladeTextureInfo {
+ #[allow(dead_code)]
pub size: gpu::Extent,
pub raw_view: gpu::TextureView,
- pub msaa_view: Option<gpu::TextureView>,
}
impl BladeAtlas {
- pub(crate) fn new(gpu: &Arc<gpu::Context>, path_sample_count: u32) -> Self {
+ pub(crate) fn new(gpu: &Arc<gpu::Context>) -> Self {
BladeAtlas(Mutex::new(BladeAtlasState {
gpu: Arc::clone(gpu),
upload_belt: BufferBelt::new(BufferBeltDescriptor {
@@ -59,7 +56,6 @@ impl BladeAtlas {
tiles_by_key: Default::default(),
initializations: Vec::new(),
uploads: Vec::new(),
- path_sample_count,
}))
}
@@ -67,6 +63,7 @@ impl BladeAtlas {
self.0.lock().destroy();
}
+ #[allow(dead_code)]
pub(crate) fn clear_textures(&self, texture_kind: AtlasTextureKind) {
let mut lock = self.0.lock();
let textures = &mut lock.storage[texture_kind];
@@ -75,19 +72,6 @@ impl BladeAtlas {
}
}
- /// Allocate a rectangle and make it available for rendering immediately (without waiting for `before_frame`)
- pub fn allocate_for_rendering(
- &self,
- size: Size<DevicePixels>,
- texture_kind: AtlasTextureKind,
- gpu_encoder: &mut gpu::CommandEncoder,
- ) -> AtlasTile {
- let mut lock = self.0.lock();
- let tile = lock.allocate(size, texture_kind);
- lock.flush_initializations(gpu_encoder);
- tile
- }
-
pub fn before_frame(&self, gpu_encoder: &mut gpu::CommandEncoder) {
let mut lock = self.0.lock();
lock.flush(gpu_encoder);
@@ -109,7 +93,6 @@ impl BladeAtlas {
depth: 1,
},
raw_view: texture.raw_view,
- msaa_view: texture.msaa_view,
}
}
}
@@ -200,48 +183,8 @@ impl BladeAtlasState {
format = gpu::TextureFormat::Bgra8UnormSrgb;
usage = gpu::TextureUsage::COPY | gpu::TextureUsage::RESOURCE;
}
- AtlasTextureKind::Path => {
- format = PATH_TEXTURE_FORMAT;
- usage = gpu::TextureUsage::COPY
- | gpu::TextureUsage::RESOURCE
- | gpu::TextureUsage::TARGET;
- }
}
- // We currently only enable MSAA for path textures.
- let (msaa, msaa_view) = if self.path_sample_count > 1 && kind == AtlasTextureKind::Path {
- let msaa = self.gpu.create_texture(gpu::TextureDesc {
- name: "msaa path texture",
- format,
- size: gpu::Extent {
- width: size.width.into(),
- height: size.height.into(),
- depth: 1,
- },
- array_layer_count: 1,
- mip_level_count: 1,
- sample_count: self.path_sample_count,
- dimension: gpu::TextureDimension::D2,
- usage: gpu::TextureUsage::TARGET,
- external: None,
- });
-
- (
- Some(msaa),
- Some(self.gpu.create_texture_view(
- msaa,
- gpu::TextureViewDesc {
- name: "msaa texture view",
- format,
- dimension: gpu::ViewDimension::D2,
- subresources: &Default::default(),
- },
- )),
- )
- } else {
- (None, None)
- };
-
let raw = self.gpu.create_texture(gpu::TextureDesc {
name: "atlas",
format,
@@ -279,8 +222,6 @@ impl BladeAtlasState {
format,
raw,
raw_view,
- msaa,
- msaa_view,
live_atlas_keys: 0,
};
@@ -340,7 +281,6 @@ impl BladeAtlasState {
struct BladeAtlasStorage {
monochrome_textures: AtlasTextureList<BladeAtlasTexture>,
polychrome_textures: AtlasTextureList<BladeAtlasTexture>,
- path_textures: AtlasTextureList<BladeAtlasTexture>,
}
impl ops::Index<AtlasTextureKind> for BladeAtlasStorage {
@@ -349,7 +289,6 @@ impl ops::Index<AtlasTextureKind> for BladeAtlasStorage {
match kind {
crate::AtlasTextureKind::Monochrome => &self.monochrome_textures,
crate::AtlasTextureKind::Polychrome => &self.polychrome_textures,
- crate::AtlasTextureKind::Path => &self.path_textures,
}
}
}
@@ -359,7 +298,6 @@ impl ops::IndexMut<AtlasTextureKind> for BladeAtlasStorage {
match kind {
crate::AtlasTextureKind::Monochrome => &mut self.monochrome_textures,
crate::AtlasTextureKind::Polychrome => &mut self.polychrome_textures,
- crate::AtlasTextureKind::Path => &mut self.path_textures,
}
}
}
@@ -370,7 +308,6 @@ impl ops::Index<AtlasTextureId> for BladeAtlasStorage {
let textures = match id.kind {
crate::AtlasTextureKind::Monochrome => &self.monochrome_textures,
crate::AtlasTextureKind::Polychrome => &self.polychrome_textures,
- crate::AtlasTextureKind::Path => &self.path_textures,
};
textures[id.index as usize].as_ref().unwrap()
}
@@ -384,9 +321,6 @@ impl BladeAtlasStorage {
for mut texture in self.polychrome_textures.drain().flatten() {
texture.destroy(gpu);
}
- for mut texture in self.path_textures.drain().flatten() {
- texture.destroy(gpu);
- }
}
}
@@ -395,8 +329,6 @@ struct BladeAtlasTexture {
allocator: BucketedAtlasAllocator,
raw: gpu::Texture,
raw_view: gpu::TextureView,
- msaa: Option<gpu::Texture>,
- msaa_view: Option<gpu::TextureView>,
format: gpu::TextureFormat,
live_atlas_keys: u32,
}
@@ -424,12 +356,6 @@ impl BladeAtlasTexture {
fn destroy(&mut self, gpu: &gpu::Context) {
gpu.destroy_texture(self.raw);
gpu.destroy_texture_view(self.raw_view);
- if let Some(msaa) = self.msaa {
- gpu.destroy_texture(msaa);
- }
- if let Some(msaa_view) = self.msaa_view {
- gpu.destroy_texture_view(msaa_view);
- }
}
fn bytes_per_pixel(&self) -> u8 {
@@ -1,24 +1,19 @@
// Doing `if let` gives you nice scoping with passes/encoders
#![allow(irrefutable_let_patterns)]
-use super::{BladeAtlas, BladeContext, PATH_TEXTURE_FORMAT};
+use super::{BladeAtlas, BladeContext};
use crate::{
- AtlasTextureKind, AtlasTile, Background, Bounds, ContentMask, DevicePixels, GpuSpecs,
- MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch, Quad,
- ScaledPixels, Scene, Shadow, Size, Underline,
+ Background, Bounds, ContentMask, DevicePixels, GpuSpecs, MonochromeSprite, PathVertex,
+ PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline,
};
-use blade_graphics as gpu;
+use blade_graphics::{self as gpu};
use blade_util::{BufferBelt, BufferBeltDescriptor};
use bytemuck::{Pod, Zeroable};
-use collections::HashMap;
#[cfg(target_os = "macos")]
use media::core_video::CVMetalTextureCache;
use std::{mem, sync::Arc};
const MAX_FRAME_TIME_MS: u32 = 10000;
-// Use 4x MSAA, all devices support it.
-// https://developer.apple.com/documentation/metal/mtldevice/1433355-supportstexturesamplecount
-const DEFAULT_PATH_SAMPLE_COUNT: u32 = 4;
#[repr(C)]
#[derive(Clone, Copy, Pod, Zeroable)]
@@ -65,17 +60,10 @@ struct ShaderShadowsData {
b_shadows: gpu::BufferPiece,
}
-#[derive(blade_macros::ShaderData)]
-struct ShaderPathRasterizationData {
- globals: GlobalParams,
- b_path_vertices: gpu::BufferPiece,
-}
-
#[derive(blade_macros::ShaderData)]
struct ShaderPathsData {
globals: GlobalParams,
- t_sprite: gpu::TextureView,
- s_sprite: gpu::Sampler,
+ b_path_vertices: gpu::BufferPiece,
b_path_sprites: gpu::BufferPiece,
}
@@ -115,13 +103,27 @@ struct ShaderSurfacesData {
struct PathSprite {
bounds: Bounds<ScaledPixels>,
color: Background,
- tile: AtlasTile,
+}
+
+/// Argument buffer layout for `draw_indirect` commands.
+#[repr(C)]
+#[derive(Copy, Clone, Debug, Default, Pod, Zeroable)]
+pub struct DrawIndirectArgs {
+ /// The number of vertices to draw.
+ pub vertex_count: u32,
+ /// The number of instances to draw.
+ pub instance_count: u32,
+ /// The index of the first vertex to draw.
+ pub first_vertex: u32,
+ /// The instance ID of the first instance to draw.
+ ///
+ /// Has to be 0 unless the backend supports a non-zero first instance (the `INDIRECT_FIRST_INSTANCE` capability, in wgpu terms).
+ pub first_instance: u32,
}
struct BladePipelines {
quads: gpu::RenderPipeline,
shadows: gpu::RenderPipeline,
- path_rasterization: gpu::RenderPipeline,
paths: gpu::RenderPipeline,
underlines: gpu::RenderPipeline,
mono_sprites: gpu::RenderPipeline,
@@ -130,7 +132,7 @@ struct BladePipelines {
}
impl BladePipelines {
- fn new(gpu: &gpu::Context, surface_info: gpu::SurfaceInfo, path_sample_count: u32) -> Self {
+ fn new(gpu: &gpu::Context, surface_info: gpu::SurfaceInfo, sample_count: u32) -> Self {
use gpu::ShaderData as _;
log::info!(
@@ -178,7 +180,10 @@ impl BladePipelines {
depth_stencil: None,
fragment: Some(shader.at("fs_quad")),
color_targets,
- multisample_state: gpu::MultisampleState::default(),
+ multisample_state: gpu::MultisampleState {
+ sample_count,
+ ..Default::default()
+ },
}),
shadows: gpu.create_render_pipeline(gpu::RenderPipelineDesc {
name: "shadows",
@@ -192,26 +197,8 @@ impl BladePipelines {
depth_stencil: None,
fragment: Some(shader.at("fs_shadow")),
color_targets,
- multisample_state: gpu::MultisampleState::default(),
- }),
- path_rasterization: gpu.create_render_pipeline(gpu::RenderPipelineDesc {
- name: "path_rasterization",
- data_layouts: &[&ShaderPathRasterizationData::layout()],
- vertex: shader.at("vs_path_rasterization"),
- vertex_fetches: &[],
- primitive: gpu::PrimitiveState {
- topology: gpu::PrimitiveTopology::TriangleList,
- ..Default::default()
- },
- depth_stencil: None,
- fragment: Some(shader.at("fs_path_rasterization")),
- color_targets: &[gpu::ColorTargetState {
- format: PATH_TEXTURE_FORMAT,
- blend: Some(gpu::BlendState::ADDITIVE),
- write_mask: gpu::ColorWrites::default(),
- }],
multisample_state: gpu::MultisampleState {
- sample_count: path_sample_count,
+ sample_count,
..Default::default()
},
}),
@@ -221,13 +208,16 @@ impl BladePipelines {
vertex: shader.at("vs_path"),
vertex_fetches: &[],
primitive: gpu::PrimitiveState {
- topology: gpu::PrimitiveTopology::TriangleStrip,
+ topology: gpu::PrimitiveTopology::TriangleList,
..Default::default()
},
depth_stencil: None,
fragment: Some(shader.at("fs_path")),
color_targets,
- multisample_state: gpu::MultisampleState::default(),
+ multisample_state: gpu::MultisampleState {
+ sample_count,
+ ..Default::default()
+ },
}),
underlines: gpu.create_render_pipeline(gpu::RenderPipelineDesc {
name: "underlines",
@@ -241,7 +231,10 @@ impl BladePipelines {
depth_stencil: None,
fragment: Some(shader.at("fs_underline")),
color_targets,
- multisample_state: gpu::MultisampleState::default(),
+ multisample_state: gpu::MultisampleState {
+ sample_count,
+ ..Default::default()
+ },
}),
mono_sprites: gpu.create_render_pipeline(gpu::RenderPipelineDesc {
name: "mono-sprites",
@@ -255,7 +248,10 @@ impl BladePipelines {
depth_stencil: None,
fragment: Some(shader.at("fs_mono_sprite")),
color_targets,
- multisample_state: gpu::MultisampleState::default(),
+ multisample_state: gpu::MultisampleState {
+ sample_count,
+ ..Default::default()
+ },
}),
poly_sprites: gpu.create_render_pipeline(gpu::RenderPipelineDesc {
name: "poly-sprites",
@@ -269,7 +265,10 @@ impl BladePipelines {
depth_stencil: None,
fragment: Some(shader.at("fs_poly_sprite")),
color_targets,
- multisample_state: gpu::MultisampleState::default(),
+ multisample_state: gpu::MultisampleState {
+ sample_count,
+ ..Default::default()
+ },
}),
surfaces: gpu.create_render_pipeline(gpu::RenderPipelineDesc {
name: "surfaces",
@@ -283,7 +282,10 @@ impl BladePipelines {
depth_stencil: None,
fragment: Some(shader.at("fs_surface")),
color_targets,
- multisample_state: gpu::MultisampleState::default(),
+ multisample_state: gpu::MultisampleState {
+ sample_count,
+ ..Default::default()
+ },
}),
}
}
@@ -291,7 +293,6 @@ impl BladePipelines {
fn destroy(&mut self, gpu: &gpu::Context) {
gpu.destroy_render_pipeline(&mut self.quads);
gpu.destroy_render_pipeline(&mut self.shadows);
- gpu.destroy_render_pipeline(&mut self.path_rasterization);
gpu.destroy_render_pipeline(&mut self.paths);
gpu.destroy_render_pipeline(&mut self.underlines);
gpu.destroy_render_pipeline(&mut self.mono_sprites);
@@ -317,12 +318,13 @@ pub struct BladeRenderer {
last_sync_point: Option<gpu::SyncPoint>,
pipelines: BladePipelines,
instance_belt: BufferBelt,
- path_tiles: HashMap<PathId, AtlasTile>,
atlas: Arc<BladeAtlas>,
atlas_sampler: gpu::Sampler,
#[cfg(target_os = "macos")]
core_video_texture_cache: CVMetalTextureCache,
- path_sample_count: u32,
+ sample_count: u32,
+ texture_msaa: Option<gpu::Texture>,
+ texture_view_msaa: Option<gpu::TextureView>,
}
impl BladeRenderer {
@@ -331,6 +333,18 @@ impl BladeRenderer {
window: &I,
config: BladeSurfaceConfig,
) -> anyhow::Result<Self> {
+ // workaround for https://github.com/zed-industries/zed/issues/26143
+ let sample_count = std::env::var("ZED_SAMPLE_COUNT")
+ .ok()
+ .or_else(|| std::env::var("ZED_PATH_SAMPLE_COUNT").ok())
+ .and_then(|v| v.parse().ok())
+ .or_else(|| {
+ [4, 2, 1]
+ .into_iter()
+ .find(|count| context.gpu.supports_texture_sample_count(*count))
+ })
+ .unwrap_or(1);
+
let surface_config = gpu::SurfaceConfig {
size: config.size,
usage: gpu::TextureUsage::TARGET,
@@ -344,22 +358,27 @@ impl BladeRenderer {
.create_surface_configured(window, surface_config)
.map_err(|err| anyhow::anyhow!("Failed to create surface: {err:?}"))?;
+ let (texture_msaa, texture_view_msaa) = create_msaa_texture_if_needed(
+ &context.gpu,
+ surface.info().format,
+ config.size.width,
+ config.size.height,
+ sample_count,
+ )
+ .unzip();
+
let command_encoder = context.gpu.create_command_encoder(gpu::CommandEncoderDesc {
name: "main",
buffer_count: 2,
});
- // workaround for https://github.com/zed-industries/zed/issues/26143
- let path_sample_count = std::env::var("ZED_PATH_SAMPLE_COUNT")
- .ok()
- .and_then(|v| v.parse().ok())
- .unwrap_or(DEFAULT_PATH_SAMPLE_COUNT);
- let pipelines = BladePipelines::new(&context.gpu, surface.info(), path_sample_count);
+
+ let pipelines = BladePipelines::new(&context.gpu, surface.info(), sample_count);
let instance_belt = BufferBelt::new(BufferBeltDescriptor {
memory: gpu::Memory::Shared,
min_chunk_size: 0x1000,
alignment: 0x40, // Vulkan `minStorageBufferOffsetAlignment` on Intel Xe
});
- let atlas = Arc::new(BladeAtlas::new(&context.gpu, path_sample_count));
+ let atlas = Arc::new(BladeAtlas::new(&context.gpu));
let atlas_sampler = context.gpu.create_sampler(gpu::SamplerDesc {
name: "atlas",
mag_filter: gpu::FilterMode::Linear,
@@ -383,12 +402,13 @@ impl BladeRenderer {
last_sync_point: None,
pipelines,
instance_belt,
- path_tiles: HashMap::default(),
atlas,
atlas_sampler,
#[cfg(target_os = "macos")]
core_video_texture_cache,
- path_sample_count,
+ sample_count,
+ texture_msaa,
+ texture_view_msaa,
})
}
@@ -441,6 +461,24 @@ impl BladeRenderer {
self.surface_config.size = gpu_size;
self.gpu
.reconfigure_surface(&mut self.surface, self.surface_config);
+
+ if let Some(texture_msaa) = self.texture_msaa {
+ self.gpu.destroy_texture(texture_msaa);
+ }
+ if let Some(texture_view_msaa) = self.texture_view_msaa {
+ self.gpu.destroy_texture_view(texture_view_msaa);
+ }
+
+ let (texture_msaa, texture_view_msaa) = create_msaa_texture_if_needed(
+ &self.gpu,
+ self.surface.info().format,
+ gpu_size.width,
+ gpu_size.height,
+ self.sample_count,
+ )
+ .unzip();
+ self.texture_msaa = texture_msaa;
+ self.texture_view_msaa = texture_view_msaa;
}
}
@@ -451,8 +489,7 @@ impl BladeRenderer {
self.gpu
.reconfigure_surface(&mut self.surface, self.surface_config);
self.pipelines.destroy(&self.gpu);
- self.pipelines =
- BladePipelines::new(&self.gpu, self.surface.info(), self.path_sample_count);
+ self.pipelines = BladePipelines::new(&self.gpu, self.surface.info(), self.sample_count);
}
}
@@ -490,80 +527,6 @@ impl BladeRenderer {
objc2::rc::Retained::as_ptr(&self.surface.metal_layer()) as *mut _
}
- #[profiling::function]
- fn rasterize_paths(&mut self, paths: &[Path<ScaledPixels>]) {
- self.path_tiles.clear();
- let mut vertices_by_texture_id = HashMap::default();
-
- for path in paths {
- let clipped_bounds = path
- .bounds
- .intersect(&path.content_mask.bounds)
- .map_origin(|origin| origin.floor())
- .map_size(|size| size.ceil());
- let tile = self.atlas.allocate_for_rendering(
- clipped_bounds.size.map(Into::into),
- AtlasTextureKind::Path,
- &mut self.command_encoder,
- );
- vertices_by_texture_id
- .entry(tile.texture_id)
- .or_insert(Vec::new())
- .extend(path.vertices.iter().map(|vertex| PathVertex {
- xy_position: vertex.xy_position - clipped_bounds.origin
- + tile.bounds.origin.map(Into::into),
- st_position: vertex.st_position,
- content_mask: ContentMask {
- bounds: tile.bounds.map(Into::into),
- },
- }));
- self.path_tiles.insert(path.id, tile);
- }
-
- for (texture_id, vertices) in vertices_by_texture_id {
- let tex_info = self.atlas.get_texture_info(texture_id);
- let globals = GlobalParams {
- viewport_size: [tex_info.size.width as f32, tex_info.size.height as f32],
- premultiplied_alpha: 0,
- pad: 0,
- };
-
- let vertex_buf = unsafe { self.instance_belt.alloc_typed(&vertices, &self.gpu) };
- let frame_view = tex_info.raw_view;
- let color_target = if let Some(msaa_view) = tex_info.msaa_view {
- gpu::RenderTarget {
- view: msaa_view,
- init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack),
- finish_op: gpu::FinishOp::ResolveTo(frame_view),
- }
- } else {
- gpu::RenderTarget {
- view: frame_view,
- init_op: gpu::InitOp::Clear(gpu::TextureColor::OpaqueBlack),
- finish_op: gpu::FinishOp::Store,
- }
- };
-
- if let mut pass = self.command_encoder.render(
- "paths",
- gpu::RenderTargetSet {
- colors: &[color_target],
- depth_stencil: None,
- },
- ) {
- let mut encoder = pass.with(&self.pipelines.path_rasterization);
- encoder.bind(
- 0,
- &ShaderPathRasterizationData {
- globals,
- b_path_vertices: vertex_buf,
- },
- );
- encoder.draw(0, vertices.len() as u32, 0, 1);
- }
- }
- }
-
pub fn destroy(&mut self) {
self.wait_for_gpu();
self.atlas.destroy();
@@ -572,17 +535,26 @@ impl BladeRenderer {
self.gpu.destroy_command_encoder(&mut self.command_encoder);
self.pipelines.destroy(&self.gpu);
self.gpu.destroy_surface(&mut self.surface);
+ if let Some(texture_msaa) = self.texture_msaa {
+ self.gpu.destroy_texture(texture_msaa);
+ }
+ if let Some(texture_view_msaa) = self.texture_view_msaa {
+ self.gpu.destroy_texture_view(texture_view_msaa);
+ }
}
pub fn draw(&mut self, scene: &Scene) {
self.command_encoder.start();
self.atlas.before_frame(&mut self.command_encoder);
- self.rasterize_paths(scene.paths());
let frame = {
profiling::scope!("acquire frame");
self.surface.acquire_frame()
};
+ let frame_view = frame.texture_view();
+ if let Some(texture_msaa) = self.texture_msaa {
+ self.command_encoder.init_texture(texture_msaa);
+ }
self.command_encoder.init_texture(frame.texture());
let globals = GlobalParams {
@@ -597,14 +569,25 @@ impl BladeRenderer {
pad: 0,
};
+ let target = if let Some(texture_view_msaa) = self.texture_view_msaa {
+ gpu::RenderTarget {
+ view: texture_view_msaa,
+ init_op: gpu::InitOp::Clear(gpu::TextureColor::TransparentBlack),
+ finish_op: gpu::FinishOp::ResolveTo(frame_view),
+ }
+ } else {
+ gpu::RenderTarget {
+ view: frame_view,
+ init_op: gpu::InitOp::Clear(gpu::TextureColor::TransparentBlack),
+ finish_op: gpu::FinishOp::Store,
+ }
+ };
+
+ // Draw into the render target: the MSAA texture when multisampling is enabled, otherwise the frame itself
if let mut pass = self.command_encoder.render(
"main",
gpu::RenderTargetSet {
- colors: &[gpu::RenderTarget {
- view: frame.texture_view(),
- init_op: gpu::InitOp::Clear(gpu::TextureColor::TransparentBlack),
- finish_op: gpu::FinishOp::Store,
- }],
+ colors: &[target],
depth_stencil: None,
},
) {
@@ -639,32 +622,55 @@ impl BladeRenderer {
}
PrimitiveBatch::Paths(paths) => {
let mut encoder = pass.with(&self.pipelines.paths);
- // todo(linux): group by texture ID
- for path in paths {
- let tile = &self.path_tiles[&path.id];
- let tex_info = self.atlas.get_texture_info(tile.texture_id);
- let origin = path.bounds.intersect(&path.content_mask.bounds).origin;
- let sprites = [PathSprite {
- bounds: Bounds {
- origin: origin.map(|p| p.floor()),
- size: tile.bounds.size.map(Into::into),
+
+ let mut vertices = Vec::new();
+ let mut sprites = Vec::with_capacity(paths.len());
+ let mut draw_indirect_commands = Vec::with_capacity(paths.len());
+ let mut first_vertex = 0;
+
+ for (i, path) in paths.iter().enumerate() {
+ draw_indirect_commands.push(DrawIndirectArgs {
+ vertex_count: path.vertices.len() as u32,
+ instance_count: 1,
+ first_vertex,
+ first_instance: i as u32,
+ });
+ first_vertex += path.vertices.len() as u32;
+
+ vertices.extend(path.vertices.iter().map(|v| PathVertex {
+ xy_position: v.xy_position,
+ content_mask: ContentMask {
+ bounds: path.content_mask.bounds,
},
+ }));
+
+ sprites.push(PathSprite {
+ bounds: path.bounds,
color: path.color,
- tile: (*tile).clone(),
- }];
-
- let instance_buf =
- unsafe { self.instance_belt.alloc_typed(&sprites, &self.gpu) };
- encoder.bind(
- 0,
- &ShaderPathsData {
- globals,
- t_sprite: tex_info.raw_view,
- s_sprite: self.atlas_sampler,
- b_path_sprites: instance_buf,
- },
- );
- encoder.draw(0, 4, 0, sprites.len() as u32);
+ });
+ }
+
+ let b_path_vertices =
+ unsafe { self.instance_belt.alloc_typed(&vertices, &self.gpu) };
+ let instance_buf =
+ unsafe { self.instance_belt.alloc_typed(&sprites, &self.gpu) };
+ let indirect_buf = unsafe {
+ self.instance_belt
+ .alloc_typed(&draw_indirect_commands, &self.gpu)
+ };
+
+ encoder.bind(
+ 0,
+ &ShaderPathsData {
+ globals,
+ b_path_vertices,
+ b_path_sprites: instance_buf,
+ },
+ );
+
+ for i in 0..paths.len() {
+ encoder.draw_indirect(indirect_buf.buffer.at(indirect_buf.offset
+ + (i * mem::size_of::<DrawIndirectArgs>()) as u64));
}
}
PrimitiveBatch::Underlines(underlines) => {
@@ -817,9 +823,47 @@ impl BladeRenderer {
profiling::scope!("finish");
self.instance_belt.flush(&sync_point);
self.atlas.after_frame(&sync_point);
- self.atlas.clear_textures(AtlasTextureKind::Path);
self.wait_for_gpu();
self.last_sync_point = Some(sync_point);
}
}
+
+fn create_msaa_texture_if_needed(
+ gpu: &gpu::Context,
+ format: gpu::TextureFormat,
+ width: u32,
+ height: u32,
+ sample_count: u32,
+) -> Option<(gpu::Texture, gpu::TextureView)> {
+ if sample_count <= 1 {
+ return None;
+ }
+
+ let texture_msaa = gpu.create_texture(gpu::TextureDesc {
+ name: "msaa",
+ format,
+ size: gpu::Extent {
+ width,
+ height,
+ depth: 1,
+ },
+ array_layer_count: 1,
+ mip_level_count: 1,
+ sample_count,
+ dimension: gpu::TextureDimension::D2,
+ usage: gpu::TextureUsage::TARGET,
+ external: None,
+ });
+ let texture_view_msaa = gpu.create_texture_view(
+ texture_msaa,
+ gpu::TextureViewDesc {
+ name: "msaa view",
+ format,
+ dimension: gpu::ViewDimension::D2,
+ subresources: &Default::default(),
+ },
+ );
+
+ Some((texture_msaa, texture_view_msaa))
+}
@@ -922,59 +922,23 @@ fn fs_shadow(input: ShadowVarying) -> @location(0) vec4<f32> {
return blend_color(input.color, alpha);
}
-// --- path rasterization --- //
+// --- paths --- //
struct PathVertex {
xy_position: vec2<f32>,
- st_position: vec2<f32>,
content_mask: Bounds,
}
-var<storage, read> b_path_vertices: array<PathVertex>;
-
-struct PathRasterizationVarying {
- @builtin(position) position: vec4<f32>,
- @location(0) st_position: vec2<f32>,
- //TODO: use `clip_distance` once Naga supports it
- @location(3) clip_distances: vec4<f32>,
-}
-
-@vertex
-fn vs_path_rasterization(@builtin(vertex_index) vertex_id: u32) -> PathRasterizationVarying {
- let v = b_path_vertices[vertex_id];
-
- var out = PathRasterizationVarying();
- out.position = to_device_position_impl(v.xy_position);
- out.st_position = v.st_position;
- out.clip_distances = distance_from_clip_rect_impl(v.xy_position, v.content_mask);
- return out;
-}
-
-@fragment
-fn fs_path_rasterization(input: PathRasterizationVarying) -> @location(0) f32 {
- let dx = dpdx(input.st_position);
- let dy = dpdy(input.st_position);
- if (any(input.clip_distances < vec4<f32>(0.0))) {
- return 0.0;
- }
-
- let gradient = 2.0 * input.st_position.xx * vec2<f32>(dx.x, dy.x) - vec2<f32>(dx.y, dy.y);
- let f = input.st_position.x * input.st_position.x - input.st_position.y;
- let distance = f / length(gradient);
- return saturate(0.5 - distance);
-}
-
-// --- paths --- //
struct PathSprite {
bounds: Bounds,
color: Background,
- tile: AtlasTile,
}
+var<storage, read> b_path_vertices: array<PathVertex>;
var<storage, read> b_path_sprites: array<PathSprite>;
struct PathVarying {
@builtin(position) position: vec4<f32>,
- @location(0) tile_position: vec2<f32>,
+ @location(0) clip_distances: vec4<f32>,
@location(1) @interpolate(flat) instance_id: u32,
@location(2) @interpolate(flat) color_solid: vec4<f32>,
@location(3) @interpolate(flat) color0: vec4<f32>,
@@ -983,13 +947,12 @@ struct PathVarying {
@vertex
fn vs_path(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) instance_id: u32) -> PathVarying {
- let unit_vertex = vec2<f32>(f32(vertex_id & 1u), 0.5 * f32(vertex_id & 2u));
+ let v = b_path_vertices[vertex_id];
let sprite = b_path_sprites[instance_id];
- // Don't apply content mask because it was already accounted for when rasterizing the path.
var out = PathVarying();
- out.position = to_device_position(unit_vertex, sprite.bounds);
- out.tile_position = to_tile_position(unit_vertex, sprite.tile);
+ out.position = to_device_position_impl(v.xy_position);
+ out.clip_distances = distance_from_clip_rect_impl(v.xy_position, v.content_mask);
out.instance_id = instance_id;
let gradient = prepare_gradient_color(
@@ -1006,13 +969,15 @@ fn vs_path(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index) insta
@fragment
fn fs_path(input: PathVarying) -> @location(0) vec4<f32> {
- let sample = textureSample(t_sprite, s_sprite, input.tile_position).r;
- let mask = 1.0 - abs(1.0 - sample % 2.0);
+ if any(input.clip_distances < vec4<f32>(0.0)) {
+ return vec4<f32>(0.0);
+ }
+
let sprite = b_path_sprites[input.instance_id];
let background = sprite.color;
let color = gradient_color(background, input.position.xy, sprite.bounds,
input.color_solid, input.color0, input.color1);
- return blend_color(color, mask);
+ return blend_color(color, 1.0);
}
// --- underlines --- //
@@ -23,7 +23,7 @@ pub(crate) use wayland::*;
#[cfg(feature = "x11")]
pub(crate) use x11::*;
-#[cfg(any(feature = "wayland", feature = "x11"))]
+#[cfg(all(feature = "screen-capture", any(feature = "wayland", feature = "x11")))]
pub(crate) type PlatformScreenCaptureFrame = scap::frame::Frame;
-#[cfg(not(any(feature = "wayland", feature = "x11")))]
+#[cfg(not(all(feature = "screen-capture", any(feature = "wayland", feature = "x11"))))]
pub(crate) type PlatformScreenCaptureFrame = ();
@@ -1,16 +1,14 @@
use std::cell::RefCell;
use std::rc::Rc;
-use anyhow::anyhow;
use calloop::{EventLoop, LoopHandle};
-use futures::channel::oneshot;
use util::ResultExt;
use crate::platform::linux::LinuxClient;
use crate::platform::{LinuxCommon, PlatformWindow};
use crate::{
AnyWindowHandle, CursorStyle, DisplayId, LinuxKeyboardLayout, PlatformDisplay,
- PlatformKeyboardLayout, ScreenCaptureSource, WindowParams,
+ PlatformKeyboardLayout, WindowParams,
};
pub struct HeadlessClientState {
@@ -67,15 +65,18 @@ impl LinuxClient for HeadlessClient {
None
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
false
}
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
- ) -> oneshot::Receiver<anyhow::Result<Vec<Box<dyn ScreenCaptureSource>>>> {
- let (mut tx, rx) = oneshot::channel();
- tx.send(Err(anyhow!(
+ ) -> futures::channel::oneshot::Receiver<anyhow::Result<Vec<Box<dyn crate::ScreenCaptureSource>>>>
+ {
+ let (mut tx, rx) = futures::channel::oneshot::channel();
+ tx.send(Err(anyhow::anyhow!(
"Headless mode does not support screen capture."
)))
.ok();
@@ -26,7 +26,7 @@ use crate::{
Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId,
ForegroundExecutor, Keymap, LinuxDispatcher, Menu, MenuItem, OwnedMenu, PathPromptOptions,
Pixels, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformTextSystem, PlatformWindow,
- Point, Result, ScreenCaptureSource, Task, WindowAppearance, WindowParams, px,
+ Point, Result, Task, WindowAppearance, WindowParams, px,
};
#[cfg(any(feature = "wayland", feature = "x11"))]
@@ -51,10 +51,12 @@ pub trait LinuxClient {
#[allow(unused)]
fn display(&self, id: DisplayId) -> Option<Rc<dyn PlatformDisplay>>;
fn primary_display(&self) -> Option<Rc<dyn PlatformDisplay>>;
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool;
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
- ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>>;
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn crate::ScreenCaptureSource>>>>;
fn open_window(
&self,
@@ -198,8 +200,8 @@ impl<P: LinuxClient + 'static> Platform for P {
app_path = app_path.display()
);
- // execute the script using /bin/bash
- let restart_process = Command::new("/bin/bash")
+ let restart_process = Command::new("/usr/bin/env")
+ .arg("bash")
.arg("-c")
.arg(script)
.process_group(0)
@@ -235,13 +237,15 @@ impl<P: LinuxClient + 'static> Platform for P {
self.displays()
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
self.is_screen_capture_supported()
}
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
- ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn crate::ScreenCaptureSource>>>> {
self.screen_capture_sources()
}
@@ -7,7 +7,6 @@ use std::{
time::{Duration, Instant},
};
-use anyhow::anyhow;
use calloop::{
EventLoop, LoopHandle,
timer::{TimeoutAction, Timer},
@@ -15,7 +14,6 @@ use calloop::{
use calloop_wayland_source::WaylandSource;
use collections::HashMap;
use filedescriptor::Pipe;
-use futures::channel::oneshot;
use http_client::Url;
use smallvec::SmallVec;
use util::ResultExt;
@@ -77,8 +75,8 @@ use crate::{
FileDropEvent, ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, LinuxCommon,
LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent,
MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay,
- PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScaledPixels, ScreenCaptureSource,
- ScrollDelta, ScrollWheelEvent, Size, TouchPhase, WindowParams, point, px, size,
+ PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScaledPixels, ScrollDelta,
+ ScrollWheelEvent, Size, TouchPhase, WindowParams, point, px, size,
};
use crate::{
SharedString,
@@ -666,20 +664,25 @@ impl LinuxClient for WaylandClient {
None
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
false
}
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
- ) -> oneshot::Receiver<anyhow::Result<Vec<Box<dyn ScreenCaptureSource>>>> {
+ ) -> futures::channel::oneshot::Receiver<anyhow::Result<Vec<Box<dyn crate::ScreenCaptureSource>>>>
+ {
// TODO: Get screen capture working on wayland. Be sure to try window resizing as that may
// be tricky.
//
// start_scap_default_target_source()
- let (sources_tx, sources_rx) = oneshot::channel();
+ let (sources_tx, sources_rx) = futures::channel::oneshot::channel();
sources_tx
- .send(Err(anyhow!("Wayland screen capture not yet implemented.")))
+ .send(Err(anyhow::anyhow!(
+ "Wayland screen capture not yet implemented."
+ )))
.ok();
sources_rx
}
@@ -15,7 +15,6 @@ use calloop::{
generic::{FdWrapper, Generic},
};
use collections::HashMap;
-use futures::channel::oneshot;
use http_client::Url;
use log::Level;
use smallvec::SmallVec;
@@ -59,13 +58,12 @@ use crate::platform::{
reveal_path_internal,
xdg_desktop_portal::{Event as XDPEvent, XDPEventSource},
},
- scap_screen_capture::scap_screen_sources,
};
use crate::{
AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, DisplayId, FileDropEvent, Keystroke,
LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, Platform,
PlatformDisplay, PlatformInput, PlatformKeyboardLayout, Point, RequestFrameOptions,
- ScaledPixels, ScreenCaptureSource, ScrollDelta, Size, TouchPhase, WindowParams, X11Window,
+ ScaledPixels, ScrollDelta, Size, TouchPhase, WindowParams, X11Window,
modifiers_from_xinput_info, point, px,
};
@@ -1479,14 +1477,19 @@ impl LinuxClient for X11Client {
))
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
true
}
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
- ) -> oneshot::Receiver<anyhow::Result<Vec<Box<dyn ScreenCaptureSource>>>> {
- scap_screen_sources(&self.0.borrow().common.foreground_executor)
+ ) -> futures::channel::oneshot::Receiver<anyhow::Result<Vec<Box<dyn crate::ScreenCaptureSource>>>>
+ {
+ crate::platform::scap_screen_capture::scap_screen_sources(
+ &self.0.borrow().common.foreground_executor,
+ )
}
fn open_window(
@@ -5,6 +5,8 @@ mod display;
mod display_link;
mod events;
mod keyboard;
+
+#[cfg(feature = "screen-capture")]
mod screen_capture;
#[cfg(not(feature = "macos-blade"))]
@@ -13,14 +13,12 @@ use std::borrow::Cow;
pub(crate) struct MetalAtlas(Mutex<MetalAtlasState>);
impl MetalAtlas {
- pub(crate) fn new(device: Device, path_sample_count: u32) -> Self {
+ pub(crate) fn new(device: Device) -> Self {
MetalAtlas(Mutex::new(MetalAtlasState {
device: AssertSend(device),
monochrome_textures: Default::default(),
polychrome_textures: Default::default(),
- path_textures: Default::default(),
tiles_by_key: Default::default(),
- path_sample_count,
}))
}
@@ -28,10 +26,7 @@ impl MetalAtlas {
self.0.lock().texture(id).metal_texture.clone()
}
- pub(crate) fn msaa_texture(&self, id: AtlasTextureId) -> Option<metal::Texture> {
- self.0.lock().texture(id).msaa_texture.clone()
- }
-
+ #[allow(dead_code)]
pub(crate) fn allocate(
&self,
size: Size<DevicePixels>,
@@ -40,12 +35,12 @@ impl MetalAtlas {
self.0.lock().allocate(size, texture_kind)
}
+ #[allow(dead_code)]
pub(crate) fn clear_textures(&self, texture_kind: AtlasTextureKind) {
let mut lock = self.0.lock();
let textures = match texture_kind {
AtlasTextureKind::Monochrome => &mut lock.monochrome_textures,
AtlasTextureKind::Polychrome => &mut lock.polychrome_textures,
- AtlasTextureKind::Path => &mut lock.path_textures,
};
for texture in textures.iter_mut() {
texture.clear();
@@ -57,9 +52,7 @@ struct MetalAtlasState {
device: AssertSend<Device>,
monochrome_textures: AtlasTextureList<MetalAtlasTexture>,
polychrome_textures: AtlasTextureList<MetalAtlasTexture>,
- path_textures: AtlasTextureList<MetalAtlasTexture>,
tiles_by_key: FxHashMap<AtlasKey, AtlasTile>,
- path_sample_count: u32,
}
impl PlatformAtlas for MetalAtlas {
@@ -94,7 +87,6 @@ impl PlatformAtlas for MetalAtlas {
let textures = match id.kind {
AtlasTextureKind::Monochrome => &mut lock.monochrome_textures,
AtlasTextureKind::Polychrome => &mut lock.polychrome_textures,
- AtlasTextureKind::Path => &mut lock.polychrome_textures,
};
let Some(texture_slot) = textures
@@ -128,7 +120,6 @@ impl MetalAtlasState {
let textures = match texture_kind {
AtlasTextureKind::Monochrome => &mut self.monochrome_textures,
AtlasTextureKind::Polychrome => &mut self.polychrome_textures,
- AtlasTextureKind::Path => &mut self.path_textures,
};
if let Some(tile) = textures
@@ -173,31 +164,14 @@ impl MetalAtlasState {
pixel_format = metal::MTLPixelFormat::BGRA8Unorm;
usage = metal::MTLTextureUsage::ShaderRead;
}
- AtlasTextureKind::Path => {
- pixel_format = metal::MTLPixelFormat::R16Float;
- usage = metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead;
- }
}
texture_descriptor.set_pixel_format(pixel_format);
texture_descriptor.set_usage(usage);
let metal_texture = self.device.new_texture(&texture_descriptor);
- // We currently only enable MSAA for path textures.
- let msaa_texture = if self.path_sample_count > 1 && kind == AtlasTextureKind::Path {
- let mut descriptor = texture_descriptor.clone();
- descriptor.set_texture_type(metal::MTLTextureType::D2Multisample);
- descriptor.set_storage_mode(metal::MTLStorageMode::Private);
- descriptor.set_sample_count(self.path_sample_count as _);
- let msaa_texture = self.device.new_texture(&descriptor);
- Some(msaa_texture)
- } else {
- None
- };
-
let texture_list = match kind {
AtlasTextureKind::Monochrome => &mut self.monochrome_textures,
AtlasTextureKind::Polychrome => &mut self.polychrome_textures,
- AtlasTextureKind::Path => &mut self.path_textures,
};
let index = texture_list.free_list.pop();
@@ -209,7 +183,6 @@ impl MetalAtlasState {
},
allocator: etagere::BucketedAtlasAllocator::new(size.into()),
metal_texture: AssertSend(metal_texture),
- msaa_texture: AssertSend(msaa_texture),
live_atlas_keys: 0,
};
@@ -226,7 +199,6 @@ impl MetalAtlasState {
let textures = match id.kind {
crate::AtlasTextureKind::Monochrome => &self.monochrome_textures,
crate::AtlasTextureKind::Polychrome => &self.polychrome_textures,
- crate::AtlasTextureKind::Path => &self.path_textures,
};
textures[id.index as usize].as_ref().unwrap()
}
@@ -236,7 +208,6 @@ struct MetalAtlasTexture {
id: AtlasTextureId,
allocator: BucketedAtlasAllocator,
metal_texture: AssertSend<metal::Texture>,
- msaa_texture: AssertSend<Option<metal::Texture>>,
live_atlas_keys: u32,
}
@@ -1,27 +1,28 @@
use super::metal_atlas::MetalAtlas;
use crate::{
- AtlasTextureId, AtlasTextureKind, AtlasTile, Background, Bounds, ContentMask, DevicePixels,
- MonochromeSprite, PaintSurface, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch,
- Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline, point, size,
+ AtlasTextureId, Background, Bounds, ContentMask, DevicePixels, MonochromeSprite, PaintSurface,
+ Path, PathVertex, PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size,
+ Surface, Underline, point, size,
};
-use anyhow::{Context as _, Result};
+use anyhow::Result;
use block::ConcreteBlock;
use cocoa::{
base::{NO, YES},
foundation::{NSSize, NSUInteger},
quartzcore::AutoresizingMask,
};
-use collections::HashMap;
use core_foundation::base::TCFType;
use core_video::{
metal_texture::CVMetalTextureGetTexture, metal_texture_cache::CVMetalTextureCache,
pixel_buffer::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
};
use foreign_types::{ForeignType, ForeignTypeRef};
-use metal::{CAMetalLayer, CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
+use metal::{
+ CAMetalLayer, CommandQueue, MTLDrawPrimitivesIndirectArguments, MTLPixelFormat,
+ MTLResourceOptions, NSRange,
+};
use objc::{self, msg_send, sel, sel_impl};
use parking_lot::Mutex;
-use smallvec::SmallVec;
use std::{cell::Cell, ffi::c_void, mem, ptr, sync::Arc};
// Exported to metal
@@ -31,9 +32,6 @@ pub(crate) type PointF = crate::Point<f32>;
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
#[cfg(feature = "runtime_shaders")]
const SHADERS_SOURCE_FILE: &str = include_str!(concat!(env!("OUT_DIR"), "/stitched_shaders.metal"));
-// Use 4x MSAA, all devices support it.
-// https://developer.apple.com/documentation/metal/mtldevice/1433355-supportstexturesamplecount
-const PATH_SAMPLE_COUNT: u32 = 4;
pub type Context = Arc<Mutex<InstanceBufferPool>>;
pub type Renderer = MetalRenderer;
@@ -98,8 +96,7 @@ pub(crate) struct MetalRenderer {
layer: metal::MetalLayer,
presents_with_transaction: bool,
command_queue: CommandQueue,
- paths_rasterization_pipeline_state: metal::RenderPipelineState,
- path_sprites_pipeline_state: metal::RenderPipelineState,
+ path_pipeline_state: metal::RenderPipelineState,
shadows_pipeline_state: metal::RenderPipelineState,
quads_pipeline_state: metal::RenderPipelineState,
underlines_pipeline_state: metal::RenderPipelineState,
@@ -111,6 +108,8 @@ pub(crate) struct MetalRenderer {
instance_buffer_pool: Arc<Mutex<InstanceBufferPool>>,
sprite_atlas: Arc<MetalAtlas>,
core_video_texture_cache: core_video::metal_texture_cache::CVMetalTextureCache,
+ sample_count: u64,
+ msaa_texture: Option<metal::Texture>,
}
impl MetalRenderer {
@@ -169,22 +168,19 @@ impl MetalRenderer {
MTLResourceOptions::StorageModeManaged,
);
- let paths_rasterization_pipeline_state = build_path_rasterization_pipeline_state(
- &device,
- &library,
- "paths_rasterization",
- "path_rasterization_vertex",
- "path_rasterization_fragment",
- MTLPixelFormat::R16Float,
- PATH_SAMPLE_COUNT,
- );
- let path_sprites_pipeline_state = build_pipeline_state(
+ let sample_count = [4, 2, 1]
+ .into_iter()
+ .find(|count| device.supports_texture_sample_count(*count))
+ .unwrap_or(1);
+
+ let path_pipeline_state = build_pipeline_state(
&device,
&library,
- "path_sprites",
- "path_sprite_vertex",
- "path_sprite_fragment",
+ "paths",
+ "path_vertex",
+ "path_fragment",
MTLPixelFormat::BGRA8Unorm,
+ sample_count,
);
let shadows_pipeline_state = build_pipeline_state(
&device,
@@ -193,6 +189,7 @@ impl MetalRenderer {
"shadow_vertex",
"shadow_fragment",
MTLPixelFormat::BGRA8Unorm,
+ sample_count,
);
let quads_pipeline_state = build_pipeline_state(
&device,
@@ -201,6 +198,7 @@ impl MetalRenderer {
"quad_vertex",
"quad_fragment",
MTLPixelFormat::BGRA8Unorm,
+ sample_count,
);
let underlines_pipeline_state = build_pipeline_state(
&device,
@@ -209,6 +207,7 @@ impl MetalRenderer {
"underline_vertex",
"underline_fragment",
MTLPixelFormat::BGRA8Unorm,
+ sample_count,
);
let monochrome_sprites_pipeline_state = build_pipeline_state(
&device,
@@ -217,6 +216,7 @@ impl MetalRenderer {
"monochrome_sprite_vertex",
"monochrome_sprite_fragment",
MTLPixelFormat::BGRA8Unorm,
+ sample_count,
);
let polychrome_sprites_pipeline_state = build_pipeline_state(
&device,
@@ -225,6 +225,7 @@ impl MetalRenderer {
"polychrome_sprite_vertex",
"polychrome_sprite_fragment",
MTLPixelFormat::BGRA8Unorm,
+ sample_count,
);
let surfaces_pipeline_state = build_pipeline_state(
&device,
@@ -233,20 +234,21 @@ impl MetalRenderer {
"surface_vertex",
"surface_fragment",
MTLPixelFormat::BGRA8Unorm,
+ sample_count,
);
let command_queue = device.new_command_queue();
- let sprite_atlas = Arc::new(MetalAtlas::new(device.clone(), PATH_SAMPLE_COUNT));
+ let sprite_atlas = Arc::new(MetalAtlas::new(device.clone()));
let core_video_texture_cache =
CVMetalTextureCache::new(None, device.clone(), None).unwrap();
+ let msaa_texture = create_msaa_texture(&device, &layer, sample_count);
Self {
device,
layer,
presents_with_transaction: false,
command_queue,
- paths_rasterization_pipeline_state,
- path_sprites_pipeline_state,
+ path_pipeline_state,
shadows_pipeline_state,
quads_pipeline_state,
underlines_pipeline_state,
@@ -257,6 +259,8 @@ impl MetalRenderer {
instance_buffer_pool,
sprite_atlas,
core_video_texture_cache,
+ sample_count,
+ msaa_texture,
}
}
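
Note: instead of the fixed 4x path-only MSAA removed above, the renderer now probes the device once for the highest supported sample count and applies it to every pipeline. A minimal sketch of that probe, using the same `metal` crate API the constructor calls (per the removed comment's Apple doc link, every device supports 4x, so this normally returns 4):

    // Pick the highest multisample count the GPU reports as supported,
    // falling back to 1 (MSAA effectively off).
    fn pick_sample_count(device: &metal::DeviceRef) -> u64 {
        [4u64, 2, 1]
            .into_iter()
            .find(|&count| device.supports_texture_sample_count(count))
            .unwrap_or(1)
    }
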
@@ -289,6 +293,8 @@ impl MetalRenderer {
setDrawableSize: size
];
}
+
+ self.msaa_texture = create_msaa_texture(&self.device, &self.layer, self.sample_count);
}
pub fn update_transparency(&self, _transparent: bool) {
@@ -375,25 +381,23 @@ impl MetalRenderer {
let command_queue = self.command_queue.clone();
let command_buffer = command_queue.new_command_buffer();
let mut instance_offset = 0;
-
- let path_tiles = self
- .rasterize_paths(
- scene.paths(),
- instance_buffer,
- &mut instance_offset,
- command_buffer,
- )
- .with_context(|| format!("rasterizing {} paths", scene.paths().len()))?;
-
let render_pass_descriptor = metal::RenderPassDescriptor::new();
let color_attachment = render_pass_descriptor
.color_attachments()
.object_at(0)
.unwrap();
- color_attachment.set_texture(Some(drawable.texture()));
- color_attachment.set_load_action(metal::MTLLoadAction::Clear);
- color_attachment.set_store_action(metal::MTLStoreAction::Store);
+ if let Some(msaa_texture_ref) = self.msaa_texture.as_deref() {
+ color_attachment.set_texture(Some(msaa_texture_ref));
+ color_attachment.set_load_action(metal::MTLLoadAction::Clear);
+ color_attachment.set_store_action(metal::MTLStoreAction::MultisampleResolve);
+ color_attachment.set_resolve_texture(Some(drawable.texture()));
+ } else {
+ color_attachment.set_load_action(metal::MTLLoadAction::Clear);
+ color_attachment.set_texture(Some(drawable.texture()));
+ color_attachment.set_store_action(metal::MTLStoreAction::Store);
+ }
+
let alpha = if self.layer.is_opaque() { 1. } else { 0. };
color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);
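
Note: with whole-frame MSAA the pass renders into the offscreen multisample texture and resolves into the drawable when the pass ends, rather than storing the drawable directly. A self-contained sketch of that attachment wiring, condensing the branch above (function name is hypothetical):

    // Wire a render pass to draw into a multisample texture and resolve into the
    // single-sample drawable texture at the end of the pass.
    fn configure_msaa_resolve(
        descriptor: &metal::RenderPassDescriptorRef,
        msaa_texture: &metal::TextureRef,
        drawable_texture: &metal::TextureRef,
    ) {
        let attachment = descriptor.color_attachments().object_at(0).unwrap();
        attachment.set_texture(Some(msaa_texture));
        attachment.set_resolve_texture(Some(drawable_texture));
        attachment.set_load_action(metal::MTLLoadAction::Clear);
        // MultisampleResolve averages the samples into the resolve texture and then
        // discards the multisample contents, so they are never written back to memory.
        attachment.set_store_action(metal::MTLStoreAction::MultisampleResolve);
    }
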
@@ -425,7 +429,6 @@ impl MetalRenderer {
),
PrimitiveBatch::Paths(paths) => self.draw_paths(
paths,
- &path_tiles,
instance_buffer,
&mut instance_offset,
viewport_size,
@@ -493,106 +496,6 @@ impl MetalRenderer {
Ok(command_buffer.to_owned())
}
- fn rasterize_paths(
- &self,
- paths: &[Path<ScaledPixels>],
- instance_buffer: &mut InstanceBuffer,
- instance_offset: &mut usize,
- command_buffer: &metal::CommandBufferRef,
- ) -> Option<HashMap<PathId, AtlasTile>> {
- self.sprite_atlas.clear_textures(AtlasTextureKind::Path);
-
- let mut tiles = HashMap::default();
- let mut vertices_by_texture_id = HashMap::default();
- for path in paths {
- let clipped_bounds = path.bounds.intersect(&path.content_mask.bounds);
-
- let tile = self
- .sprite_atlas
- .allocate(clipped_bounds.size.map(Into::into), AtlasTextureKind::Path)?;
- vertices_by_texture_id
- .entry(tile.texture_id)
- .or_insert(Vec::new())
- .extend(path.vertices.iter().map(|vertex| PathVertex {
- xy_position: vertex.xy_position - clipped_bounds.origin
- + tile.bounds.origin.map(Into::into),
- st_position: vertex.st_position,
- content_mask: ContentMask {
- bounds: tile.bounds.map(Into::into),
- },
- }));
- tiles.insert(path.id, tile);
- }
-
- for (texture_id, vertices) in vertices_by_texture_id {
- align_offset(instance_offset);
- let vertices_bytes_len = mem::size_of_val(vertices.as_slice());
- let next_offset = *instance_offset + vertices_bytes_len;
- if next_offset > instance_buffer.size {
- return None;
- }
-
- let render_pass_descriptor = metal::RenderPassDescriptor::new();
- let color_attachment = render_pass_descriptor
- .color_attachments()
- .object_at(0)
- .unwrap();
-
- let texture = self.sprite_atlas.metal_texture(texture_id);
- let msaa_texture = self.sprite_atlas.msaa_texture(texture_id);
-
- if let Some(msaa_texture) = msaa_texture {
- color_attachment.set_texture(Some(&msaa_texture));
- color_attachment.set_resolve_texture(Some(&texture));
- color_attachment.set_load_action(metal::MTLLoadAction::Clear);
- color_attachment.set_store_action(metal::MTLStoreAction::MultisampleResolve);
- } else {
- color_attachment.set_texture(Some(&texture));
- color_attachment.set_load_action(metal::MTLLoadAction::Clear);
- color_attachment.set_store_action(metal::MTLStoreAction::Store);
- }
- color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
-
- let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);
- command_encoder.set_render_pipeline_state(&self.paths_rasterization_pipeline_state);
- command_encoder.set_vertex_buffer(
- PathRasterizationInputIndex::Vertices as u64,
- Some(&instance_buffer.metal_buffer),
- *instance_offset as u64,
- );
- let texture_size = Size {
- width: DevicePixels::from(texture.width()),
- height: DevicePixels::from(texture.height()),
- };
- command_encoder.set_vertex_bytes(
- PathRasterizationInputIndex::AtlasTextureSize as u64,
- mem::size_of_val(&texture_size) as u64,
- &texture_size as *const Size<DevicePixels> as *const _,
- );
-
- let buffer_contents = unsafe {
- (instance_buffer.metal_buffer.contents() as *mut u8).add(*instance_offset)
- };
- unsafe {
- ptr::copy_nonoverlapping(
- vertices.as_ptr() as *const u8,
- buffer_contents,
- vertices_bytes_len,
- );
- }
-
- command_encoder.draw_primitives(
- metal::MTLPrimitiveType::Triangle,
- 0,
- vertices.len() as u64,
- );
- command_encoder.end_encoding();
- *instance_offset = next_offset;
- }
-
- Some(tiles)
- }
-
fn draw_shadows(
&self,
shadows: &[Shadow],
@@ -718,7 +621,6 @@ impl MetalRenderer {
fn draw_paths(
&self,
paths: &[Path<ScaledPixels>],
- tiles_by_path_id: &HashMap<PathId, AtlasTile>,
instance_buffer: &mut InstanceBuffer,
instance_offset: &mut usize,
viewport_size: Size<DevicePixels>,
@@ -728,100 +630,108 @@ impl MetalRenderer {
return true;
}
- command_encoder.set_render_pipeline_state(&self.path_sprites_pipeline_state);
- command_encoder.set_vertex_buffer(
- SpriteInputIndex::Vertices as u64,
- Some(&self.unit_vertices),
- 0,
- );
- command_encoder.set_vertex_bytes(
- SpriteInputIndex::ViewportSize as u64,
- mem::size_of_val(&viewport_size) as u64,
- &viewport_size as *const Size<DevicePixels> as *const _,
- );
+ command_encoder.set_render_pipeline_state(&self.path_pipeline_state);
- let mut prev_texture_id = None;
- let mut sprites = SmallVec::<[_; 1]>::new();
- let mut paths_and_tiles = paths
- .iter()
- .map(|path| (path, tiles_by_path_id.get(&path.id).unwrap()))
- .peekable();
+ unsafe {
+ let base_addr = instance_buffer.metal_buffer.contents();
+ let mut p = (base_addr as *mut u8).add(*instance_offset);
+ let mut draw_indirect_commands = Vec::with_capacity(paths.len());
+
+ // copy vertices
+ let vertices_offset = (p as usize) - (base_addr as usize);
+ let mut first_vertex = 0;
+ for (i, path) in paths.iter().enumerate() {
+ if (p as usize) - (base_addr as usize)
+ + (mem::size_of::<PathVertex<ScaledPixels>>() * path.vertices.len())
+ > instance_buffer.size
+ {
+ return false;
+ }
- loop {
- if let Some((path, tile)) = paths_and_tiles.peek() {
- if prev_texture_id.map_or(true, |texture_id| texture_id == tile.texture_id) {
- prev_texture_id = Some(tile.texture_id);
- let origin = path.bounds.intersect(&path.content_mask.bounds).origin;
- sprites.push(PathSprite {
- bounds: Bounds {
- origin: origin.map(|p| p.floor()),
- size: tile.bounds.size.map(Into::into),
+ for v in &path.vertices {
+ *(p as *mut PathVertex<ScaledPixels>) = PathVertex {
+ xy_position: v.xy_position,
+ content_mask: ContentMask {
+ bounds: path.content_mask.bounds,
},
- color: path.color,
- tile: (*tile).clone(),
- });
- paths_and_tiles.next();
- continue;
+ };
+ p = p.add(mem::size_of::<PathVertex<ScaledPixels>>());
}
+
+ draw_indirect_commands.push(MTLDrawPrimitivesIndirectArguments {
+ vertexCount: path.vertices.len() as u32,
+ instanceCount: 1,
+ vertexStart: first_vertex,
+ baseInstance: i as u32,
+ });
+ first_vertex += path.vertices.len() as u32;
}
- if sprites.is_empty() {
- break;
- } else {
- align_offset(instance_offset);
- let texture_id = prev_texture_id.take().unwrap();
- let texture: metal::Texture = self.sprite_atlas.metal_texture(texture_id);
- let texture_size = size(
- DevicePixels(texture.width() as i32),
- DevicePixels(texture.height() as i32),
- );
+ // copy sprites
+ let sprites_offset = (p as u64) - (base_addr as u64);
+ if (p as usize) - (base_addr as usize) + (mem::size_of::<PathSprite>() * paths.len())
+ > instance_buffer.size
+ {
+ return false;
+ }
+ for path in paths {
+ *(p as *mut PathSprite) = PathSprite {
+ bounds: path.bounds,
+ color: path.color,
+ };
+ p = p.add(mem::size_of::<PathSprite>());
+ }
- command_encoder.set_vertex_buffer(
- SpriteInputIndex::Sprites as u64,
- Some(&instance_buffer.metal_buffer),
- *instance_offset as u64,
- );
- command_encoder.set_vertex_bytes(
- SpriteInputIndex::AtlasTextureSize as u64,
- mem::size_of_val(&texture_size) as u64,
- &texture_size as *const Size<DevicePixels> as *const _,
- );
- command_encoder.set_fragment_buffer(
- SpriteInputIndex::Sprites as u64,
- Some(&instance_buffer.metal_buffer),
- *instance_offset as u64,
- );
- command_encoder
- .set_fragment_texture(SpriteInputIndex::AtlasTexture as u64, Some(&texture));
+ // copy indirect commands
+ let icb_bytes_len = mem::size_of_val(draw_indirect_commands.as_slice());
+ let icb_offset = (p as u64) - (base_addr as u64);
+ if (p as usize) - (base_addr as usize) + icb_bytes_len > instance_buffer.size {
+ return false;
+ }
+ ptr::copy_nonoverlapping(
+ draw_indirect_commands.as_ptr() as *const u8,
+ p,
+ icb_bytes_len,
+ );
+ p = p.add(icb_bytes_len);
- let sprite_bytes_len = mem::size_of_val(sprites.as_slice());
- let next_offset = *instance_offset + sprite_bytes_len;
- if next_offset > instance_buffer.size {
- return false;
- }
+ // draw path
+ command_encoder.set_vertex_buffer(
+ PathInputIndex::Vertices as u64,
+ Some(&instance_buffer.metal_buffer),
+ vertices_offset as u64,
+ );
- let buffer_contents = unsafe {
- (instance_buffer.metal_buffer.contents() as *mut u8).add(*instance_offset)
- };
+ command_encoder.set_vertex_bytes(
+ PathInputIndex::ViewportSize as u64,
+ mem::size_of_val(&viewport_size) as u64,
+ &viewport_size as *const Size<DevicePixels> as *const _,
+ );
- unsafe {
- ptr::copy_nonoverlapping(
- sprites.as_ptr() as *const u8,
- buffer_contents,
- sprite_bytes_len,
- );
- }
+ command_encoder.set_vertex_buffer(
+ PathInputIndex::Sprites as u64,
+ Some(&instance_buffer.metal_buffer),
+ sprites_offset,
+ );
- command_encoder.draw_primitives_instanced(
+ command_encoder.set_fragment_buffer(
+ PathInputIndex::Sprites as u64,
+ Some(&instance_buffer.metal_buffer),
+ sprites_offset,
+ );
+
+ for i in 0..paths.len() {
+ command_encoder.draw_primitives_indirect(
metal::MTLPrimitiveType::Triangle,
- 0,
- 6,
- sprites.len() as u64,
+ &instance_buffer.metal_buffer,
+ icb_offset
+ + (i * std::mem::size_of::<MTLDrawPrimitivesIndirectArguments>()) as u64,
);
- *instance_offset = next_offset;
- sprites.clear();
}
+
+ *instance_offset = (p as usize) - (base_addr as usize);
}
+
true
}
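
Note: paths are no longer rasterized into an atlas first; their vertices and per-path `PathSprite` records go straight into the instance buffer, and each path is issued as its own indirect draw. A compact sketch of how the argument records are built (one per path, mirroring the loop above):

    // One MTLDrawPrimitivesIndirectArguments per path: the vertex range covers that
    // path's triangles, and baseInstance = i makes sprites[i] visible to the shader
    // through [[instance_id]].
    fn indirect_args(
        paths: &[Path<ScaledPixels>],
    ) -> Vec<metal::MTLDrawPrimitivesIndirectArguments> {
        let mut args = Vec::with_capacity(paths.len());
        let mut first_vertex = 0u32;
        for (i, path) in paths.iter().enumerate() {
            args.push(metal::MTLDrawPrimitivesIndirectArguments {
                vertexCount: path.vertices.len() as u32,
                instanceCount: 1,
                vertexStart: first_vertex,
                baseInstance: i as u32,
            });
            first_vertex += path.vertices.len() as u32;
        }
        // Each record is consumed by one draw_primitives_indirect call at
        // icb_offset + i * size_of::<MTLDrawPrimitivesIndirectArguments>().
        args
    }
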
@@ -1143,6 +1053,7 @@ fn build_pipeline_state(
vertex_fn_name: &str,
fragment_fn_name: &str,
pixel_format: metal::MTLPixelFormat,
+ sample_count: u64,
) -> metal::RenderPipelineState {
let vertex_fn = library
.get_function(vertex_fn_name, None)
@@ -1155,6 +1066,7 @@ fn build_pipeline_state(
descriptor.set_label(label);
descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
+ descriptor.set_sample_count(sample_count);
let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
color_attachment.set_pixel_format(pixel_format);
color_attachment.set_blending_enabled(true);
@@ -1170,48 +1082,43 @@ fn build_pipeline_state(
.expect("could not create render pipeline state")
}
-fn build_path_rasterization_pipeline_state(
- device: &metal::DeviceRef,
- library: &metal::LibraryRef,
- label: &str,
- vertex_fn_name: &str,
- fragment_fn_name: &str,
- pixel_format: metal::MTLPixelFormat,
- path_sample_count: u32,
-) -> metal::RenderPipelineState {
- let vertex_fn = library
- .get_function(vertex_fn_name, None)
- .expect("error locating vertex function");
- let fragment_fn = library
- .get_function(fragment_fn_name, None)
- .expect("error locating fragment function");
+// Align to multiples of 256 to make Metal happy.
+fn align_offset(offset: &mut usize) {
+ *offset = (*offset).div_ceil(256) * 256;
+}
- let descriptor = metal::RenderPipelineDescriptor::new();
- descriptor.set_label(label);
- descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
- descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
- if path_sample_count > 1 {
- descriptor.set_raster_sample_count(path_sample_count as _);
- descriptor.set_alpha_to_coverage_enabled(true);
+fn create_msaa_texture(
+ device: &metal::Device,
+ layer: &metal::MetalLayer,
+ sample_count: u64,
+) -> Option<metal::Texture> {
+ let viewport_size = layer.drawable_size();
+ let width = viewport_size.width.ceil() as u64;
+ let height = viewport_size.height.ceil() as u64;
+
+ if width == 0 || height == 0 {
+ return None;
}
- let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
- color_attachment.set_pixel_format(pixel_format);
- color_attachment.set_blending_enabled(true);
- color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
- color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
- color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
- color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
- color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
- color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
- device
- .new_render_pipeline_state(&descriptor)
- .expect("could not create render pipeline state")
-}
+ if sample_count <= 1 {
+ return None;
+ }
-// Align to multiples of 256 make Metal happy.
-fn align_offset(offset: &mut usize) {
- *offset = (*offset).div_ceil(256) * 256;
+ let texture_descriptor = metal::TextureDescriptor::new();
+ texture_descriptor.set_texture_type(metal::MTLTextureType::D2Multisample);
+
+    // The default MTLStorageMode is `shared` only on Apple silicon GPUs. Use `private` for both Apple and Intel GPUs.
+ // Reference: https://developer.apple.com/documentation/metal/choosing-a-resource-storage-mode-for-apple-gpus
+ texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
+
+ texture_descriptor.set_width(width);
+ texture_descriptor.set_height(height);
+ texture_descriptor.set_pixel_format(layer.pixel_format());
+ texture_descriptor.set_usage(metal::MTLTextureUsage::RenderTarget);
+ texture_descriptor.set_sample_count(sample_count);
+
+ let metal_texture = device.new_texture(&texture_descriptor);
+ Some(metal_texture)
}
#[repr(C)]
@@ -1255,9 +1162,10 @@ enum SurfaceInputIndex {
}
#[repr(C)]
-enum PathRasterizationInputIndex {
+enum PathInputIndex {
Vertices = 0,
- AtlasTextureSize = 1,
+ ViewportSize = 1,
+ Sprites = 2,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -1265,7 +1173,6 @@ enum PathRasterizationInputIndex {
pub struct PathSprite {
pub bounds: Bounds<ScaledPixels>,
pub color: Background,
- pub tile: AtlasTile,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -2,14 +2,14 @@ use super::{
BoolExt, MacKeyboardLayout,
attributed_string::{NSAttributedString, NSMutableAttributedString},
events::key_to_native,
- is_macos_version_at_least, renderer, screen_capture,
+ renderer,
};
use crate::{
Action, AnyWindowHandle, BackgroundExecutor, ClipboardEntry, ClipboardItem, ClipboardString,
CursorStyle, ForegroundExecutor, Image, ImageFormat, KeyContext, Keymap, MacDispatcher,
MacDisplay, MacWindow, Menu, MenuItem, PathPromptOptions, Platform, PlatformDisplay,
- PlatformKeyboardLayout, PlatformTextSystem, PlatformWindow, Result, ScreenCaptureSource,
- SemanticVersion, Task, WindowAppearance, WindowParams, hash,
+ PlatformKeyboardLayout, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, Task,
+ WindowAppearance, WindowParams, hash,
};
use anyhow::{Context as _, anyhow};
use block::ConcreteBlock;
@@ -22,8 +22,8 @@ use cocoa::{
},
base::{BOOL, NO, YES, id, nil, selector},
foundation::{
- NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSOperatingSystemVersion,
- NSProcessInfo, NSRange, NSString, NSUInteger, NSURL,
+ NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSRange, NSString,
+ NSUInteger, NSURL,
},
};
use core_foundation::{
@@ -572,15 +572,17 @@ impl Platform for MacPlatform {
.collect()
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
- let min_version = NSOperatingSystemVersion::new(12, 3, 0);
- is_macos_version_at_least(min_version)
+ let min_version = cocoa::foundation::NSOperatingSystemVersion::new(12, 3, 0);
+ super::is_macos_version_at_least(min_version)
}
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
- ) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
- screen_capture::get_sources()
+ ) -> oneshot::Receiver<Result<Vec<Box<dyn crate::ScreenCaptureSource>>>> {
+ super::screen_capture::get_sources()
}
fn active_window(&self) -> Option<AnyWindowHandle> {
@@ -698,76 +698,27 @@ fragment float4 polychrome_sprite_fragment(
return color;
}
-struct PathRasterizationVertexOutput {
+struct PathVertexOutput {
float4 position [[position]];
- float2 st_position;
- float clip_rect_distance [[clip_distance]][4];
-};
-
-struct PathRasterizationFragmentInput {
- float4 position [[position]];
- float2 st_position;
-};
-
-vertex PathRasterizationVertexOutput path_rasterization_vertex(
- uint vertex_id [[vertex_id]],
- constant PathVertex_ScaledPixels *vertices
- [[buffer(PathRasterizationInputIndex_Vertices)]],
- constant Size_DevicePixels *atlas_size
- [[buffer(PathRasterizationInputIndex_AtlasTextureSize)]]) {
- PathVertex_ScaledPixels v = vertices[vertex_id];
- float2 vertex_position = float2(v.xy_position.x, v.xy_position.y);
- float2 viewport_size = float2(atlas_size->width, atlas_size->height);
- return PathRasterizationVertexOutput{
- float4(vertex_position / viewport_size * float2(2., -2.) +
- float2(-1., 1.),
- 0., 1.),
- float2(v.st_position.x, v.st_position.y),
- {v.xy_position.x - v.content_mask.bounds.origin.x,
- v.content_mask.bounds.origin.x + v.content_mask.bounds.size.width -
- v.xy_position.x,
- v.xy_position.y - v.content_mask.bounds.origin.y,
- v.content_mask.bounds.origin.y + v.content_mask.bounds.size.height -
- v.xy_position.y}};
-}
-
-fragment float4 path_rasterization_fragment(PathRasterizationFragmentInput input
- [[stage_in]]) {
- float2 dx = dfdx(input.st_position);
- float2 dy = dfdy(input.st_position);
- float2 gradient = float2((2. * input.st_position.x) * dx.x - dx.y,
- (2. * input.st_position.x) * dy.x - dy.y);
- float f = (input.st_position.x * input.st_position.x) - input.st_position.y;
- float distance = f / length(gradient);
- float alpha = saturate(0.5 - distance);
- return float4(alpha, 0., 0., 1.);
-}
-
-struct PathSpriteVertexOutput {
- float4 position [[position]];
- float2 tile_position;
uint sprite_id [[flat]];
float4 solid_color [[flat]];
float4 color0 [[flat]];
float4 color1 [[flat]];
+ float4 clip_distance;
};
-vertex PathSpriteVertexOutput path_sprite_vertex(
- uint unit_vertex_id [[vertex_id]], uint sprite_id [[instance_id]],
- constant float2 *unit_vertices [[buffer(SpriteInputIndex_Vertices)]],
- constant PathSprite *sprites [[buffer(SpriteInputIndex_Sprites)]],
- constant Size_DevicePixels *viewport_size
- [[buffer(SpriteInputIndex_ViewportSize)]],
- constant Size_DevicePixels *atlas_size
- [[buffer(SpriteInputIndex_AtlasTextureSize)]]) {
-
- float2 unit_vertex = unit_vertices[unit_vertex_id];
+vertex PathVertexOutput path_vertex(
+ uint vertex_id [[vertex_id]],
+ constant PathVertex_ScaledPixels *vertices [[buffer(PathInputIndex_Vertices)]],
+ uint sprite_id [[instance_id]],
+ constant PathSprite *sprites [[buffer(PathInputIndex_Sprites)]],
+ constant Size_DevicePixels *input_viewport_size [[buffer(PathInputIndex_ViewportSize)]]) {
+ PathVertex_ScaledPixels v = vertices[vertex_id];
+ float2 vertex_position = float2(v.xy_position.x, v.xy_position.y);
+ float2 viewport_size = float2((float)input_viewport_size->width,
+ (float)input_viewport_size->height);
PathSprite sprite = sprites[sprite_id];
- // Don't apply content mask because it was already accounted for when
- // rasterizing the path.
- float4 device_position =
- to_device_position(unit_vertex, sprite.bounds, viewport_size);
- float2 tile_position = to_tile_position(unit_vertex, sprite.tile, atlas_size);
+ float4 device_position = float4(vertex_position / viewport_size * float2(2., -2.) + float2(-1., 1.), 0., 1.);
GradientColor gradient = prepare_fill_color(
sprite.color.tag,
@@ -777,30 +728,32 @@ vertex PathSpriteVertexOutput path_sprite_vertex(
sprite.color.colors[1].color
);
- return PathSpriteVertexOutput{
+ return PathVertexOutput{
device_position,
- tile_position,
sprite_id,
gradient.solid,
gradient.color0,
- gradient.color1
+ gradient.color1,
+ {v.xy_position.x - v.content_mask.bounds.origin.x,
+ v.content_mask.bounds.origin.x + v.content_mask.bounds.size.width -
+ v.xy_position.x,
+ v.xy_position.y - v.content_mask.bounds.origin.y,
+ v.content_mask.bounds.origin.y + v.content_mask.bounds.size.height -
+ v.xy_position.y}
};
}
-fragment float4 path_sprite_fragment(
- PathSpriteVertexOutput input [[stage_in]],
- constant PathSprite *sprites [[buffer(SpriteInputIndex_Sprites)]],
- texture2d<float> atlas_texture [[texture(SpriteInputIndex_AtlasTexture)]]) {
- constexpr sampler atlas_texture_sampler(mag_filter::linear,
- min_filter::linear);
- float4 sample =
- atlas_texture.sample(atlas_texture_sampler, input.tile_position);
- float mask = 1. - abs(1. - fmod(sample.r, 2.));
+fragment float4 path_fragment(
+ PathVertexOutput input [[stage_in]],
+ constant PathSprite *sprites [[buffer(PathInputIndex_Sprites)]]) {
+ if (any(input.clip_distance < float4(0.0))) {
+ return float4(0.0);
+ }
+
PathSprite sprite = sprites[input.sprite_id];
Background background = sprite.color;
float4 color = fill_color(background, input.position.xy, sprite.bounds,
input.solid_color, input.color0, input.color1);
- color.a *= mask;
return color;
}
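
Note: content-mask clipping now happens in this single path pipeline instead of during a separate atlas rasterization pass: the vertex stage emits four signed distances to the mask edges and the fragment stage returns transparent for any pixel where one of them interpolates below zero. The same test written out in Rust for clarity (type and names hypothetical):

    // A point is inside the content mask iff all four edge distances are >= 0.
    struct MaskBounds { origin_x: f32, origin_y: f32, width: f32, height: f32 }

    fn clip_distances(x: f32, y: f32, m: &MaskBounds) -> [f32; 4] {
        [
            x - m.origin_x,            // left edge
            m.origin_x + m.width - x,  // right edge
            y - m.origin_y,            // top edge
            m.origin_y + m.height - y, // bottom edge
        ]
    }

    fn inside_mask(x: f32, y: f32, m: &MaskBounds) -> bool {
        clip_distances(x, y, m).iter().all(|d| *d >= 0.0)
    }
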
@@ -263,10 +263,12 @@ impl Platform for TestPlatform {
Some(self.active_display.clone())
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
true
}
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
@@ -341,7 +341,7 @@ impl PlatformAtlas for TestAtlas {
crate::AtlasTile {
texture_id: AtlasTextureId {
index: texture_id,
- kind: crate::AtlasTextureKind::Path,
+ kind: crate::AtlasTextureKind::Polychrome,
},
tile_id: TileId(tile_id),
padding: 0,
@@ -93,7 +93,7 @@ pub(crate) fn handle_msg(
WM_IME_STARTCOMPOSITION => handle_ime_position(handle, state_ptr),
WM_IME_COMPOSITION => handle_ime_composition(handle, lparam, state_ptr),
WM_SETCURSOR => handle_set_cursor(handle, lparam, state_ptr),
- WM_SETTINGCHANGE => handle_system_settings_changed(handle, lparam, state_ptr),
+ WM_SETTINGCHANGE => handle_system_settings_changed(handle, wparam, lparam, state_ptr),
WM_INPUTLANGCHANGE => handle_input_language_changed(lparam, state_ptr),
WM_GPUI_CURSOR_STYLE_CHANGED => handle_cursor_changed(lparam, state_ptr),
_ => None,
@@ -466,12 +466,7 @@ fn handle_keyup_msg(
}
fn handle_char_msg(wparam: WPARAM, state_ptr: Rc<WindowsWindowStatePtr>) -> Option<isize> {
- let Some(input) = char::from_u32(wparam.0 as u32)
- .filter(|c| !c.is_control())
- .map(String::from)
- else {
- return Some(1);
- };
+ let input = parse_char_message(wparam, &state_ptr)?;
with_input_handler(&state_ptr, |input_handler| {
input_handler.replace_text_in_range(None, &input);
});
@@ -1152,37 +1147,23 @@ fn handle_set_cursor(
fn handle_system_settings_changed(
handle: HWND,
+ wparam: WPARAM,
lparam: LPARAM,
state_ptr: Rc<WindowsWindowStatePtr>,
) -> Option<isize> {
- let mut lock = state_ptr.state.borrow_mut();
- let display = lock.display;
- // system settings
- lock.system_settings.update(display);
- // mouse double click
- lock.click_state.system_update();
- // window border offset
- lock.border_offset.update(handle).log_err();
- drop(lock);
-
- // lParam is a pointer to a string that indicates the area containing the system parameter
- // that was changed.
- let parameter = PCWSTR::from_raw(lparam.0 as _);
- if unsafe { !parameter.is_null() && !parameter.is_empty() } {
- if let Some(parameter_string) = unsafe { parameter.to_string() }.log_err() {
- log::info!("System settings changed: {}", parameter_string);
- match parameter_string.as_str() {
- "ImmersiveColorSet" => {
- handle_system_theme_changed(handle, state_ptr);
- }
- _ => {}
- }
- }
- }
-
+ if wparam.0 != 0 {
+ let mut lock = state_ptr.state.borrow_mut();
+ let display = lock.display;
+ lock.system_settings.update(display, wparam.0);
+ lock.click_state.system_update(wparam.0);
+ lock.border_offset.update(handle).log_err();
+ } else {
+ handle_system_theme_changed(handle, lparam, state_ptr)?;
+ };
// Force to trigger WM_NCCALCSIZE event to ensure that we handle auto hide
// taskbar correctly.
notify_frame_changed(handle);
+
Some(0)
}
@@ -1199,17 +1180,34 @@ fn handle_system_command(wparam: WPARAM, state_ptr: Rc<WindowsWindowStatePtr>) -
fn handle_system_theme_changed(
handle: HWND,
+ lparam: LPARAM,
state_ptr: Rc<WindowsWindowStatePtr>,
) -> Option<isize> {
- let mut callback = state_ptr
- .state
- .borrow_mut()
- .callbacks
- .appearance_changed
- .take()?;
- callback();
- state_ptr.state.borrow_mut().callbacks.appearance_changed = Some(callback);
- configure_dwm_dark_mode(handle);
+ // lParam is a pointer to a string that indicates the area containing the system parameter
+ // that was changed.
+ let parameter = PCWSTR::from_raw(lparam.0 as _);
+ if unsafe { !parameter.is_null() && !parameter.is_empty() } {
+ if let Some(parameter_string) = unsafe { parameter.to_string() }.log_err() {
+ log::info!("System settings changed: {}", parameter_string);
+ match parameter_string.as_str() {
+ "ImmersiveColorSet" => {
+ let new_appearance = system_appearance()
+ .context("unable to get system appearance when handling ImmersiveColorSet")
+ .log_err()?;
+ let mut lock = state_ptr.state.borrow_mut();
+ if new_appearance != lock.appearance {
+ lock.appearance = new_appearance;
+ let mut callback = lock.callbacks.appearance_changed.take()?;
+ drop(lock);
+ callback();
+ state_ptr.state.borrow_mut().callbacks.appearance_changed = Some(callback);
+ configure_dwm_dark_mode(handle, new_appearance);
+ }
+ }
+ _ => {}
+ }
+ }
+ }
Some(0)
}
@@ -1225,6 +1223,36 @@ fn handle_input_language_changed(
Some(0)
}
+#[inline]
+fn parse_char_message(wparam: WPARAM, state_ptr: &Rc<WindowsWindowStatePtr>) -> Option<String> {
+ let code_point = wparam.loword();
+ let mut lock = state_ptr.state.borrow_mut();
+ // https://www.unicode.org/versions/Unicode16.0.0/core-spec/chapter-3/#G2630
+ match code_point {
+ 0xD800..=0xDBFF => {
+ // High surrogate, wait for low surrogate
+ lock.pending_surrogate = Some(code_point);
+ None
+ }
+ 0xDC00..=0xDFFF => {
+ if let Some(high_surrogate) = lock.pending_surrogate.take() {
+ // Low surrogate, combine with pending high surrogate
+ String::from_utf16(&[high_surrogate, code_point]).ok()
+ } else {
+ // Invalid low surrogate without a preceding high surrogate
+ log::warn!(
+ "Received low surrogate without a preceding high surrogate: {code_point:x}"
+ );
+ None
+ }
+ }
+ _ => {
+ lock.pending_surrogate = None;
+ String::from_utf16(&[code_point]).ok()
+ }
+ }
+}
+
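
Note: WM_CHAR delivers UTF-16 code units, so a character outside the Basic Multilingual Plane arrives as two messages, a high surrogate followed by a low surrogate, which is why the window state now carries `pending_surrogate`. An isolated illustration of the recombination step (values are just an example):

    // How a stored high surrogate plus the following low surrogate recombine,
    // as parse_char_message does once both halves have arrived.
    fn combine_surrogates(high: u16, low: u16) -> Option<String> {
        debug_assert!((0xD800..=0xDBFF).contains(&high));
        debug_assert!((0xDC00..=0xDFFF).contains(&low));
        String::from_utf16(&[high, low]).ok()
    }

    // combine_surrogates(0xD83D, 0xDE00) == Some("😀".to_string())   // U+1F600
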
#[inline]
fn translate_message(handle: HWND, wparam: WPARAM, lparam: LPARAM) {
let msg = MSG {
@@ -1267,6 +1295,10 @@ where
capslock: current_capslock(),
}))
}
+ VK_PACKET => {
+ translate_message(handle, wparam, lparam);
+ None
+ }
VK_CAPITAL => {
let capslock = current_capslock();
if state
@@ -432,10 +432,12 @@ impl Platform for WindowsPlatform {
WindowsDisplay::primary_monitor().map(|display| Rc::new(display) as Rc<dyn PlatformDisplay>)
}
+ #[cfg(feature = "screen-capture")]
fn is_screen_capture_supported(&self) -> bool {
false
}
+ #[cfg(feature = "screen-capture")]
fn screen_capture_sources(
&self,
) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
@@ -32,14 +32,32 @@ pub(crate) struct MouseWheelSettings {
impl WindowsSystemSettings {
pub(crate) fn new(display: WindowsDisplay) -> Self {
let mut settings = Self::default();
- settings.update(display);
+ settings.init(display);
settings
}
- pub(crate) fn update(&mut self, display: WindowsDisplay) {
+ fn init(&mut self, display: WindowsDisplay) {
self.mouse_wheel_settings.update();
self.auto_hide_taskbar_position = AutoHideTaskbarPosition::new(display).log_err().flatten();
}
+
+ pub(crate) fn update(&mut self, display: WindowsDisplay, wparam: usize) {
+ match wparam {
+ // SPI_SETWORKAREA
+ 47 => self.update_taskbar_position(display),
+ // SPI_GETWHEELSCROLLLINES, SPI_GETWHEELSCROLLCHARS
+ 104 | 108 => self.update_mouse_wheel_settings(),
+ _ => {}
+ }
+ }
+
+ fn update_mouse_wheel_settings(&mut self) {
+ self.mouse_wheel_settings.update();
+ }
+
+ fn update_taskbar_position(&mut self, display: WindowsDisplay) {
+ self.auto_hide_taskbar_position = AutoHideTaskbarPosition::new(display).log_err().flatten();
+ }
}
impl MouseWheelSettings {
@@ -144,8 +144,8 @@ pub(crate) fn load_cursor(style: CursorStyle) -> Option<HCURSOR> {
}
/// This function is used to configure the dark mode for the window built-in title bar.
-pub(crate) fn configure_dwm_dark_mode(hwnd: HWND) {
- let dark_mode_enabled: BOOL = match system_appearance().log_err().unwrap_or_default() {
+pub(crate) fn configure_dwm_dark_mode(hwnd: HWND, appearance: WindowAppearance) {
+ let dark_mode_enabled: BOOL = match appearance {
WindowAppearance::Dark | WindowAppearance::VibrantDark => true.into(),
WindowAppearance::Light | WindowAppearance::VibrantLight => false.into(),
};
@@ -37,11 +37,13 @@ pub struct WindowsWindowState {
pub min_size: Option<Size<Pixels>>,
pub fullscreen_restore_bounds: Bounds<Pixels>,
pub border_offset: WindowBorderOffset,
+ pub appearance: WindowAppearance,
pub scale_factor: f32,
pub restore_from_minimized: Option<Box<dyn FnMut(RequestFrameOptions)>>,
pub callbacks: Callbacks,
pub input_handler: Option<PlatformInputHandler>,
+ pub pending_surrogate: Option<u16>,
pub last_reported_modifiers: Option<Modifiers>,
pub last_reported_capslock: Option<Capslock>,
pub system_key_handled: bool,
@@ -84,6 +86,7 @@ impl WindowsWindowState {
display: WindowsDisplay,
gpu_context: &BladeContext,
min_size: Option<Size<Pixels>>,
+ appearance: WindowAppearance,
) -> Result<Self> {
let scale_factor = {
let monitor_dpi = unsafe { GetDpiForWindow(hwnd) } as f32;
@@ -103,6 +106,7 @@ impl WindowsWindowState {
let renderer = windows_renderer::init(gpu_context, hwnd, transparent)?;
let callbacks = Callbacks::default();
let input_handler = None;
+ let pending_surrogate = None;
let last_reported_modifiers = None;
let last_reported_capslock = None;
let system_key_handled = false;
@@ -118,11 +122,13 @@ impl WindowsWindowState {
logical_size,
fullscreen_restore_bounds,
border_offset,
+ appearance,
scale_factor,
restore_from_minimized,
min_size,
callbacks,
input_handler,
+ pending_surrogate,
last_reported_modifiers,
last_reported_capslock,
system_key_handled,
@@ -206,6 +212,7 @@ impl WindowsWindowStatePtr {
context.display,
context.gpu_context,
context.min_size,
+ context.appearance,
)?);
Ok(Rc::new_cyclic(|this| Self {
@@ -338,6 +345,7 @@ struct WindowCreateContext<'a> {
main_receiver: flume::Receiver<Runnable>,
gpu_context: &'a BladeContext,
main_thread_id_win32: u32,
+ appearance: WindowAppearance,
}
impl WindowsWindow {
@@ -387,6 +395,7 @@ impl WindowsWindow {
} else {
WindowsDisplay::primary_monitor().unwrap()
};
+ let appearance = system_appearance().unwrap_or_default();
let mut context = WindowCreateContext {
inner: None,
handle,
@@ -403,6 +412,7 @@ impl WindowsWindow {
main_receiver,
gpu_context,
main_thread_id_win32,
+ appearance,
};
let lpparam = Some(&context as *const _ as *const _);
let creation_result = unsafe {
@@ -426,7 +436,7 @@ impl WindowsWindow {
let state_ptr = context.inner.take().unwrap()?;
let hwnd = creation_result?;
register_drag_drop(state_ptr.clone())?;
- configure_dwm_dark_mode(hwnd);
+ configure_dwm_dark_mode(hwnd, appearance);
state_ptr.state.borrow_mut().border_offset.update(hwnd)?;
let placement = retrieve_window_placement(
hwnd,
@@ -543,7 +553,7 @@ impl PlatformWindow for WindowsWindow {
}
fn appearance(&self) -> WindowAppearance {
- system_appearance().log_err().unwrap_or_default()
+ self.0.state.borrow().appearance
}
fn display(&self) -> Option<Rc<dyn PlatformDisplay>> {
@@ -951,7 +961,7 @@ impl IDropTarget_Impl for WindowsDragDropHandler_Impl {
}
}
-#[derive(Debug)]
+#[derive(Debug, Clone, Copy)]
pub(crate) struct ClickState {
button: MouseButton,
last_click: Instant,
@@ -993,10 +1003,25 @@ impl ClickState {
self.current_count
}
- pub fn system_update(&mut self) {
- self.double_click_spatial_tolerance_width = unsafe { GetSystemMetrics(SM_CXDOUBLECLK) };
- self.double_click_spatial_tolerance_height = unsafe { GetSystemMetrics(SM_CYDOUBLECLK) };
- self.double_click_interval = Duration::from_millis(unsafe { GetDoubleClickTime() } as u64);
+ pub fn system_update(&mut self, wparam: usize) {
+ match wparam {
+ // SPI_SETDOUBLECLKWIDTH
+ 29 => {
+ self.double_click_spatial_tolerance_width =
+ unsafe { GetSystemMetrics(SM_CXDOUBLECLK) }
+ }
+ // SPI_SETDOUBLECLKHEIGHT
+ 30 => {
+ self.double_click_spatial_tolerance_height =
+ unsafe { GetSystemMetrics(SM_CYDOUBLECLK) }
+ }
+ // SPI_SETDOUBLECLICKTIME
+ 32 => {
+ self.double_click_interval =
+ Duration::from_millis(unsafe { GetDoubleClickTime() } as u64)
+ }
+ _ => {}
+ }
}
#[inline]
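
Note: the `wparam` literals matched in `WindowsSystemSettings::update` and `ClickState::system_update` are raw SystemParametersInfo action codes; written out as named constants (standard Win32 values, matching the inline comments above) they are:

    const SPI_SETDOUBLECLKWIDTH: usize = 29;    // 0x001D
    const SPI_SETDOUBLECLKHEIGHT: usize = 30;   // 0x001E
    const SPI_SETDOUBLECLICKTIME: usize = 32;   // 0x0020
    const SPI_SETWORKAREA: usize = 47;          // 0x002F
    const SPI_GETWHEELSCROLLLINES: usize = 104; // 0x0068
    const SPI_GETWHEELSCROLLCHARS: usize = 108; // 0x006C
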
@@ -1299,12 +1324,8 @@ mod windows_renderer {
size: Default::default(),
transparent,
};
- BladeRenderer::new(context, &raw, config).inspect_err(|err| {
- show_error(
- "Error: Zed failed to initialize BladeRenderer",
- err.to_string(),
- )
- })
+ BladeRenderer::new(context, &raw, config)
+ .inspect_err(|err| show_error("Failed to initialize BladeRenderer", err.to_string()))
}
struct RawWindow {
@@ -3,7 +3,7 @@
//! application to avoid having to import each trait individually.
pub use crate::{
- AppContext as _, BorrowAppContext, Context, Element, FocusableElement, InteractiveElement,
- IntoElement, ParentElement, Refineable, Render, RenderOnce, StatefulInteractiveElement, Styled,
- StyledImage, VisualContext, util::FluentBuilder,
+ AppContext as _, BorrowAppContext, Context, Element, InteractiveElement, IntoElement,
+ ParentElement, Refineable, Render, RenderOnce, StatefulInteractiveElement, Styled, StyledImage,
+ VisualContext, util::FluentBuilder,
};
@@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use crate::{
AtlasTextureId, AtlasTile, Background, Bounds, ContentMask, Corners, Edges, Hsla, Pixels,
- Point, Radians, ScaledPixels, Size, bounds_tree::BoundsTree, point,
+ Point, Radians, ScaledPixels, Size, bounds_tree::BoundsTree,
};
use std::{fmt::Debug, iter::Peekable, ops::Range, slice};
@@ -43,13 +43,7 @@ impl Scene {
self.surfaces.clear();
}
- #[cfg_attr(
- all(
- any(target_os = "linux", target_os = "freebsd"),
- not(any(feature = "x11", feature = "wayland"))
- ),
- allow(dead_code)
- )]
+ #[allow(dead_code)]
pub fn paths(&self) -> &[Path<ScaledPixels>] {
&self.paths
}
@@ -689,6 +683,7 @@ pub struct Path<P: Clone + Debug + Default + PartialEq> {
start: Point<P>,
current: Point<P>,
contour_count: usize,
+ base_scale: f32,
}
impl Path<Pixels> {
@@ -707,25 +702,35 @@ impl Path<Pixels> {
content_mask: Default::default(),
color: Default::default(),
contour_count: 0,
+ base_scale: 1.0,
}
}
- /// Scale this path by the given factor.
- pub fn scale(&self, factor: f32) -> Path<ScaledPixels> {
+ /// Set the base scale of the path.
+ pub fn scale(mut self, factor: f32) -> Self {
+ self.base_scale = factor;
+ self
+ }
+
+ /// Apply a scale to the path.
+ pub(crate) fn apply_scale(&self, factor: f32) -> Path<ScaledPixels> {
Path {
id: self.id,
order: self.order,
- bounds: self.bounds.scale(factor),
- content_mask: self.content_mask.scale(factor),
+ bounds: self.bounds.scale(self.base_scale * factor),
+ content_mask: self.content_mask.scale(self.base_scale * factor),
vertices: self
.vertices
.iter()
- .map(|vertex| vertex.scale(factor))
+ .map(|vertex| vertex.scale(self.base_scale * factor))
.collect(),
- start: self.start.map(|start| start.scale(factor)),
- current: self.current.scale(factor),
+ start: self
+ .start
+ .map(|start| start.scale(self.base_scale * factor)),
+ current: self.current.scale(self.base_scale * factor),
contour_count: self.contour_count,
color: self.color,
+ base_scale: 1.0,
}
}
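
Note: `scale` is now a builder-style setter that records a base scale on the `Path<Pixels>`, and the display scale factor is folded in later by the crate-internal `apply_scale` when the path is inserted into the scene (see the `impl Window` hunk further below). A hedged usage sketch:

    // The caller-provided base scale composes multiplicatively with the display
    // scale factor: bounds, content mask, and vertices all end up scaled by
    // base_scale * scale_factor.
    let mut path = Path::new(point(px(0.), px(0.))).scale(2.0);
    path.line_to(point(px(10.), px(10.)));
    // Inside the window's paint code, effectively:
    // scene.insert_primitive(path.apply_scale(window_scale_factor));
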
@@ -740,10 +745,7 @@ impl Path<Pixels> {
pub fn line_to(&mut self, to: Point<Pixels>) {
self.contour_count += 1;
if self.contour_count > 1 {
- self.push_triangle(
- (self.start, self.current, to),
- (point(0., 1.), point(0., 1.), point(0., 1.)),
- );
+ self.push_triangle((self.start, self.current, to));
}
self.current = to;
}
@@ -752,25 +754,15 @@ impl Path<Pixels> {
pub fn curve_to(&mut self, to: Point<Pixels>, ctrl: Point<Pixels>) {
self.contour_count += 1;
if self.contour_count > 1 {
- self.push_triangle(
- (self.start, self.current, to),
- (point(0., 1.), point(0., 1.), point(0., 1.)),
- );
+ self.push_triangle((self.start, self.current, to));
}
- self.push_triangle(
- (self.current, ctrl, to),
- (point(0., 0.), point(0.5, 0.), point(1., 1.)),
- );
+ self.push_triangle((self.current, ctrl, to));
self.current = to;
}
/// Push a triangle to the Path.
- pub fn push_triangle(
- &mut self,
- xy: (Point<Pixels>, Point<Pixels>, Point<Pixels>),
- st: (Point<f32>, Point<f32>, Point<f32>),
- ) {
+ pub fn push_triangle(&mut self, xy: (Point<Pixels>, Point<Pixels>, Point<Pixels>)) {
self.bounds = self
.bounds
.union(&Bounds {
@@ -788,17 +780,14 @@ impl Path<Pixels> {
self.vertices.push(PathVertex {
xy_position: xy.0,
- st_position: st.0,
content_mask: Default::default(),
});
self.vertices.push(PathVertex {
xy_position: xy.1,
- st_position: st.1,
content_mask: Default::default(),
});
self.vertices.push(PathVertex {
xy_position: xy.2,
- st_position: st.2,
content_mask: Default::default(),
});
}
@@ -814,7 +803,6 @@ impl From<Path<ScaledPixels>> for Primitive {
#[repr(C)]
pub(crate) struct PathVertex<P: Clone + Debug + Default + PartialEq> {
pub(crate) xy_position: Point<P>,
- pub(crate) st_position: Point<f32>,
pub(crate) content_mask: ContentMask<P>,
}
@@ -822,7 +810,6 @@ impl PathVertex<Pixels> {
pub fn scale(&self, factor: f32) -> PathVertex<ScaledPixels> {
PathVertex {
xy_position: self.xy_position.scale(factor),
- st_position: self.st_position,
content_mask: self.content_mask.scale(factor),
}
}
@@ -2,7 +2,10 @@ use derive_more::{Deref, DerefMut};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use std::{borrow::Borrow, sync::Arc};
+use std::{
+ borrow::{Borrow, Cow},
+ sync::Arc,
+};
use util::arc_cow::ArcCow;
/// A shared string is an immutable string that can be cheaply cloned in GPUI
@@ -23,12 +26,16 @@ impl SharedString {
}
impl JsonSchema for SharedString {
- fn schema_name() -> String {
+ fn inline_schema() -> bool {
+ String::inline_schema()
+ }
+
+ fn schema_name() -> Cow<'static, str> {
String::schema_name()
}
- fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
- String::json_schema(r#gen)
+ fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ String::json_schema(generator)
}
}
@@ -1,6 +1,7 @@
+use std::borrow::Cow;
use std::sync::Arc;
-use schemars::schema::{InstanceType, SchemaObject};
+use schemars::{JsonSchema, json_schema};
/// The OpenType features that can be configured for a given font.
#[derive(Default, Clone, Eq, PartialEq, Hash)]
@@ -128,36 +129,23 @@ impl serde::Serialize for FontFeatures {
}
}
-impl schemars::JsonSchema for FontFeatures {
- fn schema_name() -> String {
+impl JsonSchema for FontFeatures {
+ fn schema_name() -> Cow<'static, str> {
"FontFeatures".into()
}
- fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
- let mut schema = SchemaObject::default();
- schema.instance_type = Some(schemars::schema::SingleOrVec::Single(Box::new(
- InstanceType::Object,
- )));
- {
- let mut property = SchemaObject {
- instance_type: Some(schemars::schema::SingleOrVec::Vec(vec![
- InstanceType::Boolean,
- InstanceType::Integer,
- ])),
- ..Default::default()
- };
-
- {
- let mut number_constraints = property.number();
- number_constraints.multiple_of = Some(1.0);
- number_constraints.minimum = Some(0.0);
- }
- schema
- .object()
- .pattern_properties
- .insert("[0-9a-zA-Z]{4}$".into(), property.into());
- }
- schema.into()
+ fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "object",
+ "patternProperties": {
+ "[0-9a-zA-Z]{4}$": {
+ "type": ["boolean", "integer"],
+ "minimum": 0,
+ "multipleOf": 1
+ }
+ },
+ "additionalProperties": false
+ })
}
}
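
Note: these impls are part of the schemars 0.8 → 1.0 migration that runs through the rest of this change: `schema_name` returns `Cow<'static, str>` instead of `String`, the generator moved from `schemars::r#gen::SchemaGenerator` to `schemars::SchemaGenerator`, hand-written schemas are built with the `json_schema!` macro instead of `SchemaObject`, and delegating impls can forward `inline_schema`. The recurring shape, condensed into one hypothetical newtype:

    use std::borrow::Cow;
    use schemars::{JsonSchema, Schema, SchemaGenerator, json_schema};

    struct Wrapper(String);

    impl JsonSchema for Wrapper {
        fn inline_schema() -> bool {
            String::inline_schema()
        }

        fn schema_name() -> Cow<'static, str> {
            "Wrapper".into()
        }

        fn json_schema(_: &mut SchemaGenerator) -> Schema {
            json_schema!({ "type": "string" })
        }
    }
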
@@ -2633,7 +2633,7 @@ impl Window {
path.color = color.opacity(opacity);
self.next_frame
.scene
- .insert_primitive(path.scale(scale_factor));
+ .insert_primitive(path.apply_scale(scale_factor));
}
/// Paint an underline into the scene for the next frame at the current z-index.
@@ -16,9 +16,11 @@ fn test_action_macros() {
#[derive(PartialEq, Clone, Deserialize, JsonSchema, Action)]
#[action(namespace = test_only)]
- struct AnotherSomeAction;
+ #[serde(deny_unknown_fields)]
+ struct AnotherAction;
#[derive(PartialEq, Clone, gpui::private::serde_derive::Deserialize)]
+ #[serde(deny_unknown_fields)]
struct RegisterableAction {}
register_action!(RegisterableAction);
@@ -14,6 +14,7 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream {
let mut no_register = false;
let mut namespace = None;
let mut deprecated = None;
+ let mut doc_str: Option<String> = None;
for attr in &input.attrs {
if attr.path().is_ident("action") {
@@ -74,6 +75,22 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream {
Ok(())
})
.unwrap_or_else(|e| panic!("in #[action] attribute: {}", e));
+ } else if attr.path().is_ident("doc") {
+ use syn::{Expr::Lit, ExprLit, Lit::Str, Meta, MetaNameValue};
+ if let Meta::NameValue(MetaNameValue {
+ value:
+ Lit(ExprLit {
+ lit: Str(ref lit_str),
+ ..
+ }),
+ ..
+ }) = attr.meta
+ {
+ let doc = lit_str.value();
+ let doc_str = doc_str.get_or_insert_default();
+ doc_str.push_str(doc.trim());
+ doc_str.push('\n');
+ }
}
}
@@ -122,6 +139,13 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream {
quote! { None }
};
+ let documentation_fn_body = if let Some(doc) = doc_str {
+ let doc = doc.trim();
+ quote! { Some(#doc) }
+ } else {
+ quote! { None }
+ };
+
let registration = if no_register {
quote! {}
} else {
@@ -159,8 +183,8 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream {
}
fn action_json_schema(
- _generator: &mut gpui::private::schemars::r#gen::SchemaGenerator,
- ) -> Option<gpui::private::schemars::schema::Schema> {
+ _generator: &mut gpui::private::schemars::SchemaGenerator,
+ ) -> Option<gpui::private::schemars::Schema> {
#json_schema_fn_body
}
@@ -171,6 +195,10 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream {
fn deprecation_message() -> Option<&'static str> {
#deprecation_fn_body
}
+
+ fn documentation() -> Option<&'static str> {
+ #documentation_fn_body
+ }
}
})
}
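
Note: the derive now also collects the item's `#[doc = "..."]` attributes and emits them from a new `documentation()` associated function, which `generate_register_action` (next hunk) threads into the action registration. A hedged sketch of the effect (action type, namespace, and test are illustrative only):

    /// Opens the example panel.
    #[derive(PartialEq, Clone, Deserialize, JsonSchema, Action)]
    #[action(namespace = example)]
    struct OpenExamplePanel;

    #[test]
    fn doc_comment_becomes_documentation() {
        assert_eq!(
            OpenExamplePanel::documentation(),
            Some("Opens the example panel.")
        );
    }
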
@@ -34,6 +34,7 @@ pub(crate) fn generate_register_action(type_name: &Ident) -> TokenStream2 {
json_schema: <#type_name as gpui::Action>::action_json_schema,
deprecated_aliases: <#type_name as gpui::Action>::deprecated_aliases(),
deprecation_message: <#type_name as gpui::Action>::deprecation_message(),
+ documentation: <#type_name as gpui::Action>::documentation(),
}
}
@@ -424,7 +424,22 @@ pub fn box_shadow_style_methods(input: TokenStream) -> TokenStream {
/// Sets the box shadow of the element.
/// [Docs](https://tailwindcss.com/docs/box-shadow)
- #visibility fn shadow_sm(mut self) -> Self {
+ #visibility fn shadow_2xs(mut self) -> Self {
+ use gpui::{BoxShadow, hsla, point, px};
+ use std::vec;
+
+ self.style().box_shadow = Some(vec![BoxShadow {
+ color: hsla(0., 0., 0., 0.05),
+ offset: point(px(0.), px(1.)),
+ blur_radius: px(0.),
+ spread_radius: px(0.),
+ }]);
+ self
+ }
+
+ /// Sets the box shadow of the element.
+ /// [Docs](https://tailwindcss.com/docs/box-shadow)
+ #visibility fn shadow_xs(mut self) -> Self {
use gpui::{BoxShadow, hsla, point, px};
use std::vec;
@@ -437,6 +452,29 @@ pub fn box_shadow_style_methods(input: TokenStream) -> TokenStream {
self
}
+ /// Sets the box shadow of the element.
+ /// [Docs](https://tailwindcss.com/docs/box-shadow)
+ #visibility fn shadow_sm(mut self) -> Self {
+ use gpui::{BoxShadow, hsla, point, px};
+ use std::vec;
+
+ self.style().box_shadow = Some(vec![
+ BoxShadow {
+ color: hsla(0., 0., 0., 0.1),
+ offset: point(px(0.), px(1.)),
+ blur_radius: px(3.),
+ spread_radius: px(0.),
+ },
+ BoxShadow {
+ color: hsla(0., 0., 0., 0.1),
+ offset: point(px(0.), px(1.)),
+ blur_radius: px(2.),
+ spread_radius: px(-1.),
+ }
+ ]);
+ self
+ }
+
/// Sets the box shadow of the element.
/// [Docs](https://tailwindcss.com/docs/box-shadow)
#visibility fn shadow_md(mut self) -> Self {
@@ -445,7 +483,7 @@ pub fn box_shadow_style_methods(input: TokenStream) -> TokenStream {
self.style().box_shadow = Some(vec![
BoxShadow {
- color: hsla(0.5, 0., 0., 0.1),
+ color: hsla(0., 0., 0., 0.1),
offset: point(px(0.), px(4.)),
blur_radius: px(6.),
spread_radius: px(-1.),
@@ -37,7 +37,13 @@ use zed_actions::OpenBrowser;
use zed_llm_client::UsageLimit;
use zeta::RateCompletions;
-actions!(edit_prediction, [ToggleMenu]);
+actions!(
+ edit_prediction,
+ [
+ /// Toggles the inline completion menu.
+ ToggleMenu
+ ]
+);
const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot";
@@ -967,6 +973,7 @@ fn toggle_show_inline_completions_for_language(
all_language_settings(None, cx).show_edit_predictions(Some(&language), cx);
update_settings_file::<AllLanguageSettings>(fs, cx, move |file, _| {
file.languages
+ .0
.entry(language.name())
.or_default()
.show_edit_predictions = Some(!show_edit_predictions);
@@ -8,7 +8,15 @@ use util::ResultExt;
use workspace::notifications::{DetachAndPromptErr, NotificationId};
use workspace::{Toast, Workspace};
-actions!(cli, [Install, RegisterZedScheme]);
+actions!(
+ cli,
+ [
+ /// Installs the Zed CLI tool to the system PATH.
+ Install,
+ /// Registers the zed:// URL scheme handler.
+ RegisterZedScheme
+ ]
+);
async fn install_script(cx: &AsyncApp) -> Result<PathBuf> {
let cli_path = cx.update(|cx| cx.path_for_auxiliary_executable("cli"))??;
@@ -13,7 +13,13 @@ use std::{
};
use workspace::{AppState, OpenVisible, Workspace};
-actions!(journal, [NewJournalEntry]);
+actions!(
+ journal,
+ [
+ /// Creates a new journal entry for today.
+ NewJournalEntry
+ ]
+);
/// Settings specific to journaling
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
@@ -39,6 +39,7 @@ globset.workspace = true
gpui.workspace = true
http_client.workspace = true
imara-diff.workspace = true
+inventory.workspace = true
itertools.workspace = true
log.workspace = true
lsp.workspace = true
@@ -2006,7 +2006,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut App) {
#[gpui::test]
fn test_autoindent_with_injected_languages(cx: &mut App) {
init_settings(cx, |settings| {
- settings.languages.extend([
+ settings.languages.0.extend([
(
"HTML".into(),
LanguageSettingsContent {
@@ -39,11 +39,7 @@ use lsp::{CodeActionKind, InitializeParams, LanguageServerBinary, LanguageServer
pub use manifest::{ManifestDelegate, ManifestName, ManifestProvider, ManifestQuery};
use parking_lot::Mutex;
use regex::Regex;
-use schemars::{
- JsonSchema,
- r#gen::SchemaGenerator,
- schema::{InstanceType, Schema, SchemaObject},
-};
+use schemars::{JsonSchema, SchemaGenerator, json_schema};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use serde_json::Value;
use settings::WorktreeId;
@@ -694,7 +690,6 @@ pub struct LanguageConfig {
pub matcher: LanguageMatcher,
/// List of bracket types in a language.
#[serde(default)]
- #[schemars(schema_with = "bracket_pair_config_json_schema")]
pub brackets: BracketPairConfig,
/// If set to true, auto indentation uses last non empty line to determine
/// the indentation level for a new line.
@@ -735,6 +730,13 @@ pub struct LanguageConfig {
/// Starting and closing characters of a block comment.
#[serde(default)]
pub block_comment: Option<(Arc<str>, Arc<str>)>,
+ /// A list of additional regex patterns that should be treated as prefixes
+ /// for creating boundaries during rewrapping, ensuring content from one
+ /// prefixed section doesn't merge with another (e.g., markdown list items).
+    /// By default, Zed treats paragraph and comment prefixes as boundaries.
+ #[serde(default, deserialize_with = "deserialize_regex_vec")]
+ #[schemars(schema_with = "regex_vec_json_schema")]
+ pub rewrap_prefixes: Vec<Regex>,
/// A list of language servers that are allowed to run on subranges of a given language.
#[serde(default)]
pub scope_opt_in_language_servers: Vec<LanguageServerName>,
@@ -914,6 +916,7 @@ impl Default for LanguageConfig {
autoclose_before: Default::default(),
line_comments: Default::default(),
block_comment: Default::default(),
+ rewrap_prefixes: Default::default(),
scope_opt_in_language_servers: Default::default(),
overrides: Default::default(),
word_characters: Default::default(),
@@ -944,10 +947,9 @@ fn deserialize_regex<'de, D: Deserializer<'de>>(d: D) -> Result<Option<Regex>, D
}
}
-fn regex_json_schema(_: &mut SchemaGenerator) -> Schema {
- Schema::Object(SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- ..Default::default()
+fn regex_json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "string"
})
}
@@ -961,6 +963,22 @@ where
}
}
+fn deserialize_regex_vec<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<Regex>, D::Error> {
+ let sources = Vec::<String>::deserialize(d)?;
+ let mut regexes = Vec::new();
+ for source in sources {
+ regexes.push(regex::Regex::new(&source).map_err(de::Error::custom)?);
+ }
+ Ok(regexes)
+}
+
+fn regex_vec_json_schema(_: &mut SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "array",
+ "items": { "type": "string" }
+ })
+}
+
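
Note: `rewrap_prefixes` (declared further up in `LanguageConfig`) uses `deserialize_regex_vec` to turn plain strings into compiled regexes and `regex_vec_json_schema` to describe the field as an array of strings in the settings schema. A hedged round trip; the struct and input below are illustrative, not taken from a real language config:

    #[derive(serde::Deserialize)]
    struct RewrapConfig {
        #[serde(default, deserialize_with = "deserialize_regex_vec")]
        rewrap_prefixes: Vec<regex::Regex>,
    }

    fn parse_example() -> RewrapConfig {
        serde_json::from_str(r#"{ "rewrap_prefixes": ["^\\s*[-*]\\s+", "^\\s*\\d+\\.\\s+"] }"#)
            .expect("valid example config")
    }
    // parse_example().rewrap_prefixes[0].is_match("- a markdown list item") == true
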
#[doc(hidden)]
#[cfg(any(test, feature = "test-support"))]
pub struct FakeLspAdapter {
@@ -988,12 +1006,12 @@ pub struct FakeLspAdapter {
/// This struct includes settings for defining which pairs of characters are considered brackets and
/// also specifies any language-specific scopes where these pairs should be ignored for bracket matching purposes.
#[derive(Clone, Debug, Default, JsonSchema)]
+#[schemars(with = "Vec::<BracketPairContent>")]
pub struct BracketPairConfig {
/// A list of character pairs that should be treated as brackets in the context of a given language.
pub pairs: Vec<BracketPair>,
/// A list of tree-sitter scopes for which a given bracket should not be active.
/// N-th entry in `[Self::disabled_scopes_by_bracket_ix]` contains a list of disabled scopes for an n-th entry in `[Self::pairs]`
- #[serde(skip)]
pub disabled_scopes_by_bracket_ix: Vec<Vec<String>>,
}
@@ -1003,10 +1021,6 @@ impl BracketPairConfig {
}
}
-fn bracket_pair_config_json_schema(r#gen: &mut SchemaGenerator) -> Schema {
- Option::<Vec<BracketPairContent>>::json_schema(r#gen)
-}
-
#[derive(Deserialize, JsonSchema)]
pub struct BracketPairContent {
#[serde(flatten)]
@@ -1841,6 +1855,14 @@ impl LanguageScope {
.map(|e| (&e.0, &e.1))
}
+ /// Returns additional regex patterns that act as prefix markers for creating
+ /// boundaries during rewrapping.
+ ///
+    /// By default, Zed treats paragraph and comment prefixes as boundaries.
+ pub fn rewrap_prefixes(&self) -> &[Regex] {
+ &self.language.config.rewrap_prefixes
+ }
+
/// Returns a list of language-specific word characters.
///
/// By default, Zed treats alphanumeric characters (and '_') as word characters for
@@ -1170,7 +1170,7 @@ impl LanguageRegistryState {
if let Some(theme) = self.theme.as_ref() {
language.set_theme(theme.syntax());
}
- self.language_settings.languages.insert(
+ self.language_settings.languages.0.insert(
language.name(),
LanguageSettingsContent {
tab_size: language.config.tab_size,
@@ -3,7 +3,6 @@
use crate::{File, Language, LanguageName, LanguageServerName};
use anyhow::Result;
use collections::{FxHashMap, HashMap, HashSet};
-use core::slice;
use ec4rs::{
Properties as EditorconfigProperties,
property::{FinalNewline, IndentSize, IndentStyle, TabWidth, TrimTrailingWs},
@@ -11,20 +10,18 @@ use ec4rs::{
use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder};
use gpui::{App, Modifiers};
use itertools::{Either, Itertools};
-use schemars::{
- JsonSchema,
- schema::{InstanceType, ObjectValidation, Schema, SchemaObject, SingleOrVec},
-};
+use schemars::{JsonSchema, json_schema};
use serde::{
Deserialize, Deserializer, Serialize,
de::{self, IntoDeserializer, MapAccess, SeqAccess, Visitor},
};
-use serde_json::Value;
+
use settings::{
- Settings, SettingsLocation, SettingsSources, SettingsStore, add_references_to_properties,
+ ParameterizedJsonSchema, Settings, SettingsLocation, SettingsSources, SettingsStore,
};
use shellexpand;
-use std::{borrow::Cow, num::NonZeroU32, path::Path, sync::Arc};
+use std::{borrow::Cow, num::NonZeroU32, path::Path, slice, sync::Arc};
+use util::schemars::replace_subschema;
use util::serde::default_true;
/// Initializes the language settings.
@@ -306,13 +303,41 @@ pub struct AllLanguageSettingsContent {
pub defaults: LanguageSettingsContent,
/// The settings for individual languages.
#[serde(default)]
- pub languages: HashMap<LanguageName, LanguageSettingsContent>,
+ pub languages: LanguageToSettingsMap,
/// Settings for associating file extensions and filenames
/// with languages.
#[serde(default)]
pub file_types: HashMap<Arc<str>, Vec<String>>,
}
+/// Map from language name to settings. Its `ParameterizedJsonSchema` allows only known language
+/// names in the keys.
+#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
+pub struct LanguageToSettingsMap(pub HashMap<LanguageName, LanguageSettingsContent>);
+
+inventory::submit! {
+ ParameterizedJsonSchema {
+ add_and_get_ref: |generator, params, _cx| {
+ let language_settings_content_ref = generator
+ .subschema_for::<LanguageSettingsContent>()
+ .to_value();
+ replace_subschema::<LanguageToSettingsMap>(generator, || json_schema!({
+ "type": "object",
+ "properties": params
+ .language_names
+ .iter()
+ .map(|name| {
+ (
+ name.clone(),
+ language_settings_content_ref.clone(),
+ )
+ })
+ .collect::<serde_json::Map<_, _>>()
+ }))
+ }
+ }
+}
+
/// Controls how completions are processed for this language.
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
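
Note: at (de)serialization time the LanguageToSettingsMap wrapper introduced above behaves like the plain map it replaces; only the generated JSON schema is narrowed to known language names via the inventory-registered ParameterizedJsonSchema. A minimal sketch, with the language name and settings value invented for illustration:

let languages: LanguageToSettingsMap = serde_json::from_value(serde_json::json!({
    "Rust": { "tab_size": 4 }
}))
.unwrap();
// The inner HashMap is reachable directly, as the updated call sites show.
assert_eq!(languages.0.len(), 1);
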
@@ -384,7 +409,6 @@ fn default_lsp_fetch_timeout_ms() -> u64 {
/// The settings for a particular language.
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
-#[schemars(deny_unknown_fields)]
pub struct LanguageSettingsContent {
/// How many columns a tab should occupy.
///
@@ -652,41 +676,26 @@ pub enum FormatOnSave {
}
impl JsonSchema for FormatOnSave {
- fn schema_name() -> String {
+ fn schema_name() -> Cow<'static, str> {
"OnSaveFormatter".into()
}
- fn json_schema(generator: &mut schemars::r#gen::SchemaGenerator) -> Schema {
- let mut schema = SchemaObject::default();
+ fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
let formatter_schema = Formatter::json_schema(generator);
- schema.instance_type = Some(
- vec![
- InstanceType::Object,
- InstanceType::String,
- InstanceType::Array,
- ]
- .into(),
- );
-
- let valid_raw_values = SchemaObject {
- enum_values: Some(vec![
- Value::String("on".into()),
- Value::String("off".into()),
- Value::String("prettier".into()),
- Value::String("language_server".into()),
- ]),
- ..Default::default()
- };
- let mut nested_values = SchemaObject::default();
-
- nested_values.array().items = Some(formatter_schema.clone().into());
- schema.subschemas().any_of = Some(vec![
- nested_values.into(),
- valid_raw_values.into(),
- formatter_schema,
- ]);
- schema.into()
+ json_schema!({
+ "oneOf": [
+ {
+ "type": "array",
+ "items": formatter_schema
+ },
+ {
+ "type": "string",
+ "enum": ["on", "off", "language_server"]
+ },
+ formatter_schema
+ ]
+ })
}
}
@@ -725,8 +734,8 @@ impl<'de> Deserialize<'de> for FormatOnSave {
} else if v == "off" {
Ok(Self::Value::Off)
} else if v == "language_server" {
- Ok(Self::Value::List(FormatterList(
- Formatter::LanguageServer { name: None }.into(),
+ Ok(Self::Value::List(FormatterList::Single(
+ Formatter::LanguageServer { name: None },
)))
} else {
let ret: Result<FormatterList, _> =
@@ -787,41 +796,26 @@ pub enum SelectedFormatter {
}
impl JsonSchema for SelectedFormatter {
- fn schema_name() -> String {
+ fn schema_name() -> Cow<'static, str> {
"Formatter".into()
}
- fn json_schema(generator: &mut schemars::r#gen::SchemaGenerator) -> Schema {
- let mut schema = SchemaObject::default();
+ fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
let formatter_schema = Formatter::json_schema(generator);
- schema.instance_type = Some(
- vec![
- InstanceType::Object,
- InstanceType::String,
- InstanceType::Array,
- ]
- .into(),
- );
-
- let valid_raw_values = SchemaObject {
- enum_values: Some(vec![
- Value::String("auto".into()),
- Value::String("prettier".into()),
- Value::String("language_server".into()),
- ]),
- ..Default::default()
- };
-
- let mut nested_values = SchemaObject::default();
-
- nested_values.array().items = Some(formatter_schema.clone().into());
- schema.subschemas().any_of = Some(vec![
- nested_values.into(),
- valid_raw_values.into(),
- formatter_schema,
- ]);
- schema.into()
+ json_schema!({
+ "oneOf": [
+ {
+ "type": "array",
+ "items": formatter_schema
+ },
+ {
+ "type": "string",
+ "enum": ["auto", "language_server"]
+ },
+ formatter_schema
+ ]
+ })
}
}
@@ -836,6 +830,7 @@ impl Serialize for SelectedFormatter {
}
}
}
+
impl<'de> Deserialize<'de> for SelectedFormatter {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
@@ -856,8 +851,8 @@ impl<'de> Deserialize<'de> for SelectedFormatter {
if v == "auto" {
Ok(Self::Value::Auto)
} else if v == "language_server" {
- Ok(Self::Value::List(FormatterList(
- Formatter::LanguageServer { name: None }.into(),
+ Ok(Self::Value::List(FormatterList::Single(
+ Formatter::LanguageServer { name: None },
)))
} else {
let ret: Result<FormatterList, _> =
@@ -885,16 +880,20 @@ impl<'de> Deserialize<'de> for SelectedFormatter {
deserializer.deserialize_any(FormatDeserializer)
}
}
-/// Controls which formatter should be used when formatting code.
+
+/// Controls which formatters should be used when formatting code.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case", transparent)]
-pub struct FormatterList(pub SingleOrVec<Formatter>);
+#[serde(untagged)]
+pub enum FormatterList {
+ Single(Formatter),
+ Vec(Vec<Formatter>),
+}
impl AsRef<[Formatter]> for FormatterList {
fn as_ref(&self) -> &[Formatter] {
- match &self.0 {
- SingleOrVec::Single(single) => slice::from_ref(single),
- SingleOrVec::Vec(v) => v,
+ match &self {
+ Self::Single(single) => slice::from_ref(single),
+ Self::Vec(v) => v,
}
}
}
@@ -1209,7 +1208,7 @@ impl settings::Settings for AllLanguageSettings {
serde_json::from_value(serde_json::to_value(&default_value.defaults)?)?;
let mut languages = HashMap::default();
- for (language_name, settings) in &default_value.languages {
+ for (language_name, settings) in &default_value.languages.0 {
let mut language_settings = defaults.clone();
merge_settings(&mut language_settings, settings);
languages.insert(language_name.clone(), language_settings);
@@ -1310,7 +1309,7 @@ impl settings::Settings for AllLanguageSettings {
}
// A user's language-specific settings override default language-specific settings.
- for (language_name, user_language_settings) in &user_settings.languages {
+ for (language_name, user_language_settings) in &user_settings.languages.0 {
merge_settings(
languages
.entry(language_name.clone())
@@ -1366,51 +1365,6 @@ impl settings::Settings for AllLanguageSettings {
})
}
- fn json_schema(
- generator: &mut schemars::r#gen::SchemaGenerator,
- params: &settings::SettingsJsonSchemaParams,
- _: &App,
- ) -> schemars::schema::RootSchema {
- let mut root_schema = generator.root_schema_for::<Self::FileContent>();
-
- // Create a schema for a 'languages overrides' object, associating editor
- // settings with specific languages.
- assert!(
- root_schema
- .definitions
- .contains_key("LanguageSettingsContent")
- );
-
- let languages_object_schema = SchemaObject {
- instance_type: Some(InstanceType::Object.into()),
- object: Some(Box::new(ObjectValidation {
- properties: params
- .language_names
- .iter()
- .map(|name| {
- (
- name.clone(),
- Schema::new_ref("#/definitions/LanguageSettingsContent".into()),
- )
- })
- .collect(),
- ..Default::default()
- })),
- ..Default::default()
- };
-
- root_schema
- .definitions
- .extend([("Languages".into(), languages_object_schema.into())]);
-
- add_references_to_properties(
- &mut root_schema,
- &[("languages", "#/definitions/Languages")],
- );
-
- root_schema
- }
-
fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
let d = &mut current.defaults;
if let Some(size) = vscode
@@ -1674,29 +1628,26 @@ mod tests {
let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap();
assert_eq!(
settings.formatter,
- Some(SelectedFormatter::List(FormatterList(
- Formatter::LanguageServer { name: None }.into()
+ Some(SelectedFormatter::List(FormatterList::Single(
+ Formatter::LanguageServer { name: None }
)))
);
let raw = "{\"formatter\": [{\"language_server\": {\"name\": null}}]}";
let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap();
assert_eq!(
settings.formatter,
- Some(SelectedFormatter::List(FormatterList(
- vec![Formatter::LanguageServer { name: None }].into()
- )))
+ Some(SelectedFormatter::List(FormatterList::Vec(vec![
+ Formatter::LanguageServer { name: None }
+ ])))
);
let raw = "{\"formatter\": [{\"language_server\": {\"name\": null}}, \"prettier\"]}";
let settings: LanguageSettingsContent = serde_json::from_str(raw).unwrap();
assert_eq!(
settings.formatter,
- Some(SelectedFormatter::List(FormatterList(
- vec![
- Formatter::LanguageServer { name: None },
- Formatter::Prettier
- ]
- .into()
- )))
+ Some(SelectedFormatter::List(FormatterList::Vec(vec![
+ Formatter::LanguageServer { name: None },
+ Formatter::Prettier
+ ])))
);
}
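
Note: FormatterList is now an untagged enum rather than schemars' SingleOrVec, so a lone formatter and a list of formatters both deserialize (as exercised by the tests above) and normalize to a slice through AsRef. A small illustrative sketch:

let single = FormatterList::Single(Formatter::Prettier);
let many = FormatterList::Vec(vec![
    Formatter::Prettier,
    Formatter::LanguageServer { name: None },
]);
// Both shapes expose a uniform &[Formatter] view.
assert_eq!(single.as_ref().len(), 1);
assert_eq!(many.as_ref().len(), 2);
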
@@ -9,17 +9,18 @@ mod telemetry;
pub mod fake_provider;
use anthropic::{AnthropicError, parse_prompt_too_long};
-use anyhow::Result;
+use anyhow::{Result, anyhow};
use client::Client;
use futures::FutureExt;
use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{AnyElement, AnyView, App, AsyncApp, SharedString, Task, Window};
-use http_client::http;
+use http_client::{StatusCode, http};
use icons::IconName;
use parking_lot::Mutex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use std::ops::{Add, Sub};
+use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use std::{fmt, io};
@@ -34,11 +35,22 @@ pub use crate::request::*;
pub use crate::role::*;
pub use crate::telemetry::*;
-pub const ZED_CLOUD_PROVIDER_ID: &str = "zed.dev";
+pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId =
+ LanguageModelProviderId::new("anthropic");
+pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName =
+ LanguageModelProviderName::new("Anthropic");
-/// If we get a rate limit error that doesn't tell us when we can retry,
-/// default to waiting this long before retrying.
-const DEFAULT_RATE_LIMIT_RETRY_AFTER: Duration = Duration::from_secs(4);
+pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google");
+pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName =
+ LanguageModelProviderName::new("Google AI");
+
+pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai");
+pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName =
+ LanguageModelProviderName::new("OpenAI");
+
+pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev");
+pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
+ LanguageModelProviderName::new("Zed");
pub fn init(client: Arc<Client>, cx: &mut App) {
init_settings(cx);
@@ -71,6 +83,12 @@ pub enum LanguageModelCompletionEvent {
data: String,
},
ToolUse(LanguageModelToolUse),
+ ToolUseJsonParseError {
+ id: LanguageModelToolUseId,
+ tool_name: Arc<str>,
+ raw_input: Arc<str>,
+ json_parse_error: String,
+ },
StartMessage {
message_id: String,
},
@@ -79,61 +97,179 @@ pub enum LanguageModelCompletionEvent {
#[derive(Error, Debug)]
pub enum LanguageModelCompletionError {
- #[error("rate limit exceeded, retry after {retry_after:?}")]
- RateLimitExceeded { retry_after: Duration },
- #[error("received bad input JSON")]
- BadInputJson {
- id: LanguageModelToolUseId,
- tool_name: Arc<str>,
- raw_input: Arc<str>,
- json_parse_error: String,
+ #[error("prompt too large for context window")]
+ PromptTooLarge { tokens: Option<u64> },
+ #[error("missing {provider} API key")]
+ NoApiKey { provider: LanguageModelProviderName },
+ #[error("{provider}'s API rate limit exceeded")]
+ RateLimitExceeded {
+ provider: LanguageModelProviderName,
+ retry_after: Option<Duration>,
+ },
+ #[error("{provider}'s API servers are overloaded right now")]
+ ServerOverloaded {
+ provider: LanguageModelProviderName,
+ retry_after: Option<Duration>,
+ },
+ #[error("{provider}'s API server reported an internal server error: {message}")]
+ ApiInternalServerError {
+ provider: LanguageModelProviderName,
+ message: String,
+ },
+ #[error("HTTP response error from {provider}'s API: status {status_code} - {message:?}")]
+ HttpResponseError {
+ provider: LanguageModelProviderName,
+ status_code: StatusCode,
+ message: String,
+ },
+
+ // Client errors
+ #[error("invalid request format to {provider}'s API: {message}")]
+ BadRequestFormat {
+ provider: LanguageModelProviderName,
+ message: String,
},
- #[error("language model provider's API is overloaded")]
- Overloaded,
+ #[error("authentication error with {provider}'s API: {message}")]
+ AuthenticationError {
+ provider: LanguageModelProviderName,
+ message: String,
+ },
+ #[error("permission error with {provider}'s API: {message}")]
+ PermissionError {
+ provider: LanguageModelProviderName,
+ message: String,
+ },
+ #[error("language model provider API endpoint not found")]
+ ApiEndpointNotFound { provider: LanguageModelProviderName },
+ #[error("I/O error reading response from {provider}'s API")]
+ ApiReadResponseError {
+ provider: LanguageModelProviderName,
+ #[source]
+ error: io::Error,
+ },
+ #[error("error serializing request to {provider} API")]
+ SerializeRequest {
+ provider: LanguageModelProviderName,
+ #[source]
+ error: serde_json::Error,
+ },
+ #[error("error building request body to {provider} API")]
+ BuildRequestBody {
+ provider: LanguageModelProviderName,
+ #[source]
+ error: http::Error,
+ },
+ #[error("error sending HTTP request to {provider} API")]
+ HttpSend {
+ provider: LanguageModelProviderName,
+ #[source]
+ error: anyhow::Error,
+ },
+ #[error("error deserializing {provider} API response")]
+ DeserializeResponse {
+ provider: LanguageModelProviderName,
+ #[source]
+ error: serde_json::Error,
+ },
+
+ // TODO: Ideally this would be removed in favor of having a comprehensive list of errors.
#[error(transparent)]
Other(#[from] anyhow::Error),
- #[error("invalid request format to language model provider's API")]
- BadRequestFormat,
- #[error("authentication error with language model provider's API")]
- AuthenticationError,
- #[error("permission error with language model provider's API")]
- PermissionError,
- #[error("language model provider API endpoint not found")]
- ApiEndpointNotFound,
- #[error("prompt too large for context window")]
- PromptTooLarge { tokens: Option<u64> },
- #[error("internal server error in language model provider's API")]
- ApiInternalServerError,
- #[error("I/O error reading response from language model provider's API: {0:?}")]
- ApiReadResponseError(io::Error),
- #[error("HTTP response error from language model provider's API: status {status} - {body:?}")]
- HttpResponseError { status: u16, body: String },
- #[error("error serializing request to language model provider API: {0}")]
- SerializeRequest(serde_json::Error),
- #[error("error building request body to language model provider API: {0}")]
- BuildRequestBody(http::Error),
- #[error("error sending HTTP request to language model provider API: {0}")]
- HttpSend(anyhow::Error),
- #[error("error deserializing language model provider API response: {0}")]
- DeserializeResponse(serde_json::Error),
- #[error("unexpected language model provider API response format: {0}")]
- UnknownResponseFormat(String),
+}
+
+impl LanguageModelCompletionError {
+ pub fn from_cloud_failure(
+ upstream_provider: LanguageModelProviderName,
+ code: String,
+ message: String,
+ retry_after: Option<Duration>,
+ ) -> Self {
+ if let Some(tokens) = parse_prompt_too_long(&message) {
+ // TODO: currently Anthropic PAYLOAD_TOO_LARGE response may cause INTERNAL_SERVER_ERROR
+ // to be reported. This is a temporary workaround to handle this in the case where the
+ // token limit has been exceeded.
+ Self::PromptTooLarge {
+ tokens: Some(tokens),
+ }
+ } else if let Some(status_code) = code
+ .strip_prefix("upstream_http_")
+ .and_then(|code| StatusCode::from_str(code).ok())
+ {
+ Self::from_http_status(upstream_provider, status_code, message, retry_after)
+ } else if let Some(status_code) = code
+ .strip_prefix("http_")
+ .and_then(|code| StatusCode::from_str(code).ok())
+ {
+ Self::from_http_status(ZED_CLOUD_PROVIDER_NAME, status_code, message, retry_after)
+ } else {
+ anyhow!("completion request failed, code: {code}, message: {message}").into()
+ }
+ }
+
+ pub fn from_http_status(
+ provider: LanguageModelProviderName,
+ status_code: StatusCode,
+ message: String,
+ retry_after: Option<Duration>,
+ ) -> Self {
+ match status_code {
+ StatusCode::BAD_REQUEST => Self::BadRequestFormat { provider, message },
+ StatusCode::UNAUTHORIZED => Self::AuthenticationError { provider, message },
+ StatusCode::FORBIDDEN => Self::PermissionError { provider, message },
+ StatusCode::NOT_FOUND => Self::ApiEndpointNotFound { provider },
+ StatusCode::PAYLOAD_TOO_LARGE => Self::PromptTooLarge {
+ tokens: parse_prompt_too_long(&message),
+ },
+ StatusCode::TOO_MANY_REQUESTS => Self::RateLimitExceeded {
+ provider,
+ retry_after,
+ },
+ StatusCode::INTERNAL_SERVER_ERROR => Self::ApiInternalServerError { provider, message },
+ StatusCode::SERVICE_UNAVAILABLE => Self::ServerOverloaded {
+ provider,
+ retry_after,
+ },
+ _ if status_code.as_u16() == 529 => Self::ServerOverloaded {
+ provider,
+ retry_after,
+ },
+ _ => Self::HttpResponseError {
+ provider,
+ status_code,
+ message,
+ },
+ }
+ }
}
impl From<AnthropicError> for LanguageModelCompletionError {
fn from(error: AnthropicError) -> Self {
+ let provider = ANTHROPIC_PROVIDER_NAME;
match error {
- AnthropicError::SerializeRequest(error) => Self::SerializeRequest(error),
- AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody(error),
- AnthropicError::HttpSend(error) => Self::HttpSend(error),
- AnthropicError::DeserializeResponse(error) => Self::DeserializeResponse(error),
- AnthropicError::ReadResponse(error) => Self::ApiReadResponseError(error),
- AnthropicError::HttpResponseError { status, body } => {
- Self::HttpResponseError { status, body }
+ AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
+ AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
+ AnthropicError::HttpSend(error) => Self::HttpSend { provider, error },
+ AnthropicError::DeserializeResponse(error) => {
+ Self::DeserializeResponse { provider, error }
}
- AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded { retry_after },
+ AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
+ AnthropicError::HttpResponseError {
+ status_code,
+ message,
+ } => Self::HttpResponseError {
+ provider,
+ status_code,
+ message,
+ },
+ AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded {
+ provider,
+ retry_after: Some(retry_after),
+ },
+ AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
+ provider,
+ retry_after,
+ },
AnthropicError::ApiError(api_error) => api_error.into(),
- AnthropicError::UnexpectedResponseFormat(error) => Self::UnknownResponseFormat(error),
}
}
}
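
Note: a short illustration of how the constructors above classify a cloud failure (the code string, message, and retry value are invented; the mapping follows from_cloud_failure and from_http_status as written, and the snippet assumes the surrounding module's imports):

// "upstream_http_429" strips to status 429, which maps to RateLimitExceeded
// and is attributed to the upstream provider rather than to Zed's cloud.
let err = LanguageModelCompletionError::from_cloud_failure(
    ANTHROPIC_PROVIDER_NAME,
    "upstream_http_429".to_string(),
    "Too many requests".to_string(),
    Some(Duration::from_secs(30)),
);
assert!(matches!(
    err,
    LanguageModelCompletionError::RateLimitExceeded { retry_after: Some(_), .. }
));
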
@@ -141,23 +277,39 @@ impl From<AnthropicError> for LanguageModelCompletionError {
impl From<anthropic::ApiError> for LanguageModelCompletionError {
fn from(error: anthropic::ApiError) -> Self {
use anthropic::ApiErrorCode::*;
-
+ let provider = ANTHROPIC_PROVIDER_NAME;
match error.code() {
Some(code) => match code {
- InvalidRequestError => LanguageModelCompletionError::BadRequestFormat,
- AuthenticationError => LanguageModelCompletionError::AuthenticationError,
- PermissionError => LanguageModelCompletionError::PermissionError,
- NotFoundError => LanguageModelCompletionError::ApiEndpointNotFound,
- RequestTooLarge => LanguageModelCompletionError::PromptTooLarge {
+ InvalidRequestError => Self::BadRequestFormat {
+ provider,
+ message: error.message,
+ },
+ AuthenticationError => Self::AuthenticationError {
+ provider,
+ message: error.message,
+ },
+ PermissionError => Self::PermissionError {
+ provider,
+ message: error.message,
+ },
+ NotFoundError => Self::ApiEndpointNotFound { provider },
+ RequestTooLarge => Self::PromptTooLarge {
tokens: parse_prompt_too_long(&error.message),
},
- RateLimitError => LanguageModelCompletionError::RateLimitExceeded {
- retry_after: DEFAULT_RATE_LIMIT_RETRY_AFTER,
+ RateLimitError => Self::RateLimitExceeded {
+ provider,
+ retry_after: None,
+ },
+ ApiError => Self::ApiInternalServerError {
+ provider,
+ message: error.message,
+ },
+ OverloadedError => Self::ServerOverloaded {
+ provider,
+ retry_after: None,
},
- ApiError => LanguageModelCompletionError::ApiInternalServerError,
- OverloadedError => LanguageModelCompletionError::Overloaded,
},
- None => LanguageModelCompletionError::Other(error.into()),
+ None => Self::Other(error.into()),
}
}
}
@@ -278,6 +430,13 @@ pub trait LanguageModel: Send + Sync {
fn name(&self) -> LanguageModelName;
fn provider_id(&self) -> LanguageModelProviderId;
fn provider_name(&self) -> LanguageModelProviderName;
+ fn upstream_provider_id(&self) -> LanguageModelProviderId {
+ self.provider_id()
+ }
+ fn upstream_provider_name(&self) -> LanguageModelProviderName {
+ self.provider_name()
+ }
+
fn telemetry_id(&self) -> String;
fn api_key(&self, _cx: &App) -> Option<String> {
@@ -365,6 +524,9 @@ pub trait LanguageModel: Send + Sync {
Ok(LanguageModelCompletionEvent::RedactedThinking { .. }) => None,
Ok(LanguageModelCompletionEvent::Stop(_)) => None,
Ok(LanguageModelCompletionEvent::ToolUse(_)) => None,
+ Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ ..
+ }) => None,
Ok(LanguageModelCompletionEvent::UsageUpdate(token_usage)) => {
*last_token_usage.lock() = token_usage;
None
@@ -395,39 +557,6 @@ pub trait LanguageModel: Send + Sync {
}
}
-#[derive(Debug, Error)]
-pub enum LanguageModelKnownError {
- #[error("Context window limit exceeded ({tokens})")]
- ContextWindowLimitExceeded { tokens: u64 },
- #[error("Language model provider's API is currently overloaded")]
- Overloaded,
- #[error("Language model provider's API encountered an internal server error")]
- ApiInternalServerError,
- #[error("I/O error while reading response from language model provider's API: {0:?}")]
- ReadResponseError(io::Error),
- #[error("Error deserializing response from language model provider's API: {0:?}")]
- DeserializeResponse(serde_json::Error),
- #[error("Language model provider's API returned a response in an unknown format")]
- UnknownResponseFormat(String),
- #[error("Rate limit exceeded for language model provider's API; retry in {retry_after:?}")]
- RateLimitExceeded { retry_after: Duration },
-}
-
-impl LanguageModelKnownError {
- /// Attempts to map an HTTP response status code to a known error type.
- /// Returns None if the status code doesn't map to a specific known error.
- pub fn from_http_response(status: u16, _body: &str) -> Option<Self> {
- match status {
- 429 => Some(Self::RateLimitExceeded {
- retry_after: DEFAULT_RATE_LIMIT_RETRY_AFTER,
- }),
- 503 => Some(Self::Overloaded),
- 500..=599 => Some(Self::ApiInternalServerError),
- _ => None,
- }
- }
-}
-
pub trait LanguageModelTool: 'static + DeserializeOwned + JsonSchema {
fn name() -> String;
fn description() -> String;
@@ -473,7 +602,7 @@ pub trait LanguageModelProvider: 'static {
#[derive(PartialEq, Eq)]
pub enum LanguageModelProviderTosView {
/// When there are some past interactions in the Agent Panel.
- ThreadtEmptyState,
+ ThreadEmptyState,
/// When there are no past interactions in the Agent Panel.
ThreadFreshStart,
PromptEditorPopup,
@@ -509,12 +638,30 @@ pub struct LanguageModelProviderId(pub SharedString);
#[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)]
pub struct LanguageModelProviderName(pub SharedString);
+impl LanguageModelProviderId {
+ pub const fn new(id: &'static str) -> Self {
+ Self(SharedString::new_static(id))
+ }
+}
+
+impl LanguageModelProviderName {
+ pub const fn new(id: &'static str) -> Self {
+ Self(SharedString::new_static(id))
+ }
+}
+
impl fmt::Display for LanguageModelProviderId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
+impl fmt::Display for LanguageModelProviderName {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.0)
+ }
+}
+
impl From<String> for LanguageModelId {
fn from(value: String) -> Self {
Self(SharedString::from(value))
@@ -98,7 +98,7 @@ impl ConfiguredModel {
}
pub fn is_provided_by_zed(&self) -> bool {
- self.provider.id().0 == crate::ZED_CLOUD_PROVIDER_ID
+ self.provider.id() == crate::ZED_CLOUD_PROVIDER_ID
}
}
@@ -1,3 +1,4 @@
+use crate::ANTHROPIC_PROVIDER_ID;
use anthropic::ANTHROPIC_API_URL;
use anyhow::{Context as _, anyhow};
use client::telemetry::Telemetry;
@@ -8,8 +9,6 @@ use std::sync::Arc;
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
use util::ResultExt;
-pub const ANTHROPIC_PROVIDER_ID: &str = "anthropic";
-
pub fn report_assistant_event(
event: AssistantEventData,
telemetry: Option<Arc<Telemetry>>,
@@ -19,7 +18,7 @@ pub fn report_assistant_event(
) {
if let Some(telemetry) = telemetry.as_ref() {
telemetry.report_assistant_event(event.clone());
- if telemetry.metrics_enabled() && event.model_provider == ANTHROPIC_PROVIDER_ID {
+ if telemetry.metrics_enabled() && event.model_provider == ANTHROPIC_PROVIDER_ID.0 {
if let Some(api_key) = model_api_key {
executor
.spawn(async move {
@@ -20,8 +20,10 @@ aws-credential-types = { workspace = true, features = [
] }
aws_http_client.workspace = true
bedrock.workspace = true
+chrono.workspace = true
client.workspace = true
collections.workspace = true
+component.workspace = true
credentials_provider.workspace = true
copilot.workspace = true
deepseek = { workspace = true, features = ["schemars"] }
@@ -33,8 +33,8 @@ use theme::ThemeSettings;
use ui::{Icon, IconName, List, Tooltip, prelude::*};
use util::ResultExt;
-const PROVIDER_ID: &str = language_model::ANTHROPIC_PROVIDER_ID;
-const PROVIDER_NAME: &str = "Anthropic";
+const PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = language_model::ANTHROPIC_PROVIDER_NAME;
#[derive(Default, Clone, Debug, PartialEq)]
pub struct AnthropicSettings {
@@ -218,11 +218,11 @@ impl LanguageModelProviderState for AnthropicLanguageModelProvider {
impl LanguageModelProvider for AnthropicLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -403,7 +403,11 @@ impl AnthropicModel {
};
async move {
- let api_key = api_key.context("Missing Anthropic API Key")?;
+ let Some(api_key) = api_key else {
+ return Err(LanguageModelCompletionError::NoApiKey {
+ provider: PROVIDER_NAME,
+ });
+ };
let request =
anthropic::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
request.await.map_err(Into::into)
@@ -422,11 +426,11 @@ impl LanguageModel for AnthropicModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -806,12 +810,14 @@ impl AnthropicEventMapper {
raw_input: tool_use.input_json.clone(),
},
)),
- Err(json_parse_err) => Err(LanguageModelCompletionError::BadInputJson {
- id: tool_use.id.into(),
- tool_name: tool_use.name.into(),
- raw_input: input_json.into(),
- json_parse_error: json_parse_err.to_string(),
- }),
+ Err(json_parse_err) => {
+ Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_use.id.into(),
+ tool_name: tool_use.name.into(),
+ raw_input: input_json.into(),
+ json_parse_error: json_parse_err.to_string(),
+ })
+ }
};
vec![event_result]
@@ -46,14 +46,13 @@ use settings::{Settings, SettingsStore};
use smol::lock::OnceCell;
use strum::{EnumIter, IntoEnumIterator, IntoStaticStr};
use theme::ThemeSettings;
-use tokio::runtime::Handle;
use ui::{Icon, IconName, List, Tooltip, prelude::*};
use util::ResultExt;
use crate::AllLanguageModelSettings;
-const PROVIDER_ID: &str = "amazon-bedrock";
-const PROVIDER_NAME: &str = "Amazon Bedrock";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("amazon-bedrock");
+const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Amazon Bedrock");
#[derive(Default, Clone, Deserialize, Serialize, PartialEq, Debug)]
pub struct BedrockCredentials {
@@ -285,11 +284,11 @@ impl BedrockLanguageModelProvider {
impl LanguageModelProvider for BedrockLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -460,22 +459,22 @@ impl BedrockModel {
&self,
request: bedrock::Request,
cx: &AsyncApp,
- ) -> Result<
- BoxFuture<'static, BoxStream<'static, Result<BedrockStreamingResponse, BedrockError>>>,
+ ) -> BoxFuture<
+ 'static,
+ Result<BoxStream<'static, Result<BedrockStreamingResponse, BedrockError>>>,
> {
- let runtime_client = self
- .get_or_init_client(cx)
+ let Ok(runtime_client) = self
+ .get_or_init_client(&cx)
.cloned()
- .context("Bedrock client not initialized")?;
- let owned_handle = self.handler.clone();
+ .context("Bedrock client not initialized")
+ else {
+ return futures::future::ready(Err(anyhow!("App state dropped"))).boxed();
+ };
- Ok(async move {
- let request = bedrock::stream_completion(runtime_client, request, owned_handle);
- request.await.unwrap_or_else(|e| {
- futures::stream::once(async move { Err(BedrockError::ClientError(e)) }).boxed()
- })
+ match Tokio::spawn(cx, bedrock::stream_completion(runtime_client, request)) {
+ Ok(res) => async { res.await.map_err(|err| anyhow!(err))? }.boxed(),
+ Err(err) => futures::future::ready(Err(anyhow!(err))).boxed(),
}
- .boxed())
}
}
@@ -489,11 +488,11 @@ impl LanguageModel for BedrockModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -570,12 +569,10 @@ impl LanguageModel for BedrockModel {
Err(err) => return futures::future::ready(Err(err.into())).boxed(),
};
- let owned_handle = self.handler.clone();
-
let request = self.stream_completion(request, cx);
let future = self.request_limiter.stream(async move {
- let response = request.map_err(|err| anyhow!(err))?.await;
- let events = map_to_language_model_completion_events(response, owned_handle);
+ let response = request.await.map_err(|err| anyhow!(err))?;
+ let events = map_to_language_model_completion_events(response);
if deny_tool_calls {
Ok(deny_tool_use_events(events).boxed())
@@ -879,7 +876,6 @@ pub fn get_bedrock_tokens(
pub fn map_to_language_model_completion_events(
events: Pin<Box<dyn Send + Stream<Item = Result<BedrockStreamingResponse, BedrockError>>>>,
- handle: Handle,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
struct RawToolUse {
id: String,
@@ -892,198 +888,123 @@ pub fn map_to_language_model_completion_events(
tool_uses_by_index: HashMap<i32, RawToolUse>,
}
- futures::stream::unfold(
- State {
- events,
- tool_uses_by_index: HashMap::default(),
- },
- move |mut state: State| {
- let inner_handle = handle.clone();
- async move {
- inner_handle
- .spawn(async {
- while let Some(event) = state.events.next().await {
- match event {
- Ok(event) => match event {
- ConverseStreamOutput::ContentBlockDelta(cb_delta) => {
- match cb_delta.delta {
- Some(ContentBlockDelta::Text(text_out)) => {
- let completion_event =
- LanguageModelCompletionEvent::Text(text_out);
- return Some((Some(Ok(completion_event)), state));
- }
-
- Some(ContentBlockDelta::ToolUse(text_out)) => {
- if let Some(tool_use) = state
- .tool_uses_by_index
- .get_mut(&cb_delta.content_block_index)
- {
- tool_use.input_json.push_str(text_out.input());
- }
- }
-
- Some(ContentBlockDelta::ReasoningContent(thinking)) => {
- match thinking {
- ReasoningContentBlockDelta::RedactedContent(
- redacted,
- ) => {
- let thinking_event =
- LanguageModelCompletionEvent::Thinking {
- text: String::from_utf8(
- redacted.into_inner(),
- )
- .unwrap_or("REDACTED".to_string()),
- signature: None,
- };
-
- return Some((
- Some(Ok(thinking_event)),
- state,
- ));
- }
- ReasoningContentBlockDelta::Signature(
- signature,
- ) => {
- return Some((
- Some(Ok(LanguageModelCompletionEvent::Thinking {
- text: "".to_string(),
- signature: Some(signature)
- })),
- state,
- ));
- }
- ReasoningContentBlockDelta::Text(thoughts) => {
- let thinking_event =
- LanguageModelCompletionEvent::Thinking {
- text: thoughts.to_string(),
- signature: None
- };
-
- return Some((
- Some(Ok(thinking_event)),
- state,
- ));
- }
- _ => {}
- }
- }
- _ => {}
- }
- }
- ConverseStreamOutput::ContentBlockStart(cb_start) => {
- if let Some(ContentBlockStart::ToolUse(text_out)) =
- cb_start.start
- {
- let tool_use = RawToolUse {
- id: text_out.tool_use_id,
- name: text_out.name,
- input_json: String::new(),
- };
-
- state
- .tool_uses_by_index
- .insert(cb_start.content_block_index, tool_use);
- }
- }
- ConverseStreamOutput::ContentBlockStop(cb_stop) => {
- if let Some(tool_use) = state
- .tool_uses_by_index
- .remove(&cb_stop.content_block_index)
- {
- let tool_use_event = LanguageModelToolUse {
- id: tool_use.id.into(),
- name: tool_use.name.into(),
- is_input_complete: true,
- raw_input: tool_use.input_json.clone(),
- input: if tool_use.input_json.is_empty() {
- Value::Null
- } else {
- serde_json::Value::from_str(
- &tool_use.input_json,
- )
- .map_err(|err| anyhow!(err))
- .unwrap()
- },
- };
-
- return Some((
- Some(Ok(LanguageModelCompletionEvent::ToolUse(
- tool_use_event,
- ))),
- state,
- ));
- }
- }
-
- ConverseStreamOutput::Metadata(cb_meta) => {
- if let Some(metadata) = cb_meta.usage {
- let completion_event =
- LanguageModelCompletionEvent::UsageUpdate(
- TokenUsage {
- input_tokens: metadata.input_tokens as u64,
- output_tokens: metadata.output_tokens as u64,
- cache_creation_input_tokens:
- metadata.cache_write_input_tokens.unwrap_or_default() as u64,
- cache_read_input_tokens:
- metadata.cache_read_input_tokens.unwrap_or_default() as u64,
- },
- );
- return Some((Some(Ok(completion_event)), state));
- }
- }
- ConverseStreamOutput::MessageStop(message_stop) => {
- let reason = match message_stop.stop_reason {
- StopReason::ContentFiltered => {
- LanguageModelCompletionEvent::Stop(
- language_model::StopReason::EndTurn,
- )
- }
- StopReason::EndTurn => {
- LanguageModelCompletionEvent::Stop(
- language_model::StopReason::EndTurn,
- )
- }
- StopReason::GuardrailIntervened => {
- LanguageModelCompletionEvent::Stop(
- language_model::StopReason::EndTurn,
- )
- }
- StopReason::MaxTokens => {
- LanguageModelCompletionEvent::Stop(
- language_model::StopReason::EndTurn,
- )
- }
- StopReason::StopSequence => {
- LanguageModelCompletionEvent::Stop(
- language_model::StopReason::EndTurn,
- )
- }
- StopReason::ToolUse => {
- LanguageModelCompletionEvent::Stop(
- language_model::StopReason::ToolUse,
- )
- }
- _ => LanguageModelCompletionEvent::Stop(
- language_model::StopReason::EndTurn,
- ),
- };
- return Some((Some(Ok(reason)), state));
- }
- _ => {}
- },
+ let initial_state = State {
+ events,
+ tool_uses_by_index: HashMap::default(),
+ };
- Err(err) => return Some((Some(Err(anyhow!(err).into())), state)),
+ futures::stream::unfold(initial_state, |mut state| async move {
+ match state.events.next().await {
+ Some(event_result) => match event_result {
+ Ok(event) => {
+ let result = match event {
+ ConverseStreamOutput::ContentBlockDelta(cb_delta) => match cb_delta.delta {
+ Some(ContentBlockDelta::Text(text)) => {
+ Some(Ok(LanguageModelCompletionEvent::Text(text)))
+ }
+ Some(ContentBlockDelta::ToolUse(tool_output)) => {
+ if let Some(tool_use) = state
+ .tool_uses_by_index
+ .get_mut(&cb_delta.content_block_index)
+ {
+ tool_use.input_json.push_str(tool_output.input());
+ }
+ None
}
+ Some(ContentBlockDelta::ReasoningContent(thinking)) => match thinking {
+ ReasoningContentBlockDelta::Text(thoughts) => {
+ Some(Ok(LanguageModelCompletionEvent::Thinking {
+ text: thoughts.clone(),
+ signature: None,
+ }))
+ }
+ ReasoningContentBlockDelta::Signature(sig) => {
+ Some(Ok(LanguageModelCompletionEvent::Thinking {
+ text: "".into(),
+ signature: Some(sig),
+ }))
+ }
+ ReasoningContentBlockDelta::RedactedContent(redacted) => {
+ let content = String::from_utf8(redacted.into_inner())
+ .unwrap_or("REDACTED".to_string());
+ Some(Ok(LanguageModelCompletionEvent::Thinking {
+ text: content,
+ signature: None,
+ }))
+ }
+ _ => None,
+ },
+ _ => None,
+ },
+ ConverseStreamOutput::ContentBlockStart(cb_start) => {
+ if let Some(ContentBlockStart::ToolUse(tool_start)) = cb_start.start {
+ state.tool_uses_by_index.insert(
+ cb_start.content_block_index,
+ RawToolUse {
+ id: tool_start.tool_use_id,
+ name: tool_start.name,
+ input_json: String::new(),
+ },
+ );
+ }
+ None
}
- None
- })
- .await
- .log_err()
- .flatten()
- }
- },
- )
- .filter_map(|event| async move { event })
+ ConverseStreamOutput::ContentBlockStop(cb_stop) => state
+ .tool_uses_by_index
+ .remove(&cb_stop.content_block_index)
+ .map(|tool_use| {
+ let input = if tool_use.input_json.is_empty() {
+ Value::Null
+ } else {
+ serde_json::Value::from_str(&tool_use.input_json)
+ .unwrap_or(Value::Null)
+ };
+
+ Ok(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: tool_use.id.into(),
+ name: tool_use.name.into(),
+ is_input_complete: true,
+ raw_input: tool_use.input_json.clone(),
+ input,
+ },
+ ))
+ }),
+ ConverseStreamOutput::Metadata(cb_meta) => cb_meta.usage.map(|metadata| {
+ Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
+ input_tokens: metadata.input_tokens as u64,
+ output_tokens: metadata.output_tokens as u64,
+ cache_creation_input_tokens: metadata
+ .cache_write_input_tokens
+ .unwrap_or_default()
+ as u64,
+ cache_read_input_tokens: metadata
+ .cache_read_input_tokens
+ .unwrap_or_default()
+ as u64,
+ }))
+ }),
+ ConverseStreamOutput::MessageStop(message_stop) => {
+ let stop_reason = match message_stop.stop_reason {
+ StopReason::ToolUse => language_model::StopReason::ToolUse,
+ _ => language_model::StopReason::EndTurn,
+ };
+ Some(Ok(LanguageModelCompletionEvent::Stop(stop_reason)))
+ }
+ _ => None,
+ };
+
+ Some((result, state))
+ }
+ Err(err) => Some((
+ Some(Err(LanguageModelCompletionError::Other(anyhow!(err)))),
+ state,
+ )),
+ },
+ None => None,
+ }
+ })
+ .filter_map(|result| async move { result })
}
struct ConfigurationView {
@@ -1,5 +1,6 @@
-use anthropic::{AnthropicModelMode, parse_prompt_too_long};
+use anthropic::AnthropicModelMode;
use anyhow::{Context as _, Result, anyhow};
+use chrono::{DateTime, Utc};
use client::{Client, ModelRequestUsage, UserStore, zed_urls};
use futures::{
AsyncBufReadExt, FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream,
@@ -8,25 +9,21 @@ use google_ai::GoogleModelMode;
use gpui::{
AnyElement, AnyView, App, AsyncApp, Context, Entity, SemanticVersion, Subscription, Task,
};
+use http_client::http::{HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, Method, Response, StatusCode};
use language_model::{
AuthenticateError, LanguageModel, LanguageModelCacheConfiguration,
- LanguageModelCompletionError, LanguageModelId, LanguageModelKnownError, LanguageModelName,
- LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
- LanguageModelProviderTosView, LanguageModelRequest, LanguageModelToolChoice,
- LanguageModelToolSchemaFormat, ModelRequestLimitReachedError, RateLimiter,
- ZED_CLOUD_PROVIDER_ID,
-};
-use language_model::{
- LanguageModelCompletionEvent, LanguageModelProvider, LlmApiToken, PaymentRequiredError,
- RefreshLlmTokenListener,
+ LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName,
+ LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
+ LanguageModelProviderState, LanguageModelProviderTosView, LanguageModelRequest,
+ LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken,
+ ModelRequestLimitReachedError, PaymentRequiredError, RateLimiter, RefreshLlmTokenListener,
};
use proto::Plan;
use release_channel::AppVersion;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use settings::SettingsStore;
-use smol::Timer;
use smol::io::{AsyncReadExt, BufReader};
use std::pin::Pin;
use std::str::FromStr as _;
@@ -47,7 +44,8 @@ use crate::provider::anthropic::{AnthropicEventMapper, count_anthropic_tokens, i
use crate::provider::google::{GoogleEventMapper, into_google};
use crate::provider::open_ai::{OpenAiEventMapper, count_open_ai_tokens, into_open_ai};
-pub const PROVIDER_NAME: &str = "Zed";
+const PROVIDER_ID: LanguageModelProviderId = language_model::ZED_CLOUD_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = language_model::ZED_CLOUD_PROVIDER_NAME;
#[derive(Default, Clone, Debug, PartialEq)]
pub struct ZedDotDevSettings {
@@ -120,7 +118,7 @@ pub struct State {
llm_api_token: LlmApiToken,
user_store: Entity<UserStore>,
status: client::Status,
- accept_terms: Option<Task<Result<()>>>,
+ accept_terms_of_service_task: Option<Task<Result<()>>>,
models: Vec<Arc<zed_llm_client::LanguageModel>>,
default_model: Option<Arc<zed_llm_client::LanguageModel>>,
default_fast_model: Option<Arc<zed_llm_client::LanguageModel>>,
@@ -144,7 +142,7 @@ impl State {
llm_api_token: LlmApiToken::default(),
user_store,
status,
- accept_terms: None,
+ accept_terms_of_service_task: None,
models: Vec::new(),
default_model: None,
default_fast_model: None,
@@ -253,12 +251,12 @@ impl State {
fn accept_terms_of_service(&mut self, cx: &mut Context<Self>) {
let user_store = self.user_store.clone();
- self.accept_terms = Some(cx.spawn(async move |this, cx| {
+ self.accept_terms_of_service_task = Some(cx.spawn(async move |this, cx| {
let _ = user_store
.update(cx, |store, cx| store.accept_terms_of_service(cx))?
.await;
this.update(cx, |this, cx| {
- this.accept_terms = None;
+ this.accept_terms_of_service_task = None;
cx.notify()
})
}));
@@ -351,11 +349,11 @@ impl LanguageModelProviderState for CloudLanguageModelProvider {
impl LanguageModelProvider for CloudLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(ZED_CLOUD_PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -397,7 +395,8 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
}
fn is_authenticated(&self, cx: &App) -> bool {
- !self.state.read(cx).is_signed_out()
+ let state = self.state.read(cx);
+ !state.is_signed_out() && state.has_accepted_terms_of_service(cx)
}
fn authenticate(&self, _cx: &mut App) -> Task<Result<(), AuthenticateError>> {
@@ -405,10 +404,8 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
}
fn configuration_view(&self, _: &mut Window, cx: &mut App) -> AnyView {
- cx.new(|_| ConfigurationView {
- state: self.state.clone(),
- })
- .into()
+ cx.new(|_| ConfigurationView::new(self.state.clone()))
+ .into()
}
fn must_accept_terms(&self, cx: &App) -> bool {
@@ -420,7 +417,19 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
view: LanguageModelProviderTosView,
cx: &mut App,
) -> Option<AnyElement> {
- render_accept_terms(self.state.clone(), view, cx)
+ let state = self.state.read(cx);
+ if state.has_accepted_terms_of_service(cx) {
+ return None;
+ }
+ Some(
+ render_accept_terms(view, state.accept_terms_of_service_task.is_some(), {
+ let state = self.state.clone();
+ move |_window, cx| {
+ state.update(cx, |state, cx| state.accept_terms_of_service(cx));
+ }
+ })
+ .into_any_element(),
+ )
}
fn reset_credentials(&self, _cx: &mut App) -> Task<Result<()>> {
@@ -429,18 +438,12 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
}
fn render_accept_terms(
- state: Entity<State>,
view_kind: LanguageModelProviderTosView,
- cx: &mut App,
-) -> Option<AnyElement> {
- if state.read(cx).has_accepted_terms_of_service(cx) {
- return None;
- }
-
- let accept_terms_disabled = state.read(cx).accept_terms.is_some();
-
+ accept_terms_of_service_in_progress: bool,
+ accept_terms_callback: impl Fn(&mut Window, &mut App) + 'static,
+) -> impl IntoElement {
let thread_fresh_start = matches!(view_kind, LanguageModelProviderTosView::ThreadFreshStart);
- let thread_empty_state = matches!(view_kind, LanguageModelProviderTosView::ThreadtEmptyState);
+ let thread_empty_state = matches!(view_kind, LanguageModelProviderTosView::ThreadEmptyState);
let terms_button = Button::new("terms_of_service", "Terms of Service")
.style(ButtonStyle::Subtle)
@@ -463,18 +466,11 @@ fn render_accept_terms(
this.style(ButtonStyle::Tinted(TintColor::Warning))
.label_size(LabelSize::Small)
})
- .disabled(accept_terms_disabled)
- .on_click({
- let state = state.downgrade();
- move |_, _window, cx| {
- state
- .update(cx, |state, cx| state.accept_terms_of_service(cx))
- .ok();
- }
- }),
+ .disabled(accept_terms_of_service_in_progress)
+ .on_click(move |_, window, cx| (accept_terms_callback)(window, cx)),
);
- let form = if thread_empty_state {
+ if thread_empty_state {
h_flex()
.w_full()
.flex_wrap()
@@ -512,12 +508,10 @@ fn render_accept_terms(
LanguageModelProviderTosView::ThreadFreshStart => {
button_container.w_full().justify_center()
}
- LanguageModelProviderTosView::ThreadtEmptyState => div().w_0(),
+ LanguageModelProviderTosView::ThreadEmptyState => div().w_0(),
}
})
- };
-
- Some(form.into_any())
+ }
}
pub struct CloudLanguageModel {
@@ -536,8 +530,6 @@ struct PerformLlmCompletionResponse {
}
impl CloudLanguageModel {
- const MAX_RETRIES: usize = 3;
-
async fn perform_llm_completion(
client: Arc<Client>,
llm_api_token: LlmApiToken,
@@ -547,8 +539,7 @@ impl CloudLanguageModel {
let http_client = &client.http_client();
let mut token = llm_api_token.acquire(&client).await?;
- let mut retries_remaining = Self::MAX_RETRIES;
- let mut retry_delay = Duration::from_secs(1);
+ let mut refreshed_token = false;
loop {
let request_builder = http_client::Request::builder()
@@ -590,14 +581,20 @@ impl CloudLanguageModel {
includes_status_messages,
tool_use_limit_reached,
});
- } else if response
- .headers()
- .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
- .is_some()
+ }
+
+ if !refreshed_token
+ && response
+ .headers()
+ .get(EXPIRED_LLM_TOKEN_HEADER_NAME)
+ .is_some()
{
- retries_remaining -= 1;
token = llm_api_token.refresh(&client).await?;
- } else if status == StatusCode::FORBIDDEN
+ refreshed_token = true;
+ continue;
+ }
+
+ if status == StatusCode::FORBIDDEN
&& response
.headers()
.get(SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME)
@@ -622,35 +619,18 @@ impl CloudLanguageModel {
return Err(anyhow!(ModelRequestLimitReachedError { plan }));
}
}
-
- anyhow::bail!("Forbidden");
- } else if status.as_u16() >= 500 && status.as_u16() < 600 {
- // If we encounter an error in the 500 range, retry after a delay.
- // We've seen at least these in the wild from API providers:
- // * 500 Internal Server Error
- // * 502 Bad Gateway
- // * 529 Service Overloaded
-
- if retries_remaining == 0 {
- let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
- anyhow::bail!(
- "cloud language model completion failed after {} retries with status {status}: {body}",
- Self::MAX_RETRIES
- );
- }
-
- Timer::after(retry_delay).await;
-
- retries_remaining -= 1;
- retry_delay *= 2; // If it fails again, wait longer.
} else if status == StatusCode::PAYMENT_REQUIRED {
return Err(anyhow!(PaymentRequiredError));
- } else {
- let mut body = String::new();
- response.body_mut().read_to_string(&mut body).await?;
- return Err(anyhow!(ApiError { status, body }));
}
+
+ let mut body = String::new();
+ let headers = response.headers().clone();
+ response.body_mut().read_to_string(&mut body).await?;
+ return Err(anyhow!(ApiError {
+ status,
+ body,
+ headers
+ }));
}
}
}
@@ -660,6 +640,19 @@ impl CloudLanguageModel {
struct ApiError {
status: StatusCode,
body: String,
+ headers: HeaderMap<HeaderValue>,
+}
+
+impl From<ApiError> for LanguageModelCompletionError {
+ fn from(error: ApiError) -> Self {
+ let retry_after = None;
+ LanguageModelCompletionError::from_http_status(
+ PROVIDER_NAME,
+ error.status,
+ error.body,
+ retry_after,
+ )
+ }
}
impl LanguageModel for CloudLanguageModel {
@@ -672,11 +665,29 @@ impl LanguageModel for CloudLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(ZED_CLOUD_PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
+ }
+
+ fn upstream_provider_id(&self) -> LanguageModelProviderId {
+ use zed_llm_client::LanguageModelProvider::*;
+ match self.model.provider {
+ Anthropic => language_model::ANTHROPIC_PROVIDER_ID,
+ OpenAi => language_model::OPEN_AI_PROVIDER_ID,
+ Google => language_model::GOOGLE_PROVIDER_ID,
+ }
+ }
+
+ fn upstream_provider_name(&self) -> LanguageModelProviderName {
+ use zed_llm_client::LanguageModelProvider::*;
+ match self.model.provider {
+ Anthropic => language_model::ANTHROPIC_PROVIDER_NAME,
+ OpenAi => language_model::OPEN_AI_PROVIDER_NAME,
+ Google => language_model::GOOGLE_PROVIDER_NAME,
+ }
}
fn supports_tools(&self) -> bool {
@@ -776,6 +787,7 @@ impl LanguageModel for CloudLanguageModel {
.body(serde_json::to_string(&request_body)?.into())?;
let mut response = http_client.send(request).await?;
let status = response.status();
+ let headers = response.headers().clone();
let mut response_body = String::new();
response
.body_mut()
@@ -790,7 +802,8 @@ impl LanguageModel for CloudLanguageModel {
} else {
Err(anyhow!(ApiError {
status,
- body: response_body
+ body: response_body,
+ headers
}))
}
}
@@ -855,18 +868,7 @@ impl LanguageModel for CloudLanguageModel {
)
.await
.map_err(|err| match err.downcast::<ApiError>() {
- Ok(api_err) => {
- if api_err.status == StatusCode::BAD_REQUEST {
- if let Some(tokens) = parse_prompt_too_long(&api_err.body) {
- return anyhow!(
- LanguageModelKnownError::ContextWindowLimitExceeded {
- tokens
- }
- );
- }
- }
- anyhow!(api_err)
- }
+ Ok(api_err) => anyhow!(LanguageModelCompletionError::from(api_err)),
Err(err) => anyhow!(err),
})?;
@@ -995,7 +997,7 @@ where
.flat_map(move |event| {
futures::stream::iter(match event {
Err(error) => {
- vec![Err(LanguageModelCompletionError::Other(error))]
+ vec![Err(LanguageModelCompletionError::from(error))]
}
Ok(CloudCompletionEvent::Status(event)) => {
vec![Ok(LanguageModelCompletionEvent::StatusUpdate(event))]
@@ -1054,32 +1056,24 @@ fn response_lines<T: DeserializeOwned>(
)
}
-struct ConfigurationView {
- state: gpui::Entity<State>,
+#[derive(IntoElement, RegisterComponent)]
+struct ZedAiConfiguration {
+ is_connected: bool,
+ plan: Option<proto::Plan>,
+ subscription_period: Option<(DateTime<Utc>, DateTime<Utc>)>,
+ eligible_for_trial: bool,
+ has_accepted_terms_of_service: bool,
+ accept_terms_of_service_in_progress: bool,
+ accept_terms_of_service_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
+ sign_in_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
}
-impl ConfigurationView {
- fn authenticate(&mut self, cx: &mut Context<Self>) {
- self.state.update(cx, |state, cx| {
- state.authenticate(cx).detach_and_log_err(cx);
- });
- cx.notify();
- }
-}
-
-impl Render for ConfigurationView {
- fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+impl RenderOnce for ZedAiConfiguration {
+ fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
const ZED_PRICING_URL: &str = "https://zed.dev/pricing";
- let is_connected = !self.state.read(cx).is_signed_out();
- let user_store = self.state.read(cx).user_store.read(cx);
- let plan = user_store.current_plan();
- let subscription_period = user_store.subscription_period();
- let eligible_for_trial = user_store.trial_started_at().is_none();
- let has_accepted_terms = self.state.read(cx).has_accepted_terms_of_service(cx);
-
- let is_pro = plan == Some(proto::Plan::ZedPro);
- let subscription_text = match (plan, subscription_period) {
+ let is_pro = self.plan == Some(proto::Plan::ZedPro);
+ let subscription_text = match (self.plan, self.subscription_period) {
(Some(proto::Plan::ZedPro), Some(_)) => {
"You have access to Zed's hosted LLMs through your Zed Pro subscription."
}
@@ -1090,7 +1084,7 @@ impl Render for ConfigurationView {
"You have basic access to Zed's hosted LLMs through your Zed Free subscription."
}
_ => {
- if eligible_for_trial {
+ if self.eligible_for_trial {
"Subscribe for access to Zed's hosted LLMs. Start with a 14 day free trial."
} else {
"Subscribe for access to Zed's hosted LLMs."
@@ -1101,7 +1095,7 @@ impl Render for ConfigurationView {
h_flex().child(
Button::new("manage_settings", "Manage Subscription")
.style(ButtonStyle::Tinted(TintColor::Accent))
- .on_click(cx.listener(|_, _, _, cx| cx.open_url(&zed_urls::account_url(cx)))),
+ .on_click(|_, _, cx| cx.open_url(&zed_urls::account_url(cx))),
)
} else {
h_flex()
@@ -1109,28 +1103,38 @@ impl Render for ConfigurationView {
.child(
Button::new("learn_more", "Learn more")
.style(ButtonStyle::Subtle)
- .on_click(cx.listener(|_, _, _, cx| cx.open_url(ZED_PRICING_URL))),
+ .on_click(|_, _, cx| cx.open_url(ZED_PRICING_URL)),
)
.child(
- Button::new("upgrade", "Upgrade")
- .style(ButtonStyle::Subtle)
- .color(Color::Accent)
- .on_click(
- cx.listener(|_, _, _, cx| cx.open_url(&zed_urls::account_url(cx))),
- ),
+ Button::new(
+ "upgrade",
+ if self.plan.is_none() && self.eligible_for_trial {
+ "Start Trial"
+ } else {
+ "Upgrade"
+ },
+ )
+ .style(ButtonStyle::Subtle)
+ .color(Color::Accent)
+ .on_click(|_, _, cx| cx.open_url(&zed_urls::account_url(cx))),
)
};
- if is_connected {
+ if self.is_connected {
v_flex()
.gap_3()
.w_full()
- .children(render_accept_terms(
- self.state.clone(),
- LanguageModelProviderTosView::Configuration,
- cx,
- ))
- .when(has_accepted_terms, |this| {
+ .when(!self.has_accepted_terms_of_service, |this| {
+ this.child(render_accept_terms(
+ LanguageModelProviderTosView::Configuration,
+ self.accept_terms_of_service_in_progress,
+ {
+ let callback = self.accept_terms_of_service_callback.clone();
+ move |window, cx| (callback)(window, cx)
+ },
+ ))
+ })
+ .when(self.has_accepted_terms_of_service, |this| {
this.child(subscription_text)
.child(manage_subscription_buttons)
})
@@ -1143,8 +1147,126 @@ impl Render for ConfigurationView {
.icon_color(Color::Muted)
.icon(IconName::Github)
.icon_position(IconPosition::Start)
- .on_click(cx.listener(move |this, _, _, cx| this.authenticate(cx))),
+ .on_click({
+ let callback = self.sign_in_callback.clone();
+ move |_, window, cx| (callback)(window, cx)
+ }),
)
}
}
}
+
+struct ConfigurationView {
+ state: Entity<State>,
+ accept_terms_of_service_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
+ sign_in_callback: Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>,
+}
+
+impl ConfigurationView {
+ fn new(state: Entity<State>) -> Self {
+ let accept_terms_of_service_callback = Arc::new({
+ let state = state.clone();
+ move |_window: &mut Window, cx: &mut App| {
+ state.update(cx, |state, cx| {
+ state.accept_terms_of_service(cx);
+ });
+ }
+ });
+
+ let sign_in_callback = Arc::new({
+ let state = state.clone();
+ move |_window: &mut Window, cx: &mut App| {
+ state.update(cx, |state, cx| {
+ state.authenticate(cx).detach_and_log_err(cx);
+ });
+ }
+ });
+
+ Self {
+ state,
+ accept_terms_of_service_callback,
+ sign_in_callback,
+ }
+ }
+}
+
+impl Render for ConfigurationView {
+ fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let state = self.state.read(cx);
+ let user_store = state.user_store.read(cx);
+
+ ZedAiConfiguration {
+ is_connected: !state.is_signed_out(),
+ plan: user_store.current_plan(),
+ subscription_period: user_store.subscription_period(),
+ eligible_for_trial: user_store.trial_started_at().is_none(),
+ has_accepted_terms_of_service: state.has_accepted_terms_of_service(cx),
+ accept_terms_of_service_in_progress: state.accept_terms_of_service_task.is_some(),
+ accept_terms_of_service_callback: self.accept_terms_of_service_callback.clone(),
+ sign_in_callback: self.sign_in_callback.clone(),
+ }
+ }
+}
+
+impl Component for ZedAiConfiguration {
+ fn scope() -> ComponentScope {
+ ComponentScope::Agent
+ }
+
+ fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
+ fn configuration(
+ is_connected: bool,
+ plan: Option<proto::Plan>,
+ eligible_for_trial: bool,
+ has_accepted_terms_of_service: bool,
+ ) -> AnyElement {
+ ZedAiConfiguration {
+ is_connected,
+ plan,
+ subscription_period: plan
+ .is_some()
+ .then(|| (Utc::now(), Utc::now() + chrono::Duration::days(7))),
+ eligible_for_trial,
+ has_accepted_terms_of_service,
+ accept_terms_of_service_in_progress: false,
+ accept_terms_of_service_callback: Arc::new(|_, _| {}),
+ sign_in_callback: Arc::new(|_, _| {}),
+ }
+ .into_any_element()
+ }
+
+ Some(
+ v_flex()
+ .p_4()
+ .gap_4()
+ .children(vec![
+ single_example("Not connected", configuration(false, None, false, true)),
+ single_example(
+ "Accept Terms of Service",
+ configuration(true, None, true, false),
+ ),
+ single_example(
+ "No Plan - Not eligible for trial",
+ configuration(true, None, false, true),
+ ),
+ single_example(
+ "No Plan - Eligible for trial",
+ configuration(true, None, true, true),
+ ),
+ single_example(
+ "Free Plan",
+ configuration(true, Some(proto::Plan::Free), true, true),
+ ),
+ single_example(
+ "Zed Pro Trial Plan",
+ configuration(true, Some(proto::Plan::ZedProTrial), true, true),
+ ),
+ single_example(
+ "Zed Pro Plan",
+ configuration(true, Some(proto::Plan::ZedPro), true, true),
+ ),
+ ])
+ .into_any_element(),
+ )
+ }
+}
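
A note on the pattern above: the new `ConfigurationView` stops using `cx.listener` and instead hands `Arc`'d callbacks to a stateless `ZedAiConfiguration` element, which is what makes the element previewable in isolation. The sketch below isolates that callback-sharing idea with placeholder `Window`/`App` types (not gpui's real contexts), so it only illustrates the shape, not the actual view code.

```rust
use std::sync::Arc;

// Placeholder stand-ins for gpui's `Window` and `App`; only the callback
// plumbing is being illustrated here.
struct Window;
struct App;

type UiCallback = Arc<dyn Fn(&mut Window, &mut App) + Send + Sync>;

struct StatelessElement {
    sign_in_callback: UiCallback,
}

impl StatelessElement {
    fn on_click(&self, window: &mut Window, app: &mut App) {
        // Clone the Arc so the handler owns its own handle, mirroring
        // `.on_click({ let callback = self.sign_in_callback.clone(); ... })`
        // in the diff above.
        let callback = self.sign_in_callback.clone();
        (callback)(window, app);
    }
}

fn main() {
    let element = StatelessElement {
        sign_in_callback: Arc::new(|_, _| println!("sign in requested")),
    };
    element.on_click(&mut Window, &mut App);
}
```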
@@ -30,13 +30,15 @@ use settings::SettingsStore;
use std::time::Duration;
use ui::prelude::*;
use util::debug_panic;
+use zed_llm_client::CompletionIntent;
use super::anthropic::count_anthropic_tokens;
use super::google::count_google_tokens;
use super::open_ai::count_open_ai_tokens;
-const PROVIDER_ID: &str = "copilot_chat";
-const PROVIDER_NAME: &str = "GitHub Copilot Chat";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat");
+const PROVIDER_NAME: LanguageModelProviderName =
+ LanguageModelProviderName::new("GitHub Copilot Chat");
pub struct CopilotChatLanguageModelProvider {
state: Entity<State>,
@@ -102,11 +104,11 @@ impl LanguageModelProviderState for CopilotChatLanguageModelProvider {
impl LanguageModelProvider for CopilotChatLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -201,11 +203,11 @@ impl LanguageModel for CopilotChatLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -267,6 +269,19 @@ impl LanguageModel for CopilotChatLanguageModel {
LanguageModelCompletionError,
>,
> {
+ let is_user_initiated = request.intent.is_none_or(|intent| match intent {
+ CompletionIntent::UserPrompt
+ | CompletionIntent::ThreadContextSummarization
+ | CompletionIntent::InlineAssist
+ | CompletionIntent::TerminalInlineAssist
+ | CompletionIntent::GenerateGitCommitMessage => true,
+
+ CompletionIntent::ToolResults
+ | CompletionIntent::ThreadSummarization
+ | CompletionIntent::CreateFile
+ | CompletionIntent::EditFile => false,
+ });
+
let copilot_request = match into_copilot_chat(&self.model, request) {
Ok(request) => request,
Err(err) => return futures::future::ready(Err(err.into())).boxed(),
@@ -275,7 +290,8 @@ impl LanguageModel for CopilotChatLanguageModel {
let request_limiter = self.request_limiter.clone();
let future = cx.spawn(async move |cx| {
- let request = CopilotChat::stream_completion(copilot_request, cx.clone());
+ let request =
+ CopilotChat::stream_completion(copilot_request, is_user_initiated, cx.clone());
request_limiter
.stream(async move {
let response = request.await?;
@@ -391,24 +407,24 @@ pub fn map_to_language_model_completion_events(
serde_json::Value::from_str(&tool_call.arguments)
};
match arguments {
- Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
- LanguageModelToolUse {
- id: tool_call.id.clone().into(),
- name: tool_call.name.as_str().into(),
- is_input_complete: true,
- input,
- raw_input: tool_call.arguments.clone(),
- },
- )),
- Err(error) => {
- Err(LanguageModelCompletionError::BadInputJson {
- id: tool_call.id.into(),
- tool_name: tool_call.name.as_str().into(),
- raw_input: tool_call.arguments.into(),
- json_parse_error: error.to_string(),
- })
- }
- }
+ Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: tool_call.id.clone().into(),
+ name: tool_call.name.as_str().into(),
+ is_input_complete: true,
+ input,
+ raw_input: tool_call.arguments.clone(),
+ },
+ )),
+ Err(error) => Ok(
+ LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_call.id.into(),
+ tool_name: tool_call.name.as_str().into(),
+ raw_input: tool_call.arguments.into(),
+ json_parse_error: error.to_string(),
+ },
+ ),
+ }
},
));
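
The hunk above (and the matching hunks for DeepSeek, LM Studio, Mistral, OpenAI, and OpenRouter later in this section) turns malformed tool-call arguments from a stream-terminating `Err(BadInputJson)` into an ordinary `Ok(ToolUseJsonParseError)` event, so the rest of the response keeps streaming. A self-contained sketch of that shape, using simplified stand-in types rather than the real `language_model` definitions:

```rust
use serde_json::Value;

// Stand-in for the relevant completion-event variants; the real enum lives in
// the `language_model` crate.
#[derive(Debug)]
enum CompletionEvent {
    ToolUse { name: String, input: Value },
    ToolUseJsonParseError { name: String, raw_input: String, json_parse_error: String },
}

fn map_tool_call(name: &str, arguments: &str) -> CompletionEvent {
    match serde_json::from_str::<Value>(arguments) {
        Ok(input) => CompletionEvent::ToolUse {
            name: name.to_owned(),
            input,
        },
        // Returned as a regular event (Ok) instead of an error, so a consumer
        // can surface the parse failure without aborting the stream.
        Err(error) => CompletionEvent::ToolUseJsonParseError {
            name: name.to_owned(),
            raw_input: arguments.to_owned(),
            json_parse_error: error.to_string(),
        },
    }
}

fn main() {
    println!("{:?}", map_tool_call("search", r#"{"query": "rust"}"#));
    println!("{:?}", map_tool_call("search", r#"{"query": "#)); // truncated JSON
}
```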
@@ -28,8 +28,8 @@ use util::ResultExt;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
-const PROVIDER_ID: &str = "deepseek";
-const PROVIDER_NAME: &str = "DeepSeek";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("deepseek");
+const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("DeepSeek");
const DEEPSEEK_API_KEY_VAR: &str = "DEEPSEEK_API_KEY";
#[derive(Default)]
@@ -174,11 +174,11 @@ impl LanguageModelProviderState for DeepSeekLanguageModelProvider {
impl LanguageModelProvider for DeepSeekLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -283,11 +283,11 @@ impl LanguageModel for DeepSeekLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -466,7 +466,7 @@ impl DeepSeekEventMapper {
events.flat_map(move |event| {
futures::stream::iter(match event {
Ok(event) => self.map_event(event),
- Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))],
+ Err(error) => vec![Err(LanguageModelCompletionError::from(error))],
})
})
}
@@ -476,7 +476,7 @@ impl DeepSeekEventMapper {
event: deepseek::StreamResponse,
) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
let Some(choice) = event.choices.first() else {
- return vec![Err(LanguageModelCompletionError::Other(anyhow!(
+ return vec![Err(LanguageModelCompletionError::from(anyhow!(
"Response contained no choices"
)))];
};
@@ -538,8 +538,8 @@ impl DeepSeekEventMapper {
raw_input: tool_call.arguments.clone(),
},
)),
- Err(error) => Err(LanguageModelCompletionError::BadInputJson {
- id: tool_call.id.into(),
+ Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_call.id.clone().into(),
tool_name: tool_call.name.as_str().into(),
raw_input: tool_call.arguments.into(),
json_parse_error: error.to_string(),
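
Several providers in this section also replace `&str` constants with `const` `LanguageModelProviderId`/`LanguageModelProviderName` values, so `id()`/`name()` can return them directly. The real constructors live in the `language_model` crate; the sketch below only guesses at the shape (a `const fn` over a `'static` string, in the spirit of the `SharedString::new_static` constructor that appears later in this diff for `LanguageServerName`).

```rust
// Hypothetical newtype; the actual `LanguageModelProviderId` wraps gpui's
// `SharedString`, which this sketch replaces with a plain `'static` str.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct ProviderId(&'static str);

impl ProviderId {
    const fn new(id: &'static str) -> Self {
        Self(id)
    }
}

// With a const constructor the ID is a compile-time constant instead of being
// rebuilt from a `&str` on every `fn id()` call.
const PROVIDER_ID: ProviderId = ProviderId::new("deepseek");

fn main() {
    assert_eq!(PROVIDER_ID, ProviderId::new("deepseek"));
    println!("{PROVIDER_ID:?}");
}
```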
@@ -37,8 +37,8 @@ use util::ResultExt;
use crate::AllLanguageModelSettings;
use crate::ui::InstructionListItem;
-const PROVIDER_ID: &str = "google";
-const PROVIDER_NAME: &str = "Google AI";
+const PROVIDER_ID: LanguageModelProviderId = language_model::GOOGLE_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = language_model::GOOGLE_PROVIDER_NAME;
#[derive(Default, Clone, Debug, PartialEq)]
pub struct GoogleSettings {
@@ -207,11 +207,11 @@ impl LanguageModelProviderState for GoogleLanguageModelProvider {
impl LanguageModelProvider for GoogleLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -334,11 +334,11 @@ impl LanguageModel for GoogleLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -423,9 +423,7 @@ impl LanguageModel for GoogleLanguageModel {
);
let request = self.stream_completion(request, cx);
let future = self.request_limiter.stream(async move {
- let response = request
- .await
- .map_err(|err| LanguageModelCompletionError::Other(anyhow!(err)))?;
+ let response = request.await.map_err(LanguageModelCompletionError::from)?;
Ok(GoogleEventMapper::new().map_stream(response))
});
async move { Ok(future.await?.boxed()) }.boxed()
@@ -622,7 +620,7 @@ impl GoogleEventMapper {
futures::stream::iter(match event {
Some(Ok(event)) => self.map_event(event),
Some(Err(error)) => {
- vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))]
+ vec![Err(LanguageModelCompletionError::from(error))]
}
None => vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))],
})
@@ -31,8 +31,8 @@ const LMSTUDIO_DOWNLOAD_URL: &str = "https://lmstudio.ai/download";
const LMSTUDIO_CATALOG_URL: &str = "https://lmstudio.ai/models";
const LMSTUDIO_SITE: &str = "https://lmstudio.ai/";
-const PROVIDER_ID: &str = "lmstudio";
-const PROVIDER_NAME: &str = "LM Studio";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("lmstudio");
+const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("LM Studio");
#[derive(Default, Debug, Clone, PartialEq)]
pub struct LmStudioSettings {
@@ -156,11 +156,11 @@ impl LanguageModelProviderState for LmStudioLanguageModelProvider {
impl LanguageModelProvider for LmStudioLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -386,11 +386,11 @@ impl LanguageModel for LmStudioLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -474,7 +474,7 @@ impl LmStudioEventMapper {
events.flat_map(move |event| {
futures::stream::iter(match event {
Ok(event) => self.map_event(event),
- Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))],
+ Err(error) => vec![Err(LanguageModelCompletionError::from(error))],
})
})
}
@@ -484,7 +484,7 @@ impl LmStudioEventMapper {
event: lmstudio::ResponseStreamEvent,
) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
let Some(choice) = event.choices.into_iter().next() else {
- return vec![Err(LanguageModelCompletionError::Other(anyhow!(
+ return vec![Err(LanguageModelCompletionError::from(anyhow!(
"Response contained no choices"
)))];
};
@@ -553,7 +553,7 @@ impl LmStudioEventMapper {
raw_input: tool_call.arguments,
},
)),
- Err(error) => Err(LanguageModelCompletionError::BadInputJson {
+ Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
id: tool_call.id.into(),
tool_name: tool_call.name.into(),
raw_input: tool_call.arguments.into(),
@@ -2,8 +2,7 @@ use anyhow::{Context as _, Result, anyhow};
use collections::BTreeMap;
use credentials_provider::CredentialsProvider;
use editor::{Editor, EditorElement, EditorStyle};
-use futures::stream::BoxStream;
-use futures::{FutureExt, StreamExt, future::BoxFuture};
+use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{
AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
};
@@ -15,6 +14,7 @@ use language_model::{
LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
RateLimiter, Role, StopReason, TokenUsage,
};
+use mistral::StreamResponse;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
@@ -29,8 +29,8 @@ use util::ResultExt;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
-const PROVIDER_ID: &str = "mistral";
-const PROVIDER_NAME: &str = "Mistral";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("mistral");
+const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Mistral");
#[derive(Default, Clone, Debug, PartialEq)]
pub struct MistralSettings {
@@ -171,11 +171,11 @@ impl LanguageModelProviderState for MistralLanguageModelProvider {
impl LanguageModelProvider for MistralLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -298,11 +298,11 @@ impl LanguageModel for MistralLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -579,13 +579,13 @@ impl MistralEventMapper {
pub fn map_stream(
mut self,
- events: Pin<Box<dyn Send + futures::Stream<Item = Result<mistral::StreamResponse>>>>,
- ) -> impl futures::Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
+ events: Pin<Box<dyn Send + Stream<Item = Result<StreamResponse>>>>,
+ ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
{
events.flat_map(move |event| {
futures::stream::iter(match event {
Ok(event) => self.map_event(event),
- Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))],
+ Err(error) => vec![Err(LanguageModelCompletionError::from(error))],
})
})
}
@@ -595,7 +595,7 @@ impl MistralEventMapper {
event: mistral::StreamResponse,
) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
let Some(choice) = event.choices.first() else {
- return vec![Err(LanguageModelCompletionError::Other(anyhow!(
+ return vec![Err(LanguageModelCompletionError::from(anyhow!(
"Response contained no choices"
)))];
};
@@ -660,7 +660,7 @@ impl MistralEventMapper {
for (_, tool_call) in self.tool_calls_by_index.drain() {
if tool_call.id.is_empty() || tool_call.name.is_empty() {
- results.push(Err(LanguageModelCompletionError::Other(anyhow!(
+ results.push(Err(LanguageModelCompletionError::from(anyhow!(
"Received incomplete tool call: missing id or name"
))));
continue;
@@ -676,12 +676,14 @@ impl MistralEventMapper {
raw_input: tool_call.arguments,
},
))),
- Err(error) => results.push(Err(LanguageModelCompletionError::BadInputJson {
- id: tool_call.id.into(),
- tool_name: tool_call.name.into(),
- raw_input: tool_call.arguments.into(),
- json_parse_error: error.to_string(),
- })),
+ Err(error) => {
+ results.push(Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_call.id.into(),
+ tool_name: tool_call.name.into(),
+ raw_input: tool_call.arguments.into(),
+ json_parse_error: error.to_string(),
+ }))
+ }
}
}
@@ -30,8 +30,8 @@ const OLLAMA_DOWNLOAD_URL: &str = "https://ollama.com/download";
const OLLAMA_LIBRARY_URL: &str = "https://ollama.com/library";
const OLLAMA_SITE: &str = "https://ollama.com/";
-const PROVIDER_ID: &str = "ollama";
-const PROVIDER_NAME: &str = "Ollama";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("ollama");
+const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Ollama");
#[derive(Default, Debug, Clone, PartialEq)]
pub struct OllamaSettings {
@@ -181,11 +181,11 @@ impl LanguageModelProviderState for OllamaLanguageModelProvider {
impl LanguageModelProvider for OllamaLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -350,11 +350,11 @@ impl LanguageModel for OllamaLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -453,7 +453,7 @@ fn map_to_language_model_completion_events(
let delta = match response {
Ok(delta) => delta,
Err(e) => {
- let event = Err(LanguageModelCompletionError::Other(anyhow!(e)));
+ let event = Err(LanguageModelCompletionError::from(anyhow!(e)));
return Some((vec![event], state));
}
};
@@ -31,8 +31,8 @@ use util::ResultExt;
use crate::OpenAiSettingsContent;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
-const PROVIDER_ID: &str = "openai";
-const PROVIDER_NAME: &str = "OpenAI";
+const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = language_model::OPEN_AI_PROVIDER_NAME;
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenAiSettings {
@@ -173,11 +173,11 @@ impl LanguageModelProviderState for OpenAiLanguageModelProvider {
impl LanguageModelProvider for OpenAiLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -267,7 +267,11 @@ impl OpenAiLanguageModel {
};
let future = self.request_limiter.stream(async move {
- let api_key = api_key.context("Missing OpenAI API Key")?;
+ let Some(api_key) = api_key else {
+ return Err(LanguageModelCompletionError::NoApiKey {
+ provider: PROVIDER_NAME,
+ });
+ };
let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
let response = request.await?;
Ok(response)
@@ -287,11 +291,11 @@ impl LanguageModel for OpenAiLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -525,7 +529,7 @@ impl OpenAiEventMapper {
events.flat_map(move |event| {
futures::stream::iter(match event {
Ok(event) => self.map_event(event),
- Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))],
+ Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
})
})
}
@@ -588,10 +592,10 @@ impl OpenAiEventMapper {
raw_input: tool_call.arguments.clone(),
},
)),
- Err(error) => Err(LanguageModelCompletionError::BadInputJson {
+ Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
id: tool_call.id.into(),
- tool_name: tool_call.name.as_str().into(),
- raw_input: tool_call.arguments.into(),
+ tool_name: tool_call.name.into(),
+ raw_input: tool_call.arguments.clone().into(),
json_parse_error: error.to_string(),
}),
}
@@ -29,8 +29,8 @@ use util::ResultExt;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
-const PROVIDER_ID: &str = "openrouter";
-const PROVIDER_NAME: &str = "OpenRouter";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openrouter");
+const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("OpenRouter");
#[derive(Default, Clone, Debug, PartialEq)]
pub struct OpenRouterSettings {
@@ -244,11 +244,11 @@ impl LanguageModelProviderState for OpenRouterLanguageModelProvider {
impl LanguageModelProvider for OpenRouterLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -363,11 +363,11 @@ impl LanguageModel for OpenRouterLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
@@ -607,7 +607,7 @@ impl OpenRouterEventMapper {
events.flat_map(move |event| {
futures::stream::iter(match event {
Ok(event) => self.map_event(event),
- Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))],
+ Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
})
})
}
@@ -617,7 +617,7 @@ impl OpenRouterEventMapper {
event: ResponseStreamEvent,
) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
let Some(choice) = event.choices.first() else {
- return vec![Err(LanguageModelCompletionError::Other(anyhow!(
+ return vec![Err(LanguageModelCompletionError::from(anyhow!(
"Response contained no choices"
)))];
};
@@ -683,10 +683,10 @@ impl OpenRouterEventMapper {
raw_input: tool_call.arguments.clone(),
},
)),
- Err(error) => Err(LanguageModelCompletionError::BadInputJson {
- id: tool_call.id.into(),
+ Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_call.id.clone().into(),
tool_name: tool_call.name.as_str().into(),
- raw_input: tool_call.arguments.into(),
+ raw_input: tool_call.arguments.clone().into(),
json_parse_error: error.to_string(),
}),
}
@@ -25,8 +25,8 @@ use util::ResultExt;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
-const PROVIDER_ID: &str = "vercel";
-const PROVIDER_NAME: &str = "Vercel";
+const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("vercel");
+const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("Vercel");
#[derive(Default, Clone, Debug, PartialEq)]
pub struct VercelSettings {
@@ -172,11 +172,11 @@ impl LanguageModelProviderState for VercelLanguageModelProvider {
impl LanguageModelProvider for VercelLanguageModelProvider {
fn id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn icon(&self) -> IconName {
@@ -269,7 +269,11 @@ impl VercelLanguageModel {
};
let future = self.request_limiter.stream(async move {
- let api_key = api_key.context("Missing Vercel API Key")?;
+ let Some(api_key) = api_key else {
+ return Err(LanguageModelCompletionError::NoApiKey {
+ provider: PROVIDER_NAME,
+ });
+ };
let request =
open_ai::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
let response = request.await?;
@@ -290,11 +294,11 @@ impl LanguageModel for VercelLanguageModel {
}
fn provider_id(&self) -> LanguageModelProviderId {
- LanguageModelProviderId(PROVIDER_ID.into())
+ PROVIDER_ID
}
fn provider_name(&self) -> LanguageModelProviderName {
- LanguageModelProviderName(PROVIDER_NAME.into())
+ PROVIDER_NAME
}
fn supports_tools(&self) -> bool {
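
The OpenAI and Vercel hunks above swap `api_key.context("Missing … API Key")?` for a `let … else` that returns a typed `NoApiKey { provider }` error, presumably so callers can match on the error kind rather than on message text. A standalone sketch of that shape (the error enum here is a stand-in, not the real `LanguageModelCompletionError`):

```rust
// Stand-in error type for illustration only.
#[derive(Debug)]
enum CompletionError {
    NoApiKey { provider: &'static str },
}

const PROVIDER_NAME: &str = "Vercel";

fn start_request(api_key: Option<String>) -> Result<String, CompletionError> {
    // `let ... else` keeps the happy path unindented and returns a structured
    // error when no key is configured.
    let Some(api_key) = api_key else {
        return Err(CompletionError::NoApiKey {
            provider: PROVIDER_NAME,
        });
    };
    Ok(format!("authorized with a key of length {}", api_key.len()))
}

fn main() {
    println!("{:?}", start_request(None));
    println!("{:?}", start_request(Some("sk-test".into())));
}
```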
@@ -19,7 +19,13 @@ use ui::{HighlightedLabel, ListItem, ListItemSpacing, prelude::*};
use util::ResultExt;
use workspace::{ModalView, Workspace};
-actions!(language_selector, [Toggle]);
+actions!(
+ language_selector,
+ [
+ /// Toggles the language selector modal.
+ Toggle
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(LanguageSelector::register).detach();
@@ -13,7 +13,13 @@ use ui::{
};
use workspace::{Item, SplitDirection, Workspace};
-actions!(dev, [OpenKeyContextView]);
+actions!(
+ dev,
+ [
+ /// Opens the key context view for debugging keybindings.
+ OpenKeyContextView
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
@@ -204,7 +204,13 @@ pub(crate) struct LogMenuItem {
pub server_kind: LanguageServerKind,
}
-actions!(dev, [OpenLanguageServerLogs]);
+actions!(
+ dev,
+ [
+ /// Opens the language server protocol logs viewer.
+ OpenLanguageServerLogs
+ ]
+);
pub(super) struct GlobalLogStore(pub WeakEntity<LogStore>);
@@ -19,7 +19,13 @@ use workspace::{StatusItemView, Workspace};
use crate::lsp_log::GlobalLogStore;
-actions!(lsp_tool, [ToggleMenu]);
+actions!(
+ lsp_tool,
+ [
+ /// Toggles the language server tool menu.
+ ToggleMenu
+ ]
+);
pub struct LspTool {
state: Entity<PickerState>,
@@ -15,7 +15,13 @@ use workspace::{
item::{Item, ItemHandle},
};
-actions!(dev, [OpenSyntaxTreeView]);
+actions!(
+ dev,
+ [
+ /// Opens the syntax tree view for the current file.
+ OpenSyntaxTreeView
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
@@ -16,3 +16,9 @@ brackets = [
{ start = "{", end = "}", close = true, newline = false },
{ start = "[", end = "]", close = true, newline = false },
]
+rewrap_prefixes = [
+ "[-*+]\\s+",
+ "\\d+\\.\\s+",
+ ">\\s*",
+ "[-*+]\\s+\\[[\\sx]\\]\\s+"
+]
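
The four `rewrap_prefixes` patterns above (added here and again for Markdown further down) cover bullet items, ordered-list items, block quotes, and task-list items. A quick check with the `regex` crate, with TOML escaping removed and the patterns anchored at the start of the line, which is assumed to be how the editor applies them:

```rust
use regex::Regex;

fn main() {
    // Patterns copied from the config above; `^` added for this check.
    let prefixes: Vec<Regex> = [
        r"[-*+]\s+",             // bullet items: "- foo", "* foo", "+ foo"
        r"\d+\.\s+",             // ordered items: "1. foo"
        r">\s*",                 // block quotes: "> foo"
        r"[-*+]\s+\[[\sx]\]\s+", // task items: "- [x] foo", "- [ ] foo"
    ]
    .iter()
    .map(|p| Regex::new(&format!("^{p}")).unwrap())
    .collect();

    for sample in ["- item", "42. item", "> quoted", "- [x] done", "plain text"] {
        let matched = prefixes.iter().any(|re| re.is_match(sample));
        println!("{sample:?}: rewrap prefix = {matched}");
    }
}
```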
@@ -1,9 +1,8 @@
(comment) @comment.inclusive
-[
- (string)
- (template_string)
-] @string
+(string) @string
+
+(template_string (string_fragment) @string)
(jsx_element) @element
@@ -8,7 +8,8 @@ use futures::StreamExt;
use gpui::{App, AsyncApp, Task};
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
use language::{
- ContextProvider, LanguageRegistry, LanguageToolchainStore, LspAdapter, LspAdapterDelegate,
+ ContextProvider, LanguageRegistry, LanguageToolchainStore, LocalFile as _, LspAdapter,
+ LspAdapterDelegate,
};
use lsp::{LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
@@ -65,13 +66,14 @@ impl ContextProvider for JsonTaskProvider {
.ok()?
.await
.ok()?;
+ let path = cx.update(|cx| file.abs_path(cx)).ok()?.as_path().into();
let task_templates = if is_package_json {
let package_json = serde_json_lenient::from_str::<
HashMap<String, serde_json_lenient::Value>,
>(&contents.text)
.ok()?;
- let package_json = PackageJsonData::new(file.path.clone(), package_json);
+ let package_json = PackageJsonData::new(path, package_json);
let command = package_json.package_manager.unwrap_or("npm").to_owned();
package_json
.scripts
@@ -269,10 +271,10 @@ impl JsonLspAdapter {
#[cfg(debug_assertions)]
fn generate_inspector_style_schema() -> serde_json_lenient::Value {
- let schema = schemars::r#gen::SchemaSettings::draft07()
- .with(|settings| settings.option_add_null_type = false)
+ let schema = schemars::generate::SchemaSettings::draft2019_09()
+ .with_transform(util::schemars::DefaultDenyUnknownFields)
.into_generator()
- .into_root_schema_for::<gpui::StyleRefinement>();
+ .root_schema_for::<gpui::StyleRefinement>();
serde_json_lenient::to_value(schema).unwrap()
}
@@ -3,7 +3,7 @@ grammar = "markdown"
path_suffixes = ["md", "mdx", "mdwn", "markdown", "MD"]
completion_query_characters = ["-"]
block_comment = ["<!-- ", " -->"]
-autoclose_before = "}])>"
+autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
@@ -13,6 +13,12 @@ brackets = [
{ start = "'", end = "'", close = false, newline = false },
{ start = "`", end = "`", close = false, newline = false },
]
+rewrap_prefixes = [
+ "[-*+]\\s+",
+ "\\d+\\.\\s+",
+ ">\\s*",
+ "[-*+]\\s+\\[[\\sx]\\]\\s+"
+]
auto_indent_on_paste = false
auto_indent_using_last_non_empty_line = false
@@ -226,6 +226,12 @@
">>"
"|"
"~"
+ "&="
+ "<<="
+ ">>="
+ "@="
+ "^="
+ "|="
] @operator
[
@@ -571,6 +571,9 @@ const RUST_DOC_TEST_NAME_TASK_VARIABLE: VariableName =
const RUST_TEST_NAME_TASK_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("RUST_TEST_NAME"));
+const RUST_MANIFEST_DIRNAME_TASK_VARIABLE: VariableName =
+ VariableName::Custom(Cow::Borrowed("RUST_MANIFEST_DIRNAME"));
+
impl ContextProvider for RustContextProvider {
fn build_context(
&self,
@@ -615,8 +618,11 @@ impl ContextProvider for RustContextProvider {
variables.insert(RUST_PACKAGE_TASK_VARIABLE.clone(), package_name);
}
}
- if let Some(path) = local_abs_path.as_ref() {
- if let Some(target) = target_info_from_abs_path(&path, project_env.as_ref()).await {
+ if let Some(path) = local_abs_path.as_ref()
+ && let Some((target, manifest_path)) =
+ target_info_from_abs_path(&path, project_env.as_ref()).await
+ {
+ if let Some(target) = target {
variables.extend(TaskVariables::from_iter([
(RUST_PACKAGE_TASK_VARIABLE.clone(), target.package_name),
(RUST_BIN_NAME_TASK_VARIABLE.clone(), target.target_name),
@@ -639,6 +645,10 @@ impl ContextProvider for RustContextProvider {
);
}
}
+ variables.extend(TaskVariables::from_iter([(
+ RUST_MANIFEST_DIRNAME_TASK_VARIABLE.clone(),
+ manifest_path.to_string_lossy().into_owned(),
+ )]));
}
Ok(variables)
})
@@ -708,7 +718,7 @@ impl ContextProvider for RustContextProvider {
RUST_TEST_NAME_TASK_VARIABLE.template_value(),
],
tags: vec!["rust-test".to_owned()],
- cwd: Some("$ZED_DIRNAME".to_owned()),
+ cwd: Some(RUST_MANIFEST_DIRNAME_TASK_VARIABLE.template_value()),
..TaskTemplate::default()
},
TaskTemplate {
@@ -729,7 +739,7 @@ impl ContextProvider for RustContextProvider {
RUST_DOC_TEST_NAME_TASK_VARIABLE.template_value(),
],
tags: vec!["rust-doc-test".to_owned()],
- cwd: Some("$ZED_DIRNAME".to_owned()),
+ cwd: Some(RUST_MANIFEST_DIRNAME_TASK_VARIABLE.template_value()),
..TaskTemplate::default()
},
TaskTemplate {
@@ -747,7 +757,7 @@ impl ContextProvider for RustContextProvider {
RUST_TEST_FRAGMENT_TASK_VARIABLE.template_value(),
],
tags: vec!["rust-mod-test".to_owned()],
- cwd: Some("$ZED_DIRNAME".to_owned()),
+ cwd: Some(RUST_MANIFEST_DIRNAME_TASK_VARIABLE.template_value()),
..TaskTemplate::default()
},
TaskTemplate {
@@ -782,7 +792,7 @@ impl ContextProvider for RustContextProvider {
"-p".into(),
RUST_PACKAGE_TASK_VARIABLE.template_value(),
],
- cwd: Some("$ZED_DIRNAME".to_owned()),
+ cwd: Some(RUST_MANIFEST_DIRNAME_TASK_VARIABLE.template_value()),
..TaskTemplate::default()
},
TaskTemplate {
@@ -826,18 +836,19 @@ impl ContextProvider for RustContextProvider {
}
/// Part of the data structure of Cargo metadata
-#[derive(serde::Deserialize)]
+#[derive(Debug, serde::Deserialize)]
struct CargoMetadata {
packages: Vec<CargoPackage>,
}
-#[derive(serde::Deserialize)]
+#[derive(Debug, serde::Deserialize)]
struct CargoPackage {
id: String,
targets: Vec<CargoTarget>,
+ manifest_path: Arc<Path>,
}
-#[derive(serde::Deserialize)]
+#[derive(Debug, serde::Deserialize)]
struct CargoTarget {
name: String,
kind: Vec<String>,
@@ -883,7 +894,7 @@ struct TargetInfo {
async fn target_info_from_abs_path(
abs_path: &Path,
project_env: Option<&HashMap<String, String>>,
-) -> Option<TargetInfo> {
+) -> Option<(Option<TargetInfo>, Arc<Path>)> {
let mut command = util::command::new_smol_command("cargo");
if let Some(envs) = project_env {
command.envs(envs);
@@ -900,12 +911,33 @@ async fn target_info_from_abs_path(
.stdout;
let metadata: CargoMetadata = serde_json::from_slice(&output).log_err()?;
-
target_info_from_metadata(metadata, abs_path)
}
-fn target_info_from_metadata(metadata: CargoMetadata, abs_path: &Path) -> Option<TargetInfo> {
+fn target_info_from_metadata(
+ metadata: CargoMetadata,
+ abs_path: &Path,
+) -> Option<(Option<TargetInfo>, Arc<Path>)> {
+ let mut manifest_path = None;
for package in metadata.packages {
+ let Some(manifest_dir_path) = package.manifest_path.parent() else {
+ continue;
+ };
+
+ let Some(path_from_manifest_dir) = abs_path.strip_prefix(manifest_dir_path).ok() else {
+ continue;
+ };
+ let candidate_path_length = path_from_manifest_dir.components().count();
+ // Pick the most specific manifest path
+ if let Some((path, current_length)) = &mut manifest_path {
+ if candidate_path_length > *current_length {
+ *path = Arc::from(manifest_dir_path);
+ *current_length = candidate_path_length;
+ }
+ } else {
+ manifest_path = Some((Arc::from(manifest_dir_path), candidate_path_length));
+ };
+
for target in package.targets {
let Some(bin_kind) = target
.kind
@@ -916,17 +948,22 @@ fn target_info_from_metadata(metadata: CargoMetadata, abs_path: &Path) -> Option
};
let target_path = PathBuf::from(target.src_path);
if target_path == abs_path {
- return package_name_from_pkgid(&package.id).map(|package_name| TargetInfo {
- package_name: package_name.to_owned(),
- target_name: target.name,
- required_features: target.required_features,
- target_kind: bin_kind,
+ return manifest_path.map(|(path, _)| {
+ (
+ package_name_from_pkgid(&package.id).map(|package_name| TargetInfo {
+ package_name: package_name.to_owned(),
+ target_name: target.name,
+ required_features: target.required_features,
+ target_kind: bin_kind,
+ }),
+ path,
+ )
});
}
}
}
- None
+ manifest_path.map(|(path, _)| (None, path))
}
async fn human_readable_package_name(
@@ -1380,62 +1417,77 @@ mod tests {
fn test_target_info_from_metadata() {
for (input, absolute_path, expected) in [
(
- r#"{"packages":[{"id":"path+file:///absolute/path/to/project/zed/crates/zed#0.131.0","targets":[{"name":"zed","kind":["bin"],"src_path":"/path/to/zed/src/main.rs"}]}]}"#,
+ r#"{"packages":[{"id":"path+file:///absolute/path/to/project/zed/crates/zed#0.131.0","manifest_path":"/path/to/zed/Cargo.toml","targets":[{"name":"zed","kind":["bin"],"src_path":"/path/to/zed/src/main.rs"}]}]}"#,
"/path/to/zed/src/main.rs",
- Some(TargetInfo {
- package_name: "zed".into(),
- target_name: "zed".into(),
- required_features: Vec::new(),
- target_kind: TargetKind::Bin,
- }),
+ Some((
+ Some(TargetInfo {
+ package_name: "zed".into(),
+ target_name: "zed".into(),
+ required_features: Vec::new(),
+ target_kind: TargetKind::Bin,
+ }),
+ Arc::from("/path/to/zed".as_ref()),
+ )),
),
(
- r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["bin"],"src_path":"/path/to/custom-package/src/main.rs"}]}]}"#,
+ r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","manifest_path":"/path/to/custom-package/Cargo.toml","targets":[{"name":"my-custom-bin","kind":["bin"],"src_path":"/path/to/custom-package/src/main.rs"}]}]}"#,
"/path/to/custom-package/src/main.rs",
- Some(TargetInfo {
- package_name: "my-custom-package".into(),
- target_name: "my-custom-bin".into(),
- required_features: Vec::new(),
- target_kind: TargetKind::Bin,
- }),
+ Some((
+ Some(TargetInfo {
+ package_name: "my-custom-package".into(),
+ target_name: "my-custom-bin".into(),
+ required_features: Vec::new(),
+ target_kind: TargetKind::Bin,
+ }),
+ Arc::from("/path/to/custom-package".as_ref()),
+ )),
),
(
- r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["example"],"src_path":"/path/to/custom-package/src/main.rs"}]}]}"#,
+ r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["example"],"src_path":"/path/to/custom-package/src/main.rs"}],"manifest_path":"/path/to/custom-package/Cargo.toml"}]}"#,
"/path/to/custom-package/src/main.rs",
- Some(TargetInfo {
- package_name: "my-custom-package".into(),
- target_name: "my-custom-bin".into(),
- required_features: Vec::new(),
- target_kind: TargetKind::Example,
- }),
+ Some((
+ Some(TargetInfo {
+ package_name: "my-custom-package".into(),
+ target_name: "my-custom-bin".into(),
+ required_features: Vec::new(),
+ target_kind: TargetKind::Example,
+ }),
+ Arc::from("/path/to/custom-package".as_ref()),
+ )),
),
(
- r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["example"],"src_path":"/path/to/custom-package/src/main.rs","required-features":["foo","bar"]}]}]}"#,
+ r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","manifest_path":"/path/to/custom-package/Cargo.toml","targets":[{"name":"my-custom-bin","kind":["example"],"src_path":"/path/to/custom-package/src/main.rs","required-features":["foo","bar"]}]}]}"#,
"/path/to/custom-package/src/main.rs",
- Some(TargetInfo {
- package_name: "my-custom-package".into(),
- target_name: "my-custom-bin".into(),
- required_features: vec!["foo".to_owned(), "bar".to_owned()],
- target_kind: TargetKind::Example,
- }),
+ Some((
+ Some(TargetInfo {
+ package_name: "my-custom-package".into(),
+ target_name: "my-custom-bin".into(),
+ required_features: vec!["foo".to_owned(), "bar".to_owned()],
+ target_kind: TargetKind::Example,
+ }),
+ Arc::from("/path/to/custom-package".as_ref()),
+ )),
),
(
- r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["example"],"src_path":"/path/to/custom-package/src/main.rs","required-features":[]}]}]}"#,
+ r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-bin","kind":["example"],"src_path":"/path/to/custom-package/src/main.rs","required-features":[]}],"manifest_path":"/path/to/custom-package/Cargo.toml"}]}"#,
"/path/to/custom-package/src/main.rs",
- Some(TargetInfo {
- package_name: "my-custom-package".into(),
- target_name: "my-custom-bin".into(),
- required_features: vec![],
- target_kind: TargetKind::Example,
- }),
+ Some((
+ Some(TargetInfo {
+ package_name: "my-custom-package".into(),
+ target_name: "my-custom-bin".into(),
+ required_features: vec![],
+ target_kind: TargetKind::Example,
+ }),
+ Arc::from("/path/to/custom-package".as_ref()),
+ )),
),
(
- r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-package","kind":["lib"],"src_path":"/path/to/custom-package/src/main.rs"}]}]}"#,
+ r#"{"packages":[{"id":"path+file:///path/to/custom-package#my-custom-package@0.1.0","targets":[{"name":"my-custom-package","kind":["lib"],"src_path":"/path/to/custom-package/src/main.rs"}],"manifest_path":"/path/to/custom-package/Cargo.toml"}]}"#,
"/path/to/custom-package/src/main.rs",
- None,
+ Some((None, Arc::from("/path/to/custom-package".as_ref()))),
),
] {
- let metadata: CargoMetadata = serde_json::from_str(input).unwrap();
+ let metadata: CargoMetadata = serde_json::from_str(input).context(input).unwrap();
let absolute_path = Path::new(absolute_path);
@@ -1,9 +1,8 @@
(comment) @comment.inclusive
-[
- (string)
- (template_string)
-] @string
+(string) @string
+
+(template_string (string_fragment) @string)
(jsx_element) @element
@@ -221,15 +221,30 @@ impl PackageJsonData {
});
}
+ let script_name_counts: HashMap<_, usize> =
+ self.scripts
+ .iter()
+ .fold(HashMap::default(), |mut acc, (_, script)| {
+ *acc.entry(script).or_default() += 1;
+ acc
+ });
for (path, script) in &self.scripts {
+ let label = if script_name_counts.get(script).copied().unwrap_or_default() > 1
+ && let Some(parent) = path.parent().and_then(|parent| parent.file_name())
+ {
+ let parent = parent.to_string_lossy();
+ format!("{parent}/package.json > {script}")
+ } else {
+ format!("package.json > {script}")
+ };
task_templates.0.push(TaskTemplate {
- label: format!("package.json > {script}",),
+ label,
command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
args: vec!["run".to_owned(), script.to_owned()],
tags: vec!["package-script".into()],
cwd: Some(
path.parent()
- .unwrap_or(Path::new(""))
+ .unwrap_or(Path::new("/"))
.to_string_lossy()
.to_string(),
),
@@ -767,8 +782,8 @@ pub struct EsLintLspAdapter {
}
impl EsLintLspAdapter {
- const CURRENT_VERSION: &'static str = "3.0.10";
- const CURRENT_VERSION_TAG_NAME: &'static str = "release/3.0.10";
+ const CURRENT_VERSION: &'static str = "2.4.4";
+ const CURRENT_VERSION_TAG_NAME: &'static str = "release/2.4.4";
#[cfg(not(windows))]
const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz;
@@ -846,7 +861,9 @@ impl LspAdapter for EsLintLspAdapter {
"enable": true
}
},
- "useFlatConfig": use_flat_config,
+ "experimental": {
+ "useFlatConfig": use_flat_config,
+ },
});
let override_options = cx.update(|cx| {
@@ -1012,6 +1029,7 @@ mod tests {
use language::language_settings;
use project::{FakeFs, Project};
use serde_json::json;
+ use task::TaskTemplates;
use unindent::Unindent;
use util::path;
@@ -1133,5 +1151,42 @@ mod tests {
package_manager: None,
}
);
+
+ let mut task_templates = TaskTemplates::default();
+ package_json_data.fill_task_templates(&mut task_templates);
+ let task_templates = task_templates
+ .0
+ .into_iter()
+ .map(|template| (template.label, template.cwd))
+ .collect::<Vec<_>>();
+ pretty_assertions::assert_eq!(
+ task_templates,
+ [
+ (
+ "vitest file test".into(),
+ Some("$ZED_CUSTOM_TYPESCRIPT_VITEST_PACKAGE_PATH".into()),
+ ),
+ (
+ "vitest test $ZED_SYMBOL".into(),
+ Some("$ZED_CUSTOM_TYPESCRIPT_VITEST_PACKAGE_PATH".into()),
+ ),
+ (
+ "mocha file test".into(),
+ Some("$ZED_CUSTOM_TYPESCRIPT_MOCHA_PACKAGE_PATH".into()),
+ ),
+ (
+ "mocha test $ZED_SYMBOL".into(),
+ Some("$ZED_CUSTOM_TYPESCRIPT_MOCHA_PACKAGE_PATH".into()),
+ ),
+ (
+ "root/package.json > test".into(),
+ Some(path!("/root").into())
+ ),
+ (
+ "sub/package.json > test".into(),
+ Some(path!("/root/sub").into())
+ ),
+ ]
+ );
}
}
@@ -1,6 +1,9 @@
(comment) @comment.inclusive
+
(string) @string
+(template_string (string_fragment) @string)
+
(_ value: (call_expression
function: (identifier) @function_name_before_type_arguments
type_arguments: (type_arguments)))
@@ -12,6 +12,6 @@ brackets = [
auto_indent_on_paste = false
auto_indent_using_last_non_empty_line = false
-increase_indent_pattern = ":\\s*[|>]?\\s*$"
+increase_indent_pattern = "^[^#]*:\\s*[|>]?\\s*$"
prettier_parser_name = "yaml"
tab_size = 2
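
The YAML `increase_indent_pattern` above now requires that no `#` appear before the colon, presumably so commented-out keys such as `# key:` no longer trigger an indent increase. A quick comparison of the old and new patterns with the `regex` crate:

```rust
use regex::Regex;

fn main() {
    let old = Regex::new(r":\s*[|>]?\s*$").unwrap();
    let new = Regex::new(r"^[^#]*:\s*[|>]?\s*$").unwrap();

    for line in ["key:", "key: |", "# key:", "value: 1"] {
        println!(
            "{line:?}: old = {}, new = {}",
            old.is_match(line),
            new.is_match(line)
        );
    }
    // Expected: "# key:" matches the old pattern but not the new one.
}
```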
@@ -25,7 +25,7 @@ async-trait.workspace = true
collections.workspace = true
cpal.workspace = true
futures.workspace = true
-gpui = { workspace = true, features = ["x11", "wayland"] }
+gpui = { workspace = true, features = ["screen-capture", "x11", "wayland"] }
gpui_tokio.workspace = true
http_client_tls.workspace = true
image.workspace = true
@@ -15,11 +15,7 @@ use gpui::{App, AppContext as _, AsyncApp, BackgroundExecutor, SharedString, Tas
use notification::DidChangeWorkspaceFolders;
use parking_lot::{Mutex, RwLock};
use postage::{barrier, prelude::Stream};
-use schemars::{
- JsonSchema,
- r#gen::SchemaGenerator,
- schema::{InstanceType, Schema, SchemaObject},
-};
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use serde_json::{Value, json, value::RawValue};
use smol::{
@@ -130,7 +126,10 @@ impl LanguageServerId {
}
/// A name of a language server.
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
+#[derive(
+ Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize, JsonSchema,
+)]
+#[serde(transparent)]
pub struct LanguageServerName(pub SharedString);
impl std::fmt::Display for LanguageServerName {
@@ -151,20 +150,6 @@ impl AsRef<OsStr> for LanguageServerName {
}
}
-impl JsonSchema for LanguageServerName {
- fn schema_name() -> String {
- "LanguageServerName".into()
- }
-
- fn json_schema(_: &mut SchemaGenerator) -> Schema {
- SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- ..Default::default()
- }
- .into()
- }
-}
-
impl LanguageServerName {
pub const fn new_static(s: &'static str) -> Self {
Self(SharedString::new_static(s))
@@ -141,7 +141,15 @@ pub type CodeBlockRenderFn = Arc<
pub type CodeBlockTransformFn =
Arc<dyn Fn(AnyDiv, Range<usize>, CodeBlockMetadata, &mut Window, &App) -> AnyDiv>;
-actions!(markdown, [Copy, CopyAsMarkdown]);
+actions!(
+ markdown,
+ [
+ /// Copies the selected text to the clipboard.
+ Copy,
+ /// Copies the selected text as markdown to the clipboard.
+ CopyAsMarkdown
+ ]
+);
impl Markdown {
pub fn new(
@@ -421,7 +429,7 @@ impl Selection {
}
}
-#[derive(Clone, Default)]
+#[derive(Debug, Clone, Default)]
pub struct ParsedMarkdown {
pub source: SharedString,
pub events: Arc<[(Range<usize>, MarkdownEvent)]>,
@@ -1672,7 +1680,7 @@ struct RenderedText {
links: Rc<[RenderedLink]>,
}
-#[derive(Clone, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq)]
struct RenderedLink {
source_range: Range<usize>,
destination_url: SharedString,
@@ -8,7 +8,18 @@ pub mod markdown_renderer;
actions!(
markdown,
- [OpenPreview, OpenPreviewToTheSide, OpenFollowingPreview]
+ [
+ /// Scrolls up by one page in the markdown preview.
+ MovePageUp,
+ /// Scrolls down by one page in the markdown preview.
+ MovePageDown,
+ /// Opens a markdown preview for the current file.
+ OpenPreview,
+ /// Opens a markdown preview in a split pane.
+ OpenPreviewToTheSide,
+ /// Opens a following markdown preview that syncs with the editor.
+ OpenFollowingPreview
+ ]
);
pub fn init(cx: &mut App) {
@@ -7,8 +7,8 @@ use editor::scroll::Autoscroll;
use editor::{Editor, EditorEvent, SelectionEffects};
use gpui::{
App, ClickEvent, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement,
- IntoElement, ListState, ParentElement, Render, RetainAllImageCache, Styled, Subscription, Task,
- WeakEntity, Window, list,
+ IntoElement, IsZero, ListState, ParentElement, Render, RetainAllImageCache, Styled,
+ Subscription, Task, WeakEntity, Window, list,
};
use language::LanguageRegistry;
use settings::Settings;
@@ -19,7 +19,7 @@ use workspace::{Pane, Workspace};
use crate::markdown_elements::ParsedMarkdownElement;
use crate::{
- OpenFollowingPreview, OpenPreview, OpenPreviewToTheSide,
+ MovePageDown, MovePageUp, OpenFollowingPreview, OpenPreview, OpenPreviewToTheSide,
markdown_elements::ParsedMarkdown,
markdown_parser::parse_markdown,
markdown_renderer::{RenderContext, render_markdown_block},
@@ -530,6 +530,26 @@ impl MarkdownPreviewView {
) -> bool {
!(current_block.is_list_item() && next_block.map(|b| b.is_list_item()).unwrap_or(false))
}
+
+ fn scroll_page_up(&mut self, _: &MovePageUp, _window: &mut Window, cx: &mut Context<Self>) {
+ let viewport_height = self.list_state.viewport_bounds().size.height;
+ if viewport_height.is_zero() {
+ return;
+ }
+
+ self.list_state.scroll_by(-viewport_height);
+ cx.notify();
+ }
+
+ fn scroll_page_down(&mut self, _: &MovePageDown, _window: &mut Window, cx: &mut Context<Self>) {
+ let viewport_height = self.list_state.viewport_bounds().size.height;
+ if viewport_height.is_zero() {
+ return;
+ }
+
+ self.list_state.scroll_by(viewport_height);
+ cx.notify();
+ }
}
impl Focusable for MarkdownPreviewView {
@@ -580,6 +600,8 @@ impl Render for MarkdownPreviewView {
.id("MarkdownPreview")
.key_context("MarkdownPreview")
.track_focus(&self.focus_handle(cx))
+ .on_action(cx.listener(MarkdownPreviewView::scroll_page_up))
+ .on_action(cx.listener(MarkdownPreviewView::scroll_page_down))
.size_full()
.bg(cx.theme().colors().editor_background)
.p_4()
@@ -12,13 +12,21 @@ pub fn init() {}
actions!(
menu,
[
+ /// Cancels the current menu operation.
Cancel,
+ /// Confirms the selected menu item.
Confirm,
+ /// Performs secondary confirmation action.
SecondaryConfirm,
+ /// Selects the previous item in the menu.
SelectPrevious,
+ /// Selects the next item in the menu.
SelectNext,
+ /// Selects the first item in the menu.
SelectFirst,
+ /// Selects the last item in the menu.
SelectLast,
+ /// Restarts the menu from the beginning.
Restart,
EndSlot,
]
@@ -65,17 +65,28 @@ use worktree::{Entry, ProjectEntryId, WorktreeId};
actions!(
outline_panel,
[
+ /// Collapses all entries in the outline tree.
CollapseAllEntries,
+ /// Collapses the currently selected entry.
CollapseSelectedEntry,
+ /// Expands all entries in the outline tree.
ExpandAllEntries,
+ /// Expands the currently selected entry.
ExpandSelectedEntry,
+ /// Folds the selected directory.
FoldDirectory,
+ /// Opens the selected entry in the editor.
OpenSelectedEntry,
+ /// Reveals the selected item in the system file manager.
RevealInFileManager,
+ /// Selects the parent of the current entry.
SelectParent,
+ /// Toggles the pin status of the active editor.
ToggleActiveEditorPin,
- ToggleFocus,
+ /// Unfolds the selected directory.
UnfoldDirectory,
+ /// Toggles focus on the outline panel.
+ ToggleFocus,
]
);
@@ -5,7 +5,15 @@ use settings::Settings;
use theme::ThemeSettings;
use ui::{Tab, prelude::*};
-actions!(panel, [NextPanelTab, PreviousPanelTab]);
+actions!(
+ panel,
+ [
+ /// Navigates to the next tab in the panel.
+ NextPanelTab,
+ /// Navigates to the previous tab in the panel.
+ PreviousPanelTab
+ ]
+);
pub trait PanelHeader: workspace::Panel {
fn header_height(&self, cx: &mut App) -> Pixels {
@@ -34,7 +34,13 @@ pub enum Direction {
Down,
}
-actions!(picker, [ConfirmCompletion]);
+actions!(
+ picker,
+ [
+ /// Confirms the selected completion in the picker.
+ ConfirmCompletion
+ ]
+);
/// ConfirmInput is an alternative editor action which - instead of selecting active picker entry - treats pickers editor input literally,
/// performing some kind of action on it.
@@ -54,9 +54,17 @@ impl Prettier {
".prettierrc.toml",
".prettierrc.js",
".prettierrc.cjs",
+ ".prettierrc.mjs",
+ ".prettierrc.ts",
+ ".prettierrc.cts",
+ ".prettierrc.mts",
"package.json",
"prettier.config.js",
"prettier.config.cjs",
+ "prettier.config.mjs",
+ "prettier.config.ts",
+ "prettier.config.cts",
+ "prettier.config.mts",
".editorconfig",
".prettierignore",
];
@@ -54,6 +54,7 @@ indexmap.workspace = true
language.workspace = true
log.workspace = true
lsp.workspace = true
+markdown.workspace = true
node_runtime.workspace = true
parking_lot.workspace = true
pathdiff.workspace = true
@@ -21,7 +21,13 @@ pub fn init(cx: &mut App) {
extension::init(cx);
}
-actions!(context_server, [Restart]);
+actions!(
+ context_server,
+ [
+ /// Restarts the context server.
+ Restart
+ ]
+);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ContextServerStatus {
@@ -165,6 +171,15 @@ impl ContextServerStore {
)
}
+ /// Returns all configured context server ids, regardless of enabled state.
+ pub fn configured_server_ids(&self) -> Vec<ContextServerId> {
+ self.context_server_settings
+ .keys()
+ .cloned()
+ .map(ContextServerId)
+ .collect()
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn test(
registry: Entity<ContextServerDescriptorRegistry>,
@@ -812,9 +827,9 @@ mod tests {
.await;
let executor = cx.executor();
- let registry = cx.new(|_| {
+ let registry = cx.new(|cx| {
let mut registry = ContextServerDescriptorRegistry::new();
- registry.register_context_server_descriptor(SERVER_1_ID.into(), fake_descriptor_1);
+ registry.register_context_server_descriptor(SERVER_1_ID.into(), fake_descriptor_1, cx);
registry
});
let store = cx.new(|cx| {
@@ -103,19 +103,20 @@ struct ContextServerDescriptorRegistryProxy {
impl ExtensionContextServerProxy for ContextServerDescriptorRegistryProxy {
fn register_context_server(&self, extension: Arc<dyn Extension>, id: Arc<str>, cx: &mut App) {
self.context_server_factory_registry
- .update(cx, |registry, _| {
+ .update(cx, |registry, cx| {
registry.register_context_server_descriptor(
id.clone(),
Arc::new(ContextServerDescriptor { id, extension })
as Arc<dyn registry::ContextServerDescriptor>,
+ cx,
)
});
}
fn unregister_context_server(&self, server_id: Arc<str>, cx: &mut App) {
self.context_server_factory_registry
- .update(cx, |registry, _| {
- registry.unregister_context_server_descriptor_by_id(&server_id)
+ .update(cx, |registry, cx| {
+ registry.unregister_context_server_descriptor_by_id(&server_id, cx)
});
}
}
@@ -4,7 +4,7 @@ use anyhow::Result;
use collections::HashMap;
use context_server::ContextServerCommand;
use extension::ContextServerConfiguration;
-use gpui::{App, AppContext as _, AsyncApp, Entity, Global, Task};
+use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Global, Task};
use crate::worktree_store::WorktreeStore;
@@ -66,12 +66,19 @@ impl ContextServerDescriptorRegistry {
&mut self,
id: Arc<str>,
descriptor: Arc<dyn ContextServerDescriptor>,
+ cx: &mut Context<Self>,
) {
self.context_servers.insert(id, descriptor);
+ cx.notify();
}
/// Unregisters the [`ContextServerDescriptor`] for the server with the given ID.
- pub fn unregister_context_server_descriptor_by_id(&mut self, server_id: &str) {
+ pub fn unregister_context_server_descriptor_by_id(
+ &mut self,
+ server_id: &str,
+ cx: &mut Context<Self>,
+ ) {
self.context_servers.remove(server_id);
+ cx.notify();
}
}
@@ -2,11 +2,13 @@ use anyhow::{Context as _, Result};
use async_trait::async_trait;
use dap::{DapLocator, DebugRequest, adapters::DebugAdapterName};
use gpui::SharedString;
-use serde_json::Value;
+use serde_json::{Value, json};
use smol::{
+ Timer,
io::AsyncReadExt,
process::{Command, Stdio},
};
+use std::time::Duration;
use task::{BuildTaskDefinition, DebugScenario, ShellBuilder, SpawnInTerminal, TaskTemplate};
pub(crate) struct CargoLocator;
@@ -25,14 +27,29 @@ async fn find_best_executable(executables: &[String], test_name: &str) -> Option
continue;
};
let mut test_lines = String::default();
- if let Some(mut stdout) = child.stdout.take() {
- stdout.read_to_string(&mut test_lines).await.ok();
+ let exec_result = smol::future::race(
+ async {
+ if let Some(mut stdout) = child.stdout.take() {
+ stdout.read_to_string(&mut test_lines).await?;
+ }
+ Ok(())
+ },
+ async {
+ Timer::after(Duration::from_secs(3)).await;
+ anyhow::bail!("Timed out waiting for executable stdout")
+ },
+ );
+
+ if let Err(err) = exec_result.await {
+ log::warn!("Failed to list tests for {executable}: {err}");
+ } else {
for line in test_lines.lines() {
if line.contains(&test_name) {
return Some(executable.clone());
}
}
}
+ let _ = child.kill();
}
None
}
@@ -76,6 +93,13 @@ impl DapLocator for CargoLocator {
_ => {}
}
+ let config = if adapter.as_ref() == "CodeLLDB" {
+ json!({
+ "sourceLanguages": ["rust"]
+ })
+ } else {
+ Value::Null
+ };
Some(DebugScenario {
adapter: adapter.0.clone(),
label: resolved_label.to_string().into(),
@@ -83,7 +107,7 @@ impl DapLocator for CargoLocator {
task_template,
locator_name: Some(self.name()),
}),
- config: serde_json::Value::Null,
+ config,
tcp_connection: None,
})
}
@@ -95,7 +119,7 @@ impl DapLocator for CargoLocator {
.context("Couldn't get cwd from debug config which is needed for locators")?;
let builder = ShellBuilder::new(true, &build_config.shell).non_interactive();
let (program, args) = builder.build(
- "cargo".into(),
+ Some("cargo".into()),
&build_config
.args
.iter()
@@ -119,10 +143,28 @@ impl DapLocator for CargoLocator {
let status = child.status().await?;
anyhow::ensure!(status.success(), "Cargo command failed");
+ let is_test = build_config
+ .args
+ .first()
+ .map_or(false, |arg| arg == "test" || arg == "t");
+
let executables = output
.lines()
.filter(|line| !line.trim().is_empty())
.filter_map(|line| serde_json::from_str(line).ok())
+ .filter(|json: &Value| {
+ let is_test_binary = json
+ .get("profile")
+ .and_then(|profile| profile.get("test"))
+ .and_then(Value::as_bool)
+ .unwrap_or(false);
+
+ if is_test {
+ is_test_binary
+ } else {
+ !is_test_binary
+ }
+ })
.filter_map(|json: Value| {
json.get("executable")
.and_then(Value::as_str)
@@ -133,10 +175,6 @@ impl DapLocator for CargoLocator {
!executables.is_empty(),
"Couldn't get executable in cargo locator"
);
- let is_test = build_config
- .args
- .first()
- .map_or(false, |arg| arg == "test" || arg == "t");
let mut test_name = None;
if is_test {
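
The filter added above leans on cargo's `--message-format=json` output, where compiler-artifact messages carry a `profile.test` flag and, for binaries, an `executable` path; test builds keep only test binaries and other builds keep the rest. A self-contained sketch of the same selection:

```rust
use serde_json::Value;

fn executables(output: &str, want_test_binaries: bool) -> Vec<String> {
    output
        .lines()
        .filter(|line| !line.trim().is_empty())
        .filter_map(|line| serde_json::from_str::<Value>(line).ok())
        .filter(|json| {
            let is_test_binary = json
                .get("profile")
                .and_then(|profile| profile.get("test"))
                .and_then(Value::as_bool)
                .unwrap_or(false);
            is_test_binary == want_test_binaries
        })
        .filter_map(|json| {
            json.get("executable")
                .and_then(Value::as_str)
                .map(String::from)
        })
        .collect()
}

fn main() {
    let output = concat!(
        r#"{"reason":"compiler-artifact","profile":{"test":true},"executable":"/tmp/foo-1a2b"}"#,
        "\n",
        r#"{"reason":"compiler-artifact","profile":{"test":false},"executable":"/tmp/foo"}"#,
    );
    assert_eq!(executables(output, true), ["/tmp/foo-1a2b"]);
    assert_eq!(executables(output, false), ["/tmp/foo"]);
}
```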
@@ -117,7 +117,20 @@ impl DapLocator for GoLocator {
// HACK: tasks assume that they are run in a shell context,
// so the -run regex has escaped specials. Delve correctly
// handles escaping, so we undo that here.
- if arg.starts_with("\\^") && arg.ends_with("\\$") {
+ if let Some((left, right)) = arg.split_once("/")
+ && left.starts_with("\\^")
+ && left.ends_with("\\$")
+ && right.starts_with("\\^")
+ && right.ends_with("\\$")
+ {
+ let mut left = left[1..left.len() - 2].to_string();
+ left.push('$');
+
+ let mut right = right[1..right.len() - 2].to_string();
+ right.push('$');
+
+ args.push(format!("{left}/{right}"));
+ } else if arg.starts_with("\\^") && arg.ends_with("\\$") {
let mut arg = arg[1..arg.len() - 2].to_string();
arg.push('$');
args.push(arg);
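
The new branch above handles `-run` regexes of the form `\^Parent\$/\^Sub\$` (parent test plus subtest) in addition to the single-test form, stripping the shell-style escaping that Delve would otherwise receive verbatim. A small standalone sketch of the unescaping, using the same slicing as the diff:

```rust
fn unescape(part: &str) -> Option<String> {
    if part.starts_with("\\^") && part.ends_with("\\$") {
        // Drop the leading `\` and the trailing `\$`, then re-append `$`,
        // exactly as the locator does for each side.
        let mut out = part[1..part.len() - 2].to_string();
        out.push('$');
        Some(out)
    } else {
        None
    }
}

fn main() {
    let arg = "\\^TestParent\\$/\\^subtest\\$";
    let unescaped = match arg.split_once('/') {
        Some((left, right)) => match (unescape(left), unescape(right)) {
            (Some(l), Some(r)) => format!("{l}/{r}"),
            _ => arg.to_string(),
        },
        None => unescape(arg).unwrap_or_else(|| arg.to_string()),
    };
    assert_eq!(unescaped, "^TestParent$/^subtest$");
    println!("{unescaped}");
}
```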
@@ -420,6 +420,15 @@ impl RunningMode {
.collect::<Vec<_>>()
})
.unwrap_or_default();
+ // From spec (on initialization sequence):
+ // client sends a setExceptionBreakpoints request if one or more exceptionBreakpointFilters have been defined (or if supportsConfigurationDoneRequest is not true)
+ //
+ // Thus we should send setExceptionBreakpoints even if `exceptionFilters` variable is empty (as long as there were some options in the first place).
+ let should_send_exception_breakpoints = capabilities
+ .exception_breakpoint_filters
+ .as_ref()
+ .map_or(false, |filters| !filters.is_empty())
+ || !configuration_done_supported;
let supports_exception_filters = capabilities
.supports_exception_filter_options
.unwrap_or_default();
@@ -461,9 +470,12 @@ impl RunningMode {
}
})?;
- this.send_exception_breakpoints(exception_filters, supports_exception_filters)
- .await
- .ok();
+ if should_send_exception_breakpoints {
+ this.send_exception_breakpoints(exception_filters, supports_exception_filters)
+ .await
+ .ok();
+ }
+
let ret = if configuration_done_supported {
this.request(ConfigurationDone {})
} else {
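
Condensed, the quoted rule reduces to a two-input predicate; a small sketch of its truth table, with `filters` standing in for `capabilities.exception_breakpoint_filters`:

```rust
// setExceptionBreakpoints should be sent when the adapter advertises any
// exception filters, or when it does not support configurationDone.
fn should_send_exception_breakpoints(
    filters: Option<&[&str]>,
    configuration_done_supported: bool,
) -> bool {
    filters.map_or(false, |filters| !filters.is_empty()) || !configuration_done_supported
}

fn main() {
    // Adapter defines filters: always send, even if the user enabled none.
    assert!(should_send_exception_breakpoints(Some(&["rust_panic"]), true));
    // No filters, but no configurationDone support: send anyway.
    assert!(should_send_exception_breakpoints(None, false));
    // No filters and configurationDone is supported: skip the request.
    assert!(!should_send_exception_breakpoints(Some(&[]), true));
}
```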
@@ -1004,7 +1016,7 @@ impl Session {
cx.spawn(async move |this, cx| {
while let Some(output) = rx.next().await {
- this.update(cx, |this, cx| {
+ this.update(cx, |this, _| {
let event = dap::OutputEvent {
category: None,
output,
@@ -1016,7 +1028,7 @@ impl Session {
data: None,
location_reference: None,
};
- this.push_output(event, cx);
+ this.push_output(event);
})?;
}
anyhow::Ok(())
@@ -1446,7 +1458,7 @@ impl Session {
return;
}
- self.push_output(event, cx);
+ self.push_output(event);
cx.notify();
}
Events::Breakpoint(event) => self.breakpoint_store.update(cx, |store, _| {
@@ -1479,6 +1491,28 @@ impl Session {
}
Events::Capabilities(event) => {
self.capabilities = self.capabilities.merge(event.capabilities);
+
+ // The adapter might've enabled new exception breakpoints (or disabled existing ones).
+ let recent_filters = self
+ .capabilities
+ .exception_breakpoint_filters
+ .iter()
+ .flatten()
+ .map(|filter| (filter.filter.clone(), filter.clone()))
+ .collect::<BTreeMap<_, _>>();
+ for filter in recent_filters.values() {
+ let default = filter.default.unwrap_or_default();
+ self.exception_breakpoints
+ .entry(filter.filter.clone())
+ .or_insert_with(|| (filter.clone(), default));
+ }
+ // Remove the entries whose filters no longer exist.
+ self.exception_breakpoints
+ .retain(|k, _| recent_filters.contains_key(k));
+ if self.is_started() {
+ self.send_exception_breakpoints(cx);
+ }
+
cx.notify();
}
Events::Memory(_) => {}
@@ -1611,10 +1645,9 @@ impl Session {
});
}
- fn push_output(&mut self, event: OutputEvent, cx: &mut Context<Self>) {
+ fn push_output(&mut self, event: OutputEvent) {
self.output.push_back(event);
self.output_token.0 += 1;
- cx.emit(SessionEvent::ConsoleOutput);
}
pub fn any_stopped_thread(&self) -> bool {
@@ -1902,12 +1935,14 @@ impl Session {
}
pub fn continue_thread(&mut self, thread_id: ThreadId, cx: &mut Context<Self>) {
+ let supports_single_thread_execution_requests =
+ self.capabilities.supports_single_thread_execution_requests;
self.thread_states.continue_thread(thread_id);
self.request(
ContinueCommand {
args: ContinueArguments {
thread_id: thread_id.0,
- single_thread: Some(true),
+ single_thread: supports_single_thread_execution_requests,
},
},
Self::on_step_response::<ContinueCommand>(thread_id),
@@ -2318,7 +2353,7 @@ impl Session {
data: None,
location_reference: None,
};
- self.push_output(event, cx);
+ self.push_output(event);
let request = self.mode.request_dap(EvaluateCommand {
expression,
context,
@@ -2341,7 +2376,7 @@ impl Session {
data: None,
location_reference: None,
};
- this.push_output(event, cx);
+ this.push_output(event);
}
Err(e) => {
let event = dap::OutputEvent {
@@ -2355,7 +2390,7 @@ impl Session {
data: None,
location_reference: None,
};
- this.push_output(event, cx);
+ this.push_output(event);
}
};
cx.notify();
@@ -171,27 +171,27 @@ pub(crate) struct PerformRename {
pub push_to_history: bool,
}
-#[derive(Debug)]
-pub struct GetDefinition {
+#[derive(Debug, Clone, Copy)]
+pub struct GetDefinitions {
pub position: PointUtf16,
}
-#[derive(Debug)]
-pub(crate) struct GetDeclaration {
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct GetDeclarations {
pub position: PointUtf16,
}
-#[derive(Debug)]
-pub(crate) struct GetTypeDefinition {
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct GetTypeDefinitions {
pub position: PointUtf16,
}
-#[derive(Debug)]
-pub(crate) struct GetImplementation {
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct GetImplementations {
pub position: PointUtf16,
}
-#[derive(Debug)]
+#[derive(Debug, Clone, Copy)]
pub(crate) struct GetReferences {
pub position: PointUtf16,
}
@@ -588,7 +588,7 @@ impl LspCommand for PerformRename {
}
#[async_trait(?Send)]
-impl LspCommand for GetDefinition {
+impl LspCommand for GetDefinitions {
type Response = Vec<LocationLink>;
type LspRequest = lsp::request::GotoDefinition;
type ProtoRequest = proto::GetDefinition;
@@ -690,7 +690,7 @@ impl LspCommand for GetDefinition {
}
#[async_trait(?Send)]
-impl LspCommand for GetDeclaration {
+impl LspCommand for GetDeclarations {
type Response = Vec<LocationLink>;
type LspRequest = lsp::request::GotoDeclaration;
type ProtoRequest = proto::GetDeclaration;
@@ -793,7 +793,7 @@ impl LspCommand for GetDeclaration {
}
#[async_trait(?Send)]
-impl LspCommand for GetImplementation {
+impl LspCommand for GetImplementations {
type Response = Vec<LocationLink>;
type LspRequest = lsp::request::GotoImplementation;
type ProtoRequest = proto::GetImplementation;
@@ -895,7 +895,7 @@ impl LspCommand for GetImplementation {
}
#[async_trait(?Send)]
-impl LspCommand for GetTypeDefinition {
+impl LspCommand for GetTypeDefinitions {
type Response = Vec<LocationLink>;
type LspRequest = lsp::request::GotoTypeDefinition;
type ProtoRequest = proto::GetTypeDefinition;
@@ -1846,12 +1846,15 @@ impl LspCommand for GetSignatureHelp {
async fn response_from_lsp(
self,
message: Option<lsp::SignatureHelp>,
- _: Entity<LspStore>,
+ lsp_store: Entity<LspStore>,
_: Entity<Buffer>,
_: LanguageServerId,
- _: AsyncApp,
+ cx: AsyncApp,
) -> Result<Self::Response> {
- Ok(message.and_then(SignatureHelp::new))
+ let Some(message) = message else {
+ return Ok(None);
+ };
+ cx.update(|cx| SignatureHelp::new(message, Some(lsp_store.read(cx).languages.clone()), cx))
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest {
@@ -1902,14 +1905,18 @@ impl LspCommand for GetSignatureHelp {
async fn response_from_proto(
self,
response: proto::GetSignatureHelpResponse,
- _: Entity<LspStore>,
+ lsp_store: Entity<LspStore>,
_: Entity<Buffer>,
- _: AsyncApp,
+ cx: AsyncApp,
) -> Result<Self::Response> {
- Ok(response
- .signature_help
- .map(proto_to_lsp_signature)
- .and_then(SignatureHelp::new))
+ cx.update(|cx| {
+ response
+ .signature_help
+ .map(proto_to_lsp_signature)
+ .and_then(|signature| {
+ SignatureHelp::new(signature, Some(lsp_store.read(cx).languages.clone()), cx)
+ })
+ })
}
fn buffer_id_from_proto(message: &Self::ProtoRequest) -> Result<BufferId> {
@@ -1,94 +1,143 @@
-use std::ops::Range;
+use std::{ops::Range, sync::Arc};
-use gpui::{FontStyle, FontWeight, HighlightStyle};
+use gpui::{App, AppContext, Entity, FontWeight, HighlightStyle, SharedString};
+use language::LanguageRegistry;
+use markdown::Markdown;
use rpc::proto::{self, documentation};
#[derive(Debug)]
pub struct SignatureHelp {
- pub label: String,
- pub highlights: Vec<(Range<usize>, HighlightStyle)>,
+ pub active_signature: usize,
+ pub signatures: Vec<SignatureHelpData>,
pub(super) original_data: lsp::SignatureHelp,
}
-impl SignatureHelp {
- pub fn new(help: lsp::SignatureHelp) -> Option<Self> {
- let function_options_count = help.signatures.len();
-
- let signature_information = help
- .active_signature
- .and_then(|active_signature| help.signatures.get(active_signature as usize))
- .or_else(|| help.signatures.first())?;
-
- let str_for_join = ", ";
- let parameter_length = signature_information
- .parameters
- .as_ref()
- .map_or(0, |parameters| parameters.len());
- let mut highlight_start = 0;
- let (strings, mut highlights): (Vec<_>, Vec<_>) = signature_information
- .parameters
- .as_ref()?
- .iter()
- .enumerate()
- .map(|(i, parameter_information)| {
- let label = match parameter_information.label.clone() {
- lsp::ParameterLabel::Simple(string) => string,
- lsp::ParameterLabel::LabelOffsets(offset) => signature_information
- .label
- .chars()
- .skip(offset[0] as usize)
- .take((offset[1] - offset[0]) as usize)
- .collect::<String>(),
- };
- let label_length = label.len();
-
- let highlights = help.active_parameter.and_then(|active_parameter| {
- if i == active_parameter as usize {
- Some((
- highlight_start..(highlight_start + label_length),
- HighlightStyle {
- font_weight: Some(FontWeight::EXTRA_BOLD),
- ..Default::default()
- },
- ))
- } else {
- None
- }
- });
+#[derive(Debug, Clone)]
+pub struct SignatureHelpData {
+ pub label: SharedString,
+ pub documentation: Option<Entity<Markdown>>,
+ pub highlights: Vec<(Range<usize>, HighlightStyle)>,
+ pub active_parameter: Option<usize>,
+ pub parameters: Vec<ParameterInfo>,
+}
+
+#[derive(Debug, Clone)]
+pub struct ParameterInfo {
+ pub label_range: Option<Range<usize>>,
+ pub documentation: Option<Entity<Markdown>>,
+}
- if i != parameter_length {
- highlight_start += label_length + str_for_join.len();
+impl SignatureHelp {
+ pub fn new(
+ help: lsp::SignatureHelp,
+ language_registry: Option<Arc<LanguageRegistry>>,
+ cx: &mut App,
+ ) -> Option<Self> {
+ if help.signatures.is_empty() {
+ return None;
+ }
+ let active_signature = help.active_signature.unwrap_or(0) as usize;
+ let mut signatures = Vec::<SignatureHelpData>::with_capacity(help.signatures.len());
+ for signature in &help.signatures {
+ let active_parameter = signature
+ .active_parameter
+ .unwrap_or_else(|| help.active_parameter.unwrap_or(0))
+ as usize;
+ let mut highlights = Vec::new();
+ let mut parameter_infos = Vec::new();
+
+ if let Some(parameters) = &signature.parameters {
+ for (index, parameter) in parameters.iter().enumerate() {
+ let label_range = match ¶meter.label {
+ lsp::ParameterLabel::LabelOffsets(parameter_label_offsets) => {
+ let range = *parameter_label_offsets.get(0)? as usize
+ ..*parameter_label_offsets.get(1)? as usize;
+ if index == active_parameter {
+ highlights.push((
+ range.clone(),
+ HighlightStyle {
+ font_weight: Some(FontWeight::EXTRA_BOLD),
+ ..HighlightStyle::default()
+ },
+ ));
+ }
+ Some(range)
+ }
+ lsp::ParameterLabel::Simple(parameter_label) => {
+ if let Some(start) = signature.label.find(parameter_label) {
+ let range = start..start + parameter_label.len();
+ if index == active_parameter {
+ highlights.push((
+ range.clone(),
+ HighlightStyle {
+ font_weight: Some(FontWeight::EXTRA_BOLD),
+ ..HighlightStyle::default()
+ },
+ ));
+ }
+ Some(range)
+ } else {
+ None
+ }
+ }
+ };
+
+ let documentation = parameter
+ .documentation
+ .as_ref()
+ .map(|doc| documentation_to_markdown(doc, language_registry.clone(), cx));
+
+ parameter_infos.push(ParameterInfo {
+ label_range,
+ documentation,
+ });
}
+ }
- (label, highlights)
- })
- .unzip();
-
- if strings.is_empty() {
- None
- } else {
- let mut label = strings.join(str_for_join);
-
- if function_options_count >= 2 {
- let suffix = format!("(+{} overload)", function_options_count - 1);
- let highlight_start = label.len() + 1;
- highlights.push(Some((
- highlight_start..(highlight_start + suffix.len()),
- HighlightStyle {
- font_style: Some(FontStyle::Italic),
- ..Default::default()
- },
- )));
- label.push(' ');
- label.push_str(&suffix);
- };
+ let label = SharedString::from(signature.label.clone());
+ let documentation = signature
+ .documentation
+ .as_ref()
+ .map(|doc| documentation_to_markdown(doc, language_registry.clone(), cx));
- Some(Self {
+ signatures.push(SignatureHelpData {
label,
- highlights: highlights.into_iter().flatten().collect(),
- original_data: help,
- })
+ documentation,
+ highlights,
+ active_parameter: Some(active_parameter),
+ parameters: parameter_infos,
+ });
+ }
+ Some(Self {
+ signatures,
+ active_signature,
+ original_data: help,
+ })
+ }
+}
+
+fn documentation_to_markdown(
+ documentation: &lsp::Documentation,
+ language_registry: Option<Arc<LanguageRegistry>>,
+ cx: &mut App,
+) -> Entity<Markdown> {
+ match documentation {
+ lsp::Documentation::String(string) => {
+ cx.new(|cx| Markdown::new_text(SharedString::from(string), cx))
}
+ lsp::Documentation::MarkupContent(markup) => match markup.kind {
+ lsp::MarkupKind::PlainText => {
+ cx.new(|cx| Markdown::new_text(SharedString::from(&markup.value), cx))
+ }
+ lsp::MarkupKind::Markdown => cx.new(|cx| {
+ Markdown::new(
+ SharedString::from(&markup.value),
+ language_registry,
+ None,
+ cx,
+ )
+ }),
+ },
}
}
@@ -206,7 +255,8 @@ fn proto_to_lsp_documentation(documentation: proto::Documentation) -> Option<lsp
#[cfg(test)]
mod tests {
- use gpui::{FontStyle, FontWeight, HighlightStyle};
+ use gpui::{FontWeight, HighlightStyle, SharedString, TestAppContext};
+ use lsp::{Documentation, MarkupContent, MarkupKind};
use crate::lsp_command::signature_help::SignatureHelp;
@@ -217,19 +267,14 @@ mod tests {
}
}
- fn overload() -> HighlightStyle {
- HighlightStyle {
- font_style: Some(FontStyle::Italic),
- ..Default::default()
- }
- }
-
- #[test]
- fn test_create_signature_help_markdown_string_1() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_1(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![lsp::SignatureInformation {
label: "fn test(foo: u8, bar: &str)".to_string(),
- documentation: None,
+ documentation: Some(Documentation::String(
+ "This is a test documentation".to_string(),
+ )),
parameters: Some(vec![
lsp::ParameterInformation {
label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
@@ -245,26 +290,37 @@ mod tests {
active_signature: Some(0),
active_parameter: Some(0),
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "foo: u8, bar: &str".to_string(),
- vec![(0..7, current_parameter())]
+ SharedString::new("fn test(foo: u8, bar: &str)"),
+ vec![(8..15, current_parameter())]
)
);
+ assert_eq!(
+ signature
+ .documentation
+ .unwrap()
+ .update(cx, |documentation, _| documentation.source().to_owned()),
+ "This is a test documentation",
+ )
}
- #[test]
- fn test_create_signature_help_markdown_string_2() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_2(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![lsp::SignatureInformation {
label: "fn test(foo: u8, bar: &str)".to_string(),
- documentation: None,
+ documentation: Some(Documentation::MarkupContent(MarkupContent {
+ kind: MarkupKind::Markdown,
+ value: "This is a test documentation".to_string(),
+ })),
parameters: Some(vec![
lsp::ParameterInformation {
label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
@@ -280,22 +336,30 @@ mod tests {
active_signature: Some(0),
active_parameter: Some(1),
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "foo: u8, bar: &str".to_string(),
- vec![(9..18, current_parameter())]
+ SharedString::new("fn test(foo: u8, bar: &str)"),
+ vec![(17..26, current_parameter())]
)
);
+ assert_eq!(
+ signature
+ .documentation
+ .unwrap()
+ .update(cx, |documentation, _| documentation.source().to_owned()),
+ "This is a test documentation",
+ )
}
- #[test]
- fn test_create_signature_help_markdown_string_3() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_3(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![
lsp::SignatureInformation {
@@ -332,22 +396,23 @@ mod tests {
active_signature: Some(0),
active_parameter: Some(0),
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "foo: u8, bar: &str (+1 overload)".to_string(),
- vec![(0..7, current_parameter()), (19..32, overload())]
+ SharedString::new("fn test1(foo: u8, bar: &str)"),
+ vec![(9..16, current_parameter())]
)
);
}
- #[test]
- fn test_create_signature_help_markdown_string_4() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_4(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![
lsp::SignatureInformation {
@@ -384,22 +449,23 @@ mod tests {
active_signature: Some(1),
active_parameter: Some(0),
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "hoge: String, fuga: bool (+1 overload)".to_string(),
- vec![(0..12, current_parameter()), (25..38, overload())]
+ SharedString::new("fn test2(hoge: String, fuga: bool)"),
+ vec![(9..21, current_parameter())]
)
);
}
- #[test]
- fn test_create_signature_help_markdown_string_5() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_5(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![
lsp::SignatureInformation {
@@ -436,22 +502,23 @@ mod tests {
active_signature: Some(1),
active_parameter: Some(1),
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "hoge: String, fuga: bool (+1 overload)".to_string(),
- vec![(14..24, current_parameter()), (25..38, overload())]
+ SharedString::new("fn test2(hoge: String, fuga: bool)"),
+ vec![(23..33, current_parameter())]
)
);
}
- #[test]
- fn test_create_signature_help_markdown_string_6() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_6(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![
lsp::SignatureInformation {
@@ -488,22 +555,23 @@ mod tests {
active_signature: Some(1),
active_parameter: None,
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "hoge: String, fuga: bool (+1 overload)".to_string(),
- vec![(25..38, overload())]
+ SharedString::new("fn test2(hoge: String, fuga: bool)"),
+ vec![(9..21, current_parameter())]
)
);
}
- #[test]
- fn test_create_signature_help_markdown_string_7() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_7(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![
lsp::SignatureInformation {
@@ -555,33 +623,34 @@ mod tests {
active_signature: Some(2),
active_parameter: Some(1),
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "one: usize, two: u32 (+2 overload)".to_string(),
- vec![(12..20, current_parameter()), (21..34, overload())]
+ SharedString::new("fn test3(one: usize, two: u32)"),
+ vec![(21..29, current_parameter())]
)
);
}
- #[test]
- fn test_create_signature_help_markdown_string_8() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_8(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![],
active_signature: None,
active_parameter: None,
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_none());
}
- #[test]
- fn test_create_signature_help_markdown_string_9() {
+ #[gpui::test]
+ fn test_create_signature_help_markdown_string_9(cx: &mut TestAppContext) {
let signature_help = lsp::SignatureHelp {
signatures: vec![lsp::SignatureInformation {
label: "fn test(foo: u8, bar: &str)".to_string(),
@@ -601,17 +670,70 @@ mod tests {
active_signature: Some(0),
active_parameter: Some(0),
};
- let maybe_markdown = SignatureHelp::new(signature_help);
+ let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
assert!(maybe_markdown.is_some());
let markdown = maybe_markdown.unwrap();
- let markdown = (markdown.label, markdown.highlights);
+ let signature = markdown.signatures[markdown.active_signature].clone();
+ let markdown = (signature.label, signature.highlights);
assert_eq!(
markdown,
(
- "foo: u8, bar: &str".to_string(),
- vec![(0..7, current_parameter())]
+ SharedString::new("fn test(foo: u8, bar: &str)"),
+ vec![(8..15, current_parameter())]
)
);
}
+
+ #[gpui::test]
+ fn test_parameter_documentation(cx: &mut TestAppContext) {
+ let signature_help = lsp::SignatureHelp {
+ signatures: vec![lsp::SignatureInformation {
+ label: "fn test(foo: u8, bar: &str)".to_string(),
+ documentation: Some(Documentation::String(
+ "This is a test documentation".to_string(),
+ )),
+ parameters: Some(vec![
+ lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+ documentation: Some(Documentation::String("The foo parameter".to_string())),
+ },
+ lsp::ParameterInformation {
+ label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+ documentation: Some(Documentation::String("The bar parameter".to_string())),
+ },
+ ]),
+ active_parameter: None,
+ }],
+ active_signature: Some(0),
+ active_parameter: Some(0),
+ };
+ let maybe_signature_help = cx.update(|cx| SignatureHelp::new(signature_help, None, cx));
+ assert!(maybe_signature_help.is_some());
+
+ let signature_help = maybe_signature_help.unwrap();
+ let signature = &signature_help.signatures[signature_help.active_signature];
+
+ // Check that parameter documentation is extracted
+ assert_eq!(signature.parameters.len(), 2);
+ assert_eq!(
+ signature.parameters[0]
+ .documentation
+ .as_ref()
+ .unwrap()
+ .update(cx, |documentation, _| documentation.source().to_owned()),
+ "The foo parameter",
+ );
+ assert_eq!(
+ signature.parameters[1]
+ .documentation
+ .as_ref()
+ .unwrap()
+ .update(cx, |documentation, _| documentation.source().to_owned()),
+ "The bar parameter",
+ );
+
+ // Check that the active parameter is correct
+ assert_eq!(signature.active_parameter, Some(0));
+ }
}
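
The updated expectations above follow from the new model: highlights now index into the full signature label rather than a comma-joined parameter list, so `foo: u8` in `fn test(foo: u8, bar: &str)` is highlighted at bytes 8..15 instead of 0..7. A quick check of that arithmetic (standalone, not part of the suite above):

```rust
#[test]
fn active_parameter_range_indexes_into_full_label() {
    let label = "fn test(foo: u8, bar: &str)";
    // `Simple` parameter labels are located by substring search in the label.
    let start = label.find("foo: u8").unwrap();
    assert_eq!(start..start + "foo: u8".len(), 8..15);

    let start = label.find("bar: &str").unwrap();
    assert_eq!(start..start + "bar: &str".len(), 17..26);
}
```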
@@ -4,8 +4,9 @@ pub mod rust_analyzer_ext;
use crate::{
CodeAction, ColorPresentation, Completion, CompletionResponse, CompletionSource,
- CoreCompletion, DocumentColor, Hover, InlayHint, LspAction, LspPullDiagnostics, ProjectItem,
- ProjectPath, ProjectTransaction, PulledDiagnostics, ResolveState, Symbol, ToolchainStore,
+ CoreCompletion, DocumentColor, Hover, InlayHint, LocationLink, LspAction, LspPullDiagnostics,
+ ProjectItem, ProjectPath, ProjectTransaction, PulledDiagnostics, ResolveState, Symbol,
+ ToolchainStore,
buffer_store::{BufferStore, BufferStoreEvent},
environment::ProjectEnvironment,
lsp_command::{self, *},
@@ -3660,12 +3661,8 @@ impl LspStore {
client.add_entity_request_handler(Self::handle_lsp_command::<GetCodeActions>);
client.add_entity_request_handler(Self::handle_lsp_command::<GetCompletions>);
client.add_entity_request_handler(Self::handle_lsp_command::<GetHover>);
- client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
- client.add_entity_request_handler(Self::handle_lsp_command::<GetDeclaration>);
- client.add_entity_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentSymbols>);
- client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
client.add_entity_request_handler(Self::handle_lsp_command::<LinkedEditingRange>);
@@ -5257,6 +5254,371 @@ impl LspStore {
})
}
+ pub fn definitions(
+ &mut self,
+ buffer_handle: &Entity<Buffer>,
+ position: PointUtf16,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Vec<LocationLink>>> {
+ if let Some((upstream_client, project_id)) = self.upstream_client() {
+ let request_task = upstream_client.request(proto::MultiLspQuery {
+ buffer_id: buffer_handle.read(cx).remote_id().into(),
+ version: serialize_version(&buffer_handle.read(cx).version()),
+ project_id,
+ strategy: Some(proto::multi_lsp_query::Strategy::All(
+ proto::AllLanguageServers {},
+ )),
+ request: Some(proto::multi_lsp_query::Request::GetDefinition(
+ GetDefinitions { position }.to_proto(project_id, buffer_handle.read(cx)),
+ )),
+ });
+ let buffer = buffer_handle.clone();
+ cx.spawn(async move |weak_project, cx| {
+ let Some(project) = weak_project.upgrade() else {
+ return Ok(Vec::new());
+ };
+ let responses = request_task.await?.responses;
+ let actions = join_all(
+ responses
+ .into_iter()
+ .filter_map(|lsp_response| match lsp_response.response? {
+ proto::lsp_response::Response::GetDefinitionResponse(response) => {
+ Some(response)
+ }
+ unexpected => {
+ debug_panic!("Unexpected response: {unexpected:?}");
+ None
+ }
+ })
+ .map(|definitions_response| {
+ GetDefinitions { position }.response_from_proto(
+ definitions_response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }),
+ )
+ .await;
+
+ Ok(actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect())
+ })
+ } else {
+ let definitions_task = self.request_multiple_lsp_locally(
+ buffer_handle,
+ Some(position),
+ GetDefinitions { position },
+ cx,
+ );
+ cx.spawn(async move |_, _| {
+ Ok(definitions_task
+ .await
+ .into_iter()
+ .flat_map(|(_, definitions)| definitions)
+ .dedup()
+ .collect())
+ })
+ }
+ }
+
+ pub fn declarations(
+ &mut self,
+ buffer_handle: &Entity<Buffer>,
+ position: PointUtf16,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Vec<LocationLink>>> {
+ if let Some((upstream_client, project_id)) = self.upstream_client() {
+ let request_task = upstream_client.request(proto::MultiLspQuery {
+ buffer_id: buffer_handle.read(cx).remote_id().into(),
+ version: serialize_version(&buffer_handle.read(cx).version()),
+ project_id,
+ strategy: Some(proto::multi_lsp_query::Strategy::All(
+ proto::AllLanguageServers {},
+ )),
+ request: Some(proto::multi_lsp_query::Request::GetDeclaration(
+ GetDeclarations { position }.to_proto(project_id, buffer_handle.read(cx)),
+ )),
+ });
+ let buffer = buffer_handle.clone();
+ cx.spawn(async move |weak_project, cx| {
+ let Some(project) = weak_project.upgrade() else {
+ return Ok(Vec::new());
+ };
+ let responses = request_task.await?.responses;
+ let actions = join_all(
+ responses
+ .into_iter()
+ .filter_map(|lsp_response| match lsp_response.response? {
+ proto::lsp_response::Response::GetDeclarationResponse(response) => {
+ Some(response)
+ }
+ unexpected => {
+ debug_panic!("Unexpected response: {unexpected:?}");
+ None
+ }
+ })
+ .map(|declarations_response| {
+ GetDeclarations { position }.response_from_proto(
+ declarations_response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }),
+ )
+ .await;
+
+ Ok(actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect())
+ })
+ } else {
+ let declarations_task = self.request_multiple_lsp_locally(
+ buffer_handle,
+ Some(position),
+ GetDeclarations { position },
+ cx,
+ );
+ cx.spawn(async move |_, _| {
+ Ok(declarations_task
+ .await
+ .into_iter()
+ .flat_map(|(_, declarations)| declarations)
+ .dedup()
+ .collect())
+ })
+ }
+ }
+
+ pub fn type_definitions(
+ &mut self,
+ buffer_handle: &Entity<Buffer>,
+ position: PointUtf16,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Vec<LocationLink>>> {
+ if let Some((upstream_client, project_id)) = self.upstream_client() {
+ let request_task = upstream_client.request(proto::MultiLspQuery {
+ buffer_id: buffer_handle.read(cx).remote_id().into(),
+ version: serialize_version(&buffer_handle.read(cx).version()),
+ project_id,
+ strategy: Some(proto::multi_lsp_query::Strategy::All(
+ proto::AllLanguageServers {},
+ )),
+ request: Some(proto::multi_lsp_query::Request::GetTypeDefinition(
+ GetTypeDefinitions { position }.to_proto(project_id, buffer_handle.read(cx)),
+ )),
+ });
+ let buffer = buffer_handle.clone();
+ cx.spawn(async move |weak_project, cx| {
+ let Some(project) = weak_project.upgrade() else {
+ return Ok(Vec::new());
+ };
+ let responses = request_task.await?.responses;
+ let actions = join_all(
+ responses
+ .into_iter()
+ .filter_map(|lsp_response| match lsp_response.response? {
+ proto::lsp_response::Response::GetTypeDefinitionResponse(response) => {
+ Some(response)
+ }
+ unexpected => {
+ debug_panic!("Unexpected response: {unexpected:?}");
+ None
+ }
+ })
+ .map(|type_definitions_response| {
+ GetTypeDefinitions { position }.response_from_proto(
+ type_definitions_response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }),
+ )
+ .await;
+
+ Ok(actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect())
+ })
+ } else {
+ let type_definitions_task = self.request_multiple_lsp_locally(
+ buffer_handle,
+ Some(position),
+ GetTypeDefinitions { position },
+ cx,
+ );
+ cx.spawn(async move |_, _| {
+ Ok(type_definitions_task
+ .await
+ .into_iter()
+ .flat_map(|(_, type_definitions)| type_definitions)
+ .dedup()
+ .collect())
+ })
+ }
+ }
+
+ pub fn implementations(
+ &mut self,
+ buffer_handle: &Entity<Buffer>,
+ position: PointUtf16,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Vec<LocationLink>>> {
+ if let Some((upstream_client, project_id)) = self.upstream_client() {
+ let request_task = upstream_client.request(proto::MultiLspQuery {
+ buffer_id: buffer_handle.read(cx).remote_id().into(),
+ version: serialize_version(&buffer_handle.read(cx).version()),
+ project_id,
+ strategy: Some(proto::multi_lsp_query::Strategy::All(
+ proto::AllLanguageServers {},
+ )),
+ request: Some(proto::multi_lsp_query::Request::GetImplementation(
+ GetImplementations { position }.to_proto(project_id, buffer_handle.read(cx)),
+ )),
+ });
+ let buffer = buffer_handle.clone();
+ cx.spawn(async move |weak_project, cx| {
+ let Some(project) = weak_project.upgrade() else {
+ return Ok(Vec::new());
+ };
+ let responses = request_task.await?.responses;
+ let actions = join_all(
+ responses
+ .into_iter()
+ .filter_map(|lsp_response| match lsp_response.response? {
+ proto::lsp_response::Response::GetImplementationResponse(response) => {
+ Some(response)
+ }
+ unexpected => {
+ debug_panic!("Unexpected response: {unexpected:?}");
+ None
+ }
+ })
+ .map(|implementations_response| {
+ GetImplementations { position }.response_from_proto(
+ implementations_response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }),
+ )
+ .await;
+
+ Ok(actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect())
+ })
+ } else {
+ let implementations_task = self.request_multiple_lsp_locally(
+ buffer_handle,
+ Some(position),
+ GetImplementations { position },
+ cx,
+ );
+ cx.spawn(async move |_, _| {
+ Ok(implementations_task
+ .await
+ .into_iter()
+ .flat_map(|(_, implementations)| implementations)
+ .dedup()
+ .collect())
+ })
+ }
+ }
+
+ pub fn references(
+ &mut self,
+ buffer_handle: &Entity<Buffer>,
+ position: PointUtf16,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Vec<Location>>> {
+ if let Some((upstream_client, project_id)) = self.upstream_client() {
+ let request_task = upstream_client.request(proto::MultiLspQuery {
+ buffer_id: buffer_handle.read(cx).remote_id().into(),
+ version: serialize_version(&buffer_handle.read(cx).version()),
+ project_id,
+ strategy: Some(proto::multi_lsp_query::Strategy::All(
+ proto::AllLanguageServers {},
+ )),
+ request: Some(proto::multi_lsp_query::Request::GetReferences(
+ GetReferences { position }.to_proto(project_id, buffer_handle.read(cx)),
+ )),
+ });
+ let buffer = buffer_handle.clone();
+ cx.spawn(async move |weak_project, cx| {
+ let Some(project) = weak_project.upgrade() else {
+ return Ok(Vec::new());
+ };
+ let responses = request_task.await?.responses;
+ let actions = join_all(
+ responses
+ .into_iter()
+ .filter_map(|lsp_response| match lsp_response.response? {
+ proto::lsp_response::Response::GetReferencesResponse(response) => {
+ Some(response)
+ }
+ unexpected => {
+ debug_panic!("Unexpected response: {unexpected:?}");
+ None
+ }
+ })
+ .map(|references_response| {
+ GetReferences { position }.response_from_proto(
+ references_response,
+ project.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ }),
+ )
+ .await;
+
+ Ok(actions
+ .into_iter()
+ .collect::<Result<Vec<Vec<_>>>>()?
+ .into_iter()
+ .flatten()
+ .dedup()
+ .collect())
+ })
+ } else {
+ let references_task = self.request_multiple_lsp_locally(
+ buffer_handle,
+ Some(position),
+ GetReferences { position },
+ cx,
+ );
+ cx.spawn(async move |_, _| {
+ Ok(references_task
+ .await
+ .into_iter()
+ .flat_map(|(_, references)| references)
+ .dedup()
+ .collect())
+ })
+ }
+ }
+
pub fn code_actions(
&mut self,
buffer_handle: &Entity<Buffer>,
@@ -5681,7 +6043,9 @@ impl LspStore {
);
server.request::<lsp::request::ResolveCompletionItem>(*lsp_completion.clone())
}
- CompletionSource::BufferWord { .. } | CompletionSource::Custom => {
+ CompletionSource::BufferWord { .. }
+ | CompletionSource::Dap { .. }
+ | CompletionSource::Custom => {
return Ok(());
}
}
@@ -5833,7 +6197,9 @@ impl LspStore {
}
serde_json::to_string(lsp_completion).unwrap().into_bytes()
}
- CompletionSource::Custom | CompletionSource::BufferWord { .. } => {
+ CompletionSource::Custom
+ | CompletionSource::Dap { .. }
+ | CompletionSource::BufferWord { .. } => {
return Ok(());
}
}
@@ -6504,7 +6870,6 @@ impl LspStore {
.await
.into_iter()
.flat_map(|(_, actions)| actions)
- .filter(|help| !help.label.is_empty())
.collect::<Vec<_>>()
})
}
@@ -7887,6 +8252,200 @@ impl LspStore {
.collect(),
})
}
+ Some(proto::multi_lsp_query::Request::GetDefinition(message)) => {
+ let get_definitions = GetDefinitions::from_proto(
+ message,
+ lsp_store.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ .await?;
+
+ let definitions = lsp_store
+ .update(&mut cx, |project, cx| {
+ project.request_multiple_lsp_locally(
+ &buffer,
+ Some(get_definitions.position),
+ get_definitions,
+ cx,
+ )
+ })?
+ .await
+ .into_iter();
+
+ lsp_store.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
+ responses: definitions
+ .map(|(server_id, definitions)| proto::LspResponse {
+ server_id: server_id.to_proto(),
+ response: Some(proto::lsp_response::Response::GetDefinitionResponse(
+ GetDefinitions::response_to_proto(
+ definitions,
+ project,
+ sender_id,
+ &buffer_version,
+ cx,
+ ),
+ )),
+ })
+ .collect(),
+ })
+ }
+ Some(proto::multi_lsp_query::Request::GetDeclaration(message)) => {
+ let get_declarations = GetDeclarations::from_proto(
+ message,
+ lsp_store.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ .await?;
+
+ let declarations = lsp_store
+ .update(&mut cx, |project, cx| {
+ project.request_multiple_lsp_locally(
+ &buffer,
+ Some(get_declarations.position),
+ get_declarations,
+ cx,
+ )
+ })?
+ .await
+ .into_iter();
+
+ lsp_store.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
+ responses: declarations
+ .map(|(server_id, declarations)| proto::LspResponse {
+ server_id: server_id.to_proto(),
+ response: Some(proto::lsp_response::Response::GetDeclarationResponse(
+ GetDeclarations::response_to_proto(
+ declarations,
+ project,
+ sender_id,
+ &buffer_version,
+ cx,
+ ),
+ )),
+ })
+ .collect(),
+ })
+ }
+ Some(proto::multi_lsp_query::Request::GetTypeDefinition(message)) => {
+ let get_type_definitions = GetTypeDefinitions::from_proto(
+ message,
+ lsp_store.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ .await?;
+
+ let type_definitions = lsp_store
+ .update(&mut cx, |project, cx| {
+ project.request_multiple_lsp_locally(
+ &buffer,
+ Some(get_type_definitions.position),
+ get_type_definitions,
+ cx,
+ )
+ })?
+ .await
+ .into_iter();
+
+ lsp_store.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
+ responses: type_definitions
+ .map(|(server_id, type_definitions)| proto::LspResponse {
+ server_id: server_id.to_proto(),
+ response: Some(
+ proto::lsp_response::Response::GetTypeDefinitionResponse(
+ GetTypeDefinitions::response_to_proto(
+ type_definitions,
+ project,
+ sender_id,
+ &buffer_version,
+ cx,
+ ),
+ ),
+ ),
+ })
+ .collect(),
+ })
+ }
+ Some(proto::multi_lsp_query::Request::GetImplementation(message)) => {
+ let get_implementations = GetImplementations::from_proto(
+ message,
+ lsp_store.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ .await?;
+
+ let implementations = lsp_store
+ .update(&mut cx, |project, cx| {
+ project.request_multiple_lsp_locally(
+ &buffer,
+ Some(get_implementations.position),
+ get_implementations,
+ cx,
+ )
+ })?
+ .await
+ .into_iter();
+
+ lsp_store.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
+ responses: implementations
+ .map(|(server_id, implementations)| proto::LspResponse {
+ server_id: server_id.to_proto(),
+ response: Some(
+ proto::lsp_response::Response::GetImplementationResponse(
+ GetImplementations::response_to_proto(
+ implementations,
+ project,
+ sender_id,
+ &buffer_version,
+ cx,
+ ),
+ ),
+ ),
+ })
+ .collect(),
+ })
+ }
+ Some(proto::multi_lsp_query::Request::GetReferences(message)) => {
+ let get_references = GetReferences::from_proto(
+ message,
+ lsp_store.clone(),
+ buffer.clone(),
+ cx.clone(),
+ )
+ .await?;
+
+ let references = lsp_store
+ .update(&mut cx, |project, cx| {
+ project.request_multiple_lsp_locally(
+ &buffer,
+ Some(get_references.position),
+ get_references,
+ cx,
+ )
+ })?
+ .await
+ .into_iter();
+
+ lsp_store.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
+ responses: references
+ .map(|(server_id, references)| proto::LspResponse {
+ server_id: server_id.to_proto(),
+ response: Some(proto::lsp_response::Response::GetReferencesResponse(
+ GetReferences::response_to_proto(
+ references,
+ project,
+ sender_id,
+ &buffer_version,
+ cx,
+ ),
+ )),
+ })
+ .collect(),
+ })
+ }
None => anyhow::bail!("empty multi lsp query request"),
}
}
@@ -10526,6 +11085,10 @@ impl LspStore {
serialized_completion.source = proto::completion::Source::Custom as i32;
serialized_completion.resolved = true;
}
+ CompletionSource::Dap { sort_text } => {
+ serialized_completion.source = proto::completion::Source::Dap as i32;
+ serialized_completion.sort_text = Some(sort_text.clone());
+ }
}
serialized_completion
@@ -10580,6 +11143,11 @@ impl LspStore {
resolved: completion.resolved,
}
}
+ Some(proto::completion::Source::Dap) => CompletionSource::Dap {
+ sort_text: completion
+ .sort_text
+ .context("expected sort text to exist")?,
+ },
_ => anyhow::bail!("Unexpected completion source {}", completion.source),
},
})
@@ -2,6 +2,7 @@ use std::{
ops::ControlFlow,
path::{Path, PathBuf},
sync::Arc,
+ time::Duration,
};
use anyhow::{Context as _, Result, anyhow};
@@ -527,26 +528,6 @@ impl PrettierStore {
let mut new_plugins = plugins.collect::<HashSet<_>>();
let node = self.node.clone();
- let fs = Arc::clone(&self.fs);
- let locate_prettier_installation = match worktree.and_then(|worktree_id| {
- self.worktree_store
- .read(cx)
- .worktree_for_id(worktree_id, cx)
- .map(|worktree| worktree.read(cx).abs_path())
- }) {
- Some(locate_from) => {
- let installed_prettiers = self.prettier_instances.keys().cloned().collect();
- cx.background_spawn(async move {
- Prettier::locate_prettier_installation(
- fs.as_ref(),
- &installed_prettiers,
- locate_from.as_ref(),
- )
- .await
- })
- }
- None => Task::ready(Ok(ControlFlow::Continue(None))),
- };
new_plugins.retain(|plugin| !self.default_prettier.installed_plugins.contains(plugin));
let mut installation_attempt = 0;
let previous_installation_task = match &mut self.default_prettier.prettier {
@@ -574,15 +555,34 @@ impl PrettierStore {
}
};
- log::info!("Initializing default prettier with plugins {new_plugins:?}");
let plugins_to_install = new_plugins.clone();
let fs = Arc::clone(&self.fs);
let new_installation_task = cx
- .spawn(async move |project, cx| {
- match locate_prettier_installation
+ .spawn(async move |prettier_store, cx| {
+ cx.background_executor().timer(Duration::from_millis(30)).await;
+ let location_data = prettier_store.update(cx, |prettier_store, cx| {
+ worktree.and_then(|worktree_id| {
+ prettier_store.worktree_store
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ }).map(|locate_from| {
+ let installed_prettiers = prettier_store.prettier_instances.keys().cloned().collect();
+ (locate_from, installed_prettiers)
+ })
+ })?;
+ let locate_prettier_installation = match location_data {
+ Some((locate_from, installed_prettiers)) => Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &installed_prettiers,
+ locate_from.as_ref(),
+ )
.await
- .context("locate prettier installation")
- .map_err(Arc::new)?
+ .context("locate prettier installation").map_err(Arc::new)?,
+ None => ControlFlow::Continue(None),
+ };
+
+ match locate_prettier_installation
{
ControlFlow::Break(()) => return Ok(()),
ControlFlow::Continue(prettier_path) => {
@@ -593,8 +593,8 @@ impl PrettierStore {
if let Some(previous_installation_task) = previous_installation_task {
if let Err(e) = previous_installation_task.await {
log::error!("Failed to install default prettier: {e:#}");
- project.update(cx, |project, _| {
- if let PrettierInstallation::NotInstalled { attempts, not_installed_plugins, .. } = &mut project.default_prettier.prettier {
+ prettier_store.update(cx, |prettier_store, _| {
+ if let PrettierInstallation::NotInstalled { attempts, not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier {
*attempts += 1;
new_plugins.extend(not_installed_plugins.iter().cloned());
installation_attempt = *attempts;
@@ -604,8 +604,8 @@ impl PrettierStore {
}
};
if installation_attempt > prettier::FAIL_THRESHOLD {
- project.update(cx, |project, _| {
- if let PrettierInstallation::NotInstalled { installation_task, .. } = &mut project.default_prettier.prettier {
+ prettier_store.update(cx, |prettier_store, _| {
+ if let PrettierInstallation::NotInstalled { installation_task, .. } = &mut prettier_store.default_prettier.prettier {
*installation_task = None;
};
})?;
@@ -614,19 +614,20 @@ impl PrettierStore {
);
return Ok(());
}
- project.update(cx, |project, _| {
+ prettier_store.update(cx, |prettier_store, _| {
new_plugins.retain(|plugin| {
- !project.default_prettier.installed_plugins.contains(plugin)
+ !prettier_store.default_prettier.installed_plugins.contains(plugin)
});
- if let PrettierInstallation::NotInstalled { not_installed_plugins, .. } = &mut project.default_prettier.prettier {
+ if let PrettierInstallation::NotInstalled { not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier {
not_installed_plugins.retain(|plugin| {
- !project.default_prettier.installed_plugins.contains(plugin)
+ !prettier_store.default_prettier.installed_plugins.contains(plugin)
});
not_installed_plugins.extend(new_plugins.iter().cloned());
}
needs_install |= !new_plugins.is_empty();
})?;
if needs_install {
+ log::info!("Initializing default prettier with plugins {new_plugins:?}");
let installed_plugins = new_plugins.clone();
cx.background_spawn(async move {
install_prettier_packages(fs.as_ref(), new_plugins, node).await?;
@@ -637,17 +638,27 @@ impl PrettierStore {
.await
.context("prettier & plugins install")
.map_err(Arc::new)?;
- log::info!("Initialized prettier with plugins: {installed_plugins:?}");
- project.update(cx, |project, _| {
- project.default_prettier.prettier =
+ log::info!("Initialized default prettier with plugins: {installed_plugins:?}");
+ prettier_store.update(cx, |prettier_store, _| {
+ prettier_store.default_prettier.prettier =
PrettierInstallation::Installed(PrettierInstance {
attempt: 0,
prettier: None,
});
- project.default_prettier
+ prettier_store.default_prettier
.installed_plugins
.extend(installed_plugins);
})?;
+ } else {
+ prettier_store.update(cx, |prettier_store, _| {
+ if let PrettierInstallation::NotInstalled { .. } = &mut prettier_store.default_prettier.prettier {
+ prettier_store.default_prettier.prettier =
+ PrettierInstallation::Installed(PrettierInstance {
+ attempt: 0,
+ prettier: None,
+ });
+ }
+ })?;
}
}
}
@@ -767,6 +778,7 @@ pub(super) async fn format_with_prettier(
}
}
+#[derive(Debug)]
pub struct DefaultPrettier {
prettier: PrettierInstallation,
installed_plugins: HashSet<Arc<str>>,
@@ -131,7 +131,8 @@ pub use language::Location;
#[cfg(any(test, feature = "test-support"))]
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use task_inventory::{
- BasicContextProvider, ContextProviderWithTasks, Inventory, TaskContexts, TaskSourceKind,
+ BasicContextProvider, ContextProviderWithTasks, DebugScenarioContext, Inventory, TaskContexts,
+ TaskSourceKind,
};
pub use buffer_store::ProjectTransaction;
@@ -455,6 +456,10 @@ pub enum CompletionSource {
/// Whether this completion has been resolved, to ensure it happens once per completion.
resolved: bool,
},
+ Dap {
+ /// The sort text for this completion.
+ sort_text: String,
+ },
Custom,
BufferWord {
word_range: Range<Anchor>,
@@ -695,7 +700,7 @@ pub struct MarkupContent {
pub value: String,
}
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq)]
pub struct LocationLink {
pub origin: Option<Location>,
pub target: Location,
@@ -2975,6 +2980,20 @@ impl Project {
}),
Err(_) => {}
},
+ SettingsObserverEvent::LocalDebugScenariosUpdated(result) => match result {
+ Err(InvalidSettingsError::Debug { message, path }) => {
+ let message =
+ format!("Failed to set local debug scenarios in {path:?}:\n{message}");
+ cx.emit(Event::Toast {
+ notification_id: format!("local-debug-scenarios-{path:?}").into(),
+ message,
+ });
+ }
+ Ok(path) => cx.emit(Event::HideToast {
+ notification_id: format!("local-debug-scenarios-{path:?}").into(),
+ }),
+ Err(_) => {}
+ },
}
}
@@ -3327,91 +3346,52 @@ impl Project {
})
}
- #[inline(never)]
- fn definition_impl(
- &mut self,
- buffer: &Entity<Buffer>,
- position: PointUtf16,
- cx: &mut Context<Self>,
- ) -> Task<Result<Vec<LocationLink>>> {
- self.request_lsp(
- buffer.clone(),
- LanguageServerToQuery::FirstCapable,
- GetDefinition { position },
- cx,
- )
- }
- pub fn definition<T: ToPointUtf16>(
+ pub fn definitions<T: ToPointUtf16>(
&mut self,
buffer: &Entity<Buffer>,
position: T,
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
- self.definition_impl(buffer, position, cx)
- }
-
- fn declaration_impl(
- &mut self,
- buffer: &Entity<Buffer>,
- position: PointUtf16,
- cx: &mut Context<Self>,
- ) -> Task<Result<Vec<LocationLink>>> {
- self.request_lsp(
- buffer.clone(),
- LanguageServerToQuery::FirstCapable,
- GetDeclaration { position },
- cx,
- )
+ self.lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.definitions(buffer, position, cx)
+ })
}
- pub fn declaration<T: ToPointUtf16>(
+ pub fn declarations<T: ToPointUtf16>(
&mut self,
buffer: &Entity<Buffer>,
position: T,
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
- self.declaration_impl(buffer, position, cx)
- }
-
- fn type_definition_impl(
- &mut self,
- buffer: &Entity<Buffer>,
- position: PointUtf16,
- cx: &mut Context<Self>,
- ) -> Task<Result<Vec<LocationLink>>> {
- self.request_lsp(
- buffer.clone(),
- LanguageServerToQuery::FirstCapable,
- GetTypeDefinition { position },
- cx,
- )
+ self.lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.declarations(buffer, position, cx)
+ })
}
- pub fn type_definition<T: ToPointUtf16>(
+ pub fn type_definitions<T: ToPointUtf16>(
&mut self,
buffer: &Entity<Buffer>,
position: T,
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
- self.type_definition_impl(buffer, position, cx)
+ self.lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.type_definitions(buffer, position, cx)
+ })
}
- pub fn implementation<T: ToPointUtf16>(
+ pub fn implementations<T: ToPointUtf16>(
&mut self,
buffer: &Entity<Buffer>,
position: T,
cx: &mut Context<Self>,
) -> Task<Result<Vec<LocationLink>>> {
let position = position.to_point_utf16(buffer.read(cx));
- self.request_lsp(
- buffer.clone(),
- LanguageServerToQuery::FirstCapable,
- GetImplementation { position },
- cx,
- )
+ self.lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.implementations(buffer, position, cx)
+ })
}
pub fn references<T: ToPointUtf16>(
@@ -3421,12 +3401,9 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Vec<Location>>> {
let position = position.to_point_utf16(buffer.read(cx));
- self.request_lsp(
- buffer.clone(),
- LanguageServerToQuery::FirstCapable,
- GetReferences { position },
- cx,
- )
+ self.lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store.references(buffer, position, cx)
+ })
}
fn document_highlights_impl(
@@ -36,7 +36,6 @@ use crate::{
};
#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)]
-#[schemars(deny_unknown_fields)]
pub struct ProjectSettings {
/// Configuration for language servers.
///
@@ -554,6 +553,7 @@ pub enum SettingsObserverMode {
pub enum SettingsObserverEvent {
LocalSettingsUpdated(Result<PathBuf, InvalidSettingsError>),
LocalTasksUpdated(Result<PathBuf, InvalidSettingsError>),
+ LocalDebugScenariosUpdated(Result<PathBuf, InvalidSettingsError>),
}
impl EventEmitter<SettingsObserverEvent> for SettingsObserver {}
@@ -565,6 +565,7 @@ pub struct SettingsObserver {
project_id: u64,
task_store: Entity<TaskStore>,
_global_task_config_watcher: Task<()>,
+ _global_debug_config_watcher: Task<()>,
}
/// SettingsObserver observes changes to .zed/{settings, task}.json files in local worktrees
@@ -597,6 +598,11 @@ impl SettingsObserver {
paths::tasks_file().clone(),
cx,
),
+ _global_debug_config_watcher: Self::subscribe_to_global_debug_scenarios_changes(
+ fs.clone(),
+ paths::debug_scenarios_file().clone(),
+ cx,
+ ),
}
}
@@ -617,6 +623,11 @@ impl SettingsObserver {
paths::tasks_file().clone(),
cx,
),
+ _global_debug_config_watcher: Self::subscribe_to_global_debug_scenarios_changes(
+ fs.clone(),
+ paths::debug_scenarios_file().clone(),
+ cx,
+ ),
}
}
@@ -1047,6 +1058,61 @@ impl SettingsObserver {
}
})
}
+ fn subscribe_to_global_debug_scenarios_changes(
+ fs: Arc<dyn Fs>,
+ file_path: PathBuf,
+ cx: &mut Context<Self>,
+ ) -> Task<()> {
+ let mut user_tasks_file_rx =
+ watch_config_file(&cx.background_executor(), fs, file_path.clone());
+ let user_tasks_content = cx.background_executor().block(user_tasks_file_rx.next());
+ let weak_entry = cx.weak_entity();
+ cx.spawn(async move |settings_observer, cx| {
+ let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| {
+ settings_observer.task_store.clone()
+ }) else {
+ return;
+ };
+ if let Some(user_tasks_content) = user_tasks_content {
+ let Ok(()) = task_store.update(cx, |task_store, cx| {
+ task_store
+ .update_user_debug_scenarios(
+ TaskSettingsLocation::Global(&file_path),
+ Some(&user_tasks_content),
+ cx,
+ )
+ .log_err();
+ }) else {
+ return;
+ };
+ }
+ while let Some(user_tasks_content) = user_tasks_file_rx.next().await {
+ let Ok(result) = task_store.update(cx, |task_store, cx| {
+ task_store.update_user_debug_scenarios(
+ TaskSettingsLocation::Global(&file_path),
+ Some(&user_tasks_content),
+ cx,
+ )
+ }) else {
+ break;
+ };
+
+ weak_entry
+ .update(cx, |_, cx| match result {
+ Ok(()) => cx.emit(SettingsObserverEvent::LocalDebugScenariosUpdated(Ok(
+ file_path.clone(),
+ ))),
+ Err(err) => cx.emit(SettingsObserverEvent::LocalDebugScenariosUpdated(
+ Err(InvalidSettingsError::Debug {
+ path: file_path.clone(),
+ message: err.to_string(),
+ }),
+ )),
+ })
+ .ok();
+ }
+ })
+ }
}
pub fn local_settings_kind_from_proto(kind: proto::LocalSettingsKind) -> LocalSettingsKind {
@@ -568,7 +568,7 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
.into_iter()
.map(|(source_kind, task)| {
let resolved = task.resolved;
- (source_kind, resolved.command)
+ (source_kind, resolved.command.unwrap())
})
.collect::<Vec<_>>(),
vec![(
@@ -2023,7 +2023,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
SettingsStore::update_global(cx, |settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
- settings.languages.insert(
+ settings.languages.0.insert(
"Rust".into(),
LanguageSettingsContent {
enable_language_server: Some(false),
@@ -2042,14 +2042,14 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
SettingsStore::update_global(cx, |settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
- settings.languages.insert(
+ settings.languages.0.insert(
LanguageName::new("Rust"),
LanguageSettingsContent {
enable_language_server: Some(true),
..Default::default()
},
);
- settings.languages.insert(
+ settings.languages.0.insert(
LanguageName::new("JavaScript"),
LanguageSettingsContent {
enable_language_server: Some(false),
@@ -2993,7 +2993,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
)))
});
let mut definitions = project
- .update(cx, |project, cx| project.definition(&buffer, 22, cx))
+ .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
.await
.unwrap();
@@ -12,7 +12,7 @@ use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque};
use dap::DapRegistry;
use fs::Fs;
-use gpui::{App, AppContext as _, Context, Entity, SharedString, Task};
+use gpui::{App, AppContext as _, Context, Entity, SharedString, Task, WeakEntity};
use itertools::Itertools;
use language::{
Buffer, ContextLocation, ContextProvider, File, Language, LanguageToolchainStore, Location,
@@ -31,11 +31,18 @@ use worktree::WorktreeId;
use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore};
+#[derive(Clone, Debug, Default)]
+pub struct DebugScenarioContext {
+ pub task_context: TaskContext,
+ pub worktree_id: Option<WorktreeId>,
+ pub active_buffer: Option<WeakEntity<Buffer>>,
+}
+
/// Inventory tracks available tasks for a given project.
pub struct Inventory {
fs: Arc<dyn Fs>,
last_scheduled_tasks: VecDeque<(TaskSourceKind, ResolvedTask)>,
- last_scheduled_scenarios: VecDeque<DebugScenario>,
+ last_scheduled_scenarios: VecDeque<(DebugScenario, DebugScenarioContext)>,
templates_from_settings: InventoryFor<TaskTemplate>,
scenarios_from_settings: InventoryFor<DebugScenario>,
}
@@ -245,16 +252,29 @@ impl Inventory {
})
}
- pub fn scenario_scheduled(&mut self, scenario: DebugScenario) {
+ pub fn scenario_scheduled(
+ &mut self,
+ scenario: DebugScenario,
+ task_context: TaskContext,
+ worktree_id: Option<WorktreeId>,
+ active_buffer: Option<WeakEntity<Buffer>>,
+ ) {
self.last_scheduled_scenarios
- .retain(|s| s.label != scenario.label);
- self.last_scheduled_scenarios.push_back(scenario);
+ .retain(|(s, _)| s.label != scenario.label);
+ self.last_scheduled_scenarios.push_back((
+ scenario,
+ DebugScenarioContext {
+ task_context,
+ worktree_id,
+ active_buffer,
+ },
+ ));
if self.last_scheduled_scenarios.len() > 5_000 {
self.last_scheduled_scenarios.pop_front();
}
}
- pub fn last_scheduled_scenario(&self) -> Option<&DebugScenario> {
+ pub fn last_scheduled_scenario(&self) -> Option<&(DebugScenario, DebugScenarioContext)> {
self.last_scheduled_scenarios.back()
}
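
The rerun history above now keeps the `TaskContext`, worktree, and active buffer alongside each scenario. A reduced, hedged model of the bookkeeping (generic parameters stand in for `DebugScenario` and `DebugScenarioContext`; the real code dedupes by scenario label rather than full equality):

```rust
use std::collections::VecDeque;

// Reduced model of the rerun history: each entry pairs a scenario with the
// context it was scheduled under, deduplicated and capped like the code above.
struct ScenarioHistory<S: PartialEq, C> {
    entries: VecDeque<(S, C)>,
}

impl<S: PartialEq, C> ScenarioHistory<S, C> {
    fn schedule(&mut self, scenario: S, context: C) {
        self.entries.retain(|(existing, _)| *existing != scenario);
        self.entries.push_back((scenario, context));
        if self.entries.len() > 5_000 {
            self.entries.pop_front();
        }
    }

    fn last(&self) -> Option<&(S, C)> {
        self.entries.back()
    }
}

fn main() {
    let mut history = ScenarioHistory { entries: VecDeque::new() };
    history.schedule("debug tests", "worktree A");
    history.schedule("debug tests", "worktree B"); // replaces the older entry
    assert_eq!(history.last(), Some(&("debug tests", "worktree B")));
    assert_eq!(history.entries.len(), 1);
}
```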
@@ -265,7 +285,10 @@ impl Inventory {
current_resolved_tasks: Vec<(TaskSourceKind, task::ResolvedTask)>,
add_current_language_tasks: bool,
cx: &mut App,
- ) -> Task<(Vec<DebugScenario>, Vec<(TaskSourceKind, DebugScenario)>)> {
+ ) -> Task<(
+ Vec<(DebugScenario, DebugScenarioContext)>,
+ Vec<(TaskSourceKind, DebugScenario)>,
+ )> {
let mut scenarios = Vec::new();
if let Some(worktree_id) = task_contexts
@@ -765,7 +788,7 @@ impl Inventory {
}
}
}
- self.last_scheduled_scenarios.retain_mut(|scenario| {
+ self.last_scheduled_scenarios.retain_mut(|(scenario, _)| {
if !previously_existing_scenarios.contains(&scenario.label) {
return true;
}
@@ -1304,7 +1327,7 @@ mod tests {
.clone();
inventory.update(cx, |this, _| {
- this.scenario_scheduled(scenario.clone());
+ this.scenario_scheduled(scenario.clone(), TaskContext::default(), None, None);
});
assert_eq!(
@@ -1316,7 +1339,8 @@ mod tests {
.0
.first()
.unwrap()
- .clone(),
+ .clone()
+ .0,
scenario
);
@@ -1346,6 +1370,7 @@ mod tests {
.0
.first()
.unwrap()
+ .0
.adapter,
"Delve",
);
@@ -1367,15 +1392,14 @@ mod tests {
.unwrap();
});
- assert_eq!(
+ assert!(
inventory
.update(cx, |this, cx| {
this.list_debug_scenarios(&TaskContexts::default(), vec![], vec![], false, cx)
})
.await
.0
- .first(),
- None
+ .is_empty(),
);
}
@@ -149,7 +149,7 @@ impl Project {
let settings = self.terminal_settings(&path, cx).clone();
let builder = ShellBuilder::new(ssh_details.is_none(), &settings.shell).non_interactive();
- let (command, args) = builder.build(command, &Vec::new());
+ let (command, args) = builder.build(Some(command), &Vec::new());
let mut env = self
.environment
@@ -297,7 +297,10 @@ impl Project {
.or_insert_with(|| "xterm-256color".to_string());
let (program, args) = wrap_for_ssh(
&ssh_command,
- Some((&spawn_task.command, &spawn_task.args)),
+ spawn_task
+ .command
+ .as_ref()
+ .map(|command| (command, &spawn_task.args)),
path.as_deref(),
env,
python_venv_directory.as_deref(),
@@ -317,14 +320,16 @@ impl Project {
add_environment_path(&mut env, &venv_path.join("bin")).log_err();
}
- (
- task_state,
+ let shell = if let Some(program) = spawn_task.command {
Shell::WithArguments {
- program: spawn_task.command,
+ program,
args: spawn_task.args,
title_override: None,
- },
- )
+ }
+ } else {
+ Shell::System
+ };
+ (task_state, shell)
}
}
}
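
A hedged restatement of the mapping introduced above, as a standalone function: a spawn task with no command now falls back to the system shell instead of assuming a program is present (only the `Shell` variants visible in the diff are assumed):

```rust
// Illustrative only: mirror the Some/None handling of the now-optional command.
fn shell_for(command: Option<String>, args: Vec<String>) -> Shell {
    match command {
        Some(program) => Shell::WithArguments {
            program,
            args,
            title_override: None,
        },
        None => Shell::System,
    }
}
```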
@@ -56,7 +56,7 @@ use theme::ThemeSettings;
use ui::{
Color, ContextMenu, DecoratedIcon, Icon, IconDecoration, IconDecorationKind, IndentGuideColors,
IndentGuideLayout, KeyBinding, Label, LabelSize, ListItem, ListItemSpacing, Scrollbar,
- ScrollbarState, Tooltip, prelude::*, v_flex,
+ ScrollbarState, StickyCandidate, Tooltip, prelude::*, v_flex,
};
use util::{ResultExt, TakeUntilExt, TryFutureExt, maybe, paths::compare_paths};
use workspace::{
@@ -173,6 +173,7 @@ struct EntryDetails {
is_editing: bool,
is_processing: bool,
is_cut: bool,
+ sticky: Option<StickyDetails>,
filename_text_color: Color,
diagnostic_severity: Option<DiagnosticSeverity>,
git_status: GitSummary,
@@ -181,6 +182,12 @@ struct EntryDetails {
canonical_path: Option<Arc<Path>>,
}
+#[derive(Debug, PartialEq, Eq, Clone)]
+struct StickyDetails {
+ sticky_index: usize,
+}
+
+/// Permanently deletes the selected file or directory.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = project_panel)]
#[serde(deny_unknown_fields)]
@@ -189,6 +196,7 @@ struct Delete {
pub skip_prompt: bool,
}
+/// Moves the selected file or directory to the system trash.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = project_panel)]
#[serde(deny_unknown_fields)]
@@ -200,32 +208,59 @@ struct Trash {
actions!(
project_panel,
[
+ /// Expands the selected entry in the project tree.
ExpandSelectedEntry,
+ /// Collapses the selected entry in the project tree.
CollapseSelectedEntry,
+ /// Collapses all entries in the project tree.
CollapseAllEntries,
+ /// Creates a new directory.
NewDirectory,
+ /// Creates a new file.
NewFile,
+ /// Copies the selected file or directory.
Copy,
+ /// Duplicates the selected file or directory.
Duplicate,
+ /// Reveals the selected item in the system file manager.
RevealInFileManager,
+ /// Removes the selected folder from the project.
RemoveFromProject,
+ /// Opens the selected file with the system's default application.
OpenWithSystem,
+ /// Cuts the selected file or directory.
Cut,
+ /// Pastes the previously cut or copied item.
Paste,
+ /// Renames the selected file or directory.
Rename,
+ /// Opens the selected file in the editor.
Open,
+ /// Opens the selected file in a permanent tab.
OpenPermanent,
+ /// Toggles focus on the project panel.
ToggleFocus,
+ /// Toggles visibility of git-ignored files.
ToggleHideGitIgnore,
+ /// Starts a new search in the selected directory.
NewSearchInDirectory,
+ /// Unfolds the selected directory.
UnfoldDirectory,
+ /// Folds the selected directory.
FoldDirectory,
+ /// Selects the parent directory.
SelectParent,
+ /// Selects the next entry with git changes.
SelectNextGitEntry,
+ /// Selects the previous entry with git changes.
SelectPrevGitEntry,
+ /// Selects the next entry with diagnostics.
SelectNextDiagnostic,
+ /// Selects the previous entry with diagnostics.
SelectPrevDiagnostic,
+ /// Selects the next directory.
SelectNextDirectory,
+ /// Selects the previous directory.
SelectPrevDirectory,
]
);
@@ -820,13 +855,11 @@ impl ProjectPanel {
.action("Copy", Box::new(Copy))
.action("Duplicate", Box::new(Duplicate))
            // TODO: Paste should always be visible, but disabled when clipboard is empty
- .map(|menu| {
- if self.clipboard.as_ref().is_some() {
- menu.action("Paste", Box::new(Paste))
- } else {
- menu.disabled_action("Paste", Box::new(Paste))
- }
- })
+ .action_disabled_when(
+ self.clipboard.as_ref().is_none(),
+ "Paste",
+ Box::new(Paste),
+ )
.separator()
.action("Copy Path", Box::new(zed_actions::workspace::CopyPath))
.action(
@@ -3276,12 +3309,13 @@ impl ProjectPanel {
fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, GitEntryRef<'_>)> {
let mut offset = 0;
for (worktree_id, visible_worktree_entries, _) in &self.visible_entries {
- if visible_worktree_entries.len() > offset + index {
+ let current_len = visible_worktree_entries.len();
+ if index < offset + current_len {
return visible_worktree_entries
- .get(index)
+ .get(index - offset)
.map(|entry| (*worktree_id, entry.to_ref()));
}
- offset += visible_worktree_entries.len();
+ offset += current_len;
}
None
}
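
The fix above switches the lookup from a global index to a worktree-local one; a self-contained illustration of the corrected offset arithmetic (hypothetical helper, with plain lengths standing in for visible worktree entries):

```rust
// With worktree lengths [3, 4], global index 5 resolves to (worktree 1, local index 2);
// the previous code indexed with the global index and could return the wrong entry.
fn resolve(global_index: usize, worktree_lens: &[usize]) -> Option<(usize, usize)> {
    let mut offset = 0;
    for (worktree_ix, len) in worktree_lens.iter().enumerate() {
        if global_index < offset + len {
            return Some((worktree_ix, global_index - offset));
        }
        offset += len;
    }
    None
}
// resolve(5, &[3, 4]) == Some((1, 2))
```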
@@ -3338,22 +3372,13 @@ impl ProjectPanel {
}
let end_ix = range.end.min(ix + visible_worktree_entries.len());
- let (git_status_setting, show_file_icons, show_folder_icons) = {
+ let git_status_setting = {
let settings = ProjectPanelSettings::get_global(cx);
- (
- settings.git_status,
- settings.file_icons,
- settings.folder_icons,
- )
+ settings.git_status
};
if let Some(worktree) = self.project.read(cx).worktree_for_id(*worktree_id, cx) {
let snapshot = worktree.read(cx).snapshot();
let root_name = OsStr::new(snapshot.root_name());
- let expanded_entry_ids = self
- .expanded_dir_ids
- .get(&snapshot.id())
- .map(Vec::as_slice)
- .unwrap_or(&[]);
let entry_range = range.start.saturating_sub(ix)..end_ix - ix;
let entries = entries_paths.get_or_init(|| {
@@ -3366,80 +3391,17 @@ impl ProjectPanel {
let status = git_status_setting
.then_some(entry.git_summary)
.unwrap_or_default();
- let is_expanded = expanded_entry_ids.binary_search(&entry.id).is_ok();
- let icon = match entry.kind {
- EntryKind::File => {
- if show_file_icons {
- FileIcons::get_icon(&entry.path, cx)
- } else {
- None
- }
- }
- _ => {
- if show_folder_icons {
- FileIcons::get_folder_icon(is_expanded, cx)
- } else {
- FileIcons::get_chevron_icon(is_expanded, cx)
- }
- }
- };
- let (depth, difference) =
- ProjectPanel::calculate_depth_and_difference(&entry, entries);
-
- let filename = match difference {
- diff if diff > 1 => entry
- .path
- .iter()
- .skip(entry.path.components().count() - diff)
- .collect::<PathBuf>()
- .to_str()
- .unwrap_or_default()
- .to_string(),
- _ => entry
- .path
- .file_name()
- .map(|name| name.to_string_lossy().into_owned())
- .unwrap_or_else(|| root_name.to_string_lossy().to_string()),
- };
- let selection = SelectedEntry {
- worktree_id: snapshot.id(),
- entry_id: entry.id,
- };
-
- let is_marked = self.marked_entries.contains(&selection);
-
- let diagnostic_severity = self
- .diagnostics
- .get(&(*worktree_id, entry.path.to_path_buf()))
- .cloned();
-
- let filename_text_color =
- entry_git_aware_label_color(status, entry.is_ignored, is_marked);
-
- let mut details = EntryDetails {
- filename,
- icon,
- path: entry.path.clone(),
- depth,
- kind: entry.kind,
- is_ignored: entry.is_ignored,
- is_expanded,
- is_selected: self.selection == Some(selection),
- is_marked,
- is_editing: false,
- is_processing: false,
- is_cut: self
- .clipboard
- .as_ref()
- .map_or(false, |e| e.is_cut() && e.items().contains(&selection)),
- filename_text_color,
- diagnostic_severity,
- git_status: status,
- is_private: entry.is_private,
- worktree_id: *worktree_id,
- canonical_path: entry.canonical_path.clone(),
- };
+ let mut details = self.details_for_entry(
+ entry,
+ *worktree_id,
+ root_name,
+ entries,
+ status,
+ None,
+ window,
+ cx,
+ );
if let Some(edit_state) = &self.edit_state {
let is_edited_entry = if edit_state.is_new_entry() {
@@ -3851,6 +3813,8 @@ impl ProjectPanel {
const GROUP_NAME: &str = "project_entry";
let kind = details.kind;
+ let is_sticky = details.sticky.is_some();
+ let sticky_index = details.sticky.as_ref().map(|this| this.sticky_index);
let settings = ProjectPanelSettings::get_global(cx);
let show_editor = details.is_editing && !details.is_processing;
@@ -3974,141 +3938,144 @@ impl ProjectPanel {
.border_r_2()
.border_color(border_color)
.hover(|style| style.bg(bg_hover_color).border_color(border_hover_color))
- .on_drag_move::<ExternalPaths>(cx.listener(
- move |this, event: &DragMoveEvent<ExternalPaths>, _, cx| {
- let is_current_target = this.drag_target_entry.as_ref()
- .map(|entry| entry.entry_id) == Some(entry_id);
-
- if !event.bounds.contains(&event.event.position) {
- // Entry responsible for setting drag target is also responsible to
- // clear it up after drag is out of bounds
+ .when(!is_sticky, |this| {
+ this
+ .when(is_highlighted && folded_directory_drag_target.is_none(), |this| this.border_color(transparent_white()).bg(item_colors.drag_over))
+ .on_drag_move::<ExternalPaths>(cx.listener(
+ move |this, event: &DragMoveEvent<ExternalPaths>, _, cx| {
+ let is_current_target = this.drag_target_entry.as_ref()
+ .map(|entry| entry.entry_id) == Some(entry_id);
+
+ if !event.bounds.contains(&event.event.position) {
+ // Entry responsible for setting drag target is also responsible to
+ // clear it up after drag is out of bounds
+ if is_current_target {
+ this.drag_target_entry = None;
+ }
+ return;
+ }
+
if is_current_target {
- this.drag_target_entry = None;
+ return;
}
- return;
- }
- if is_current_target {
- return;
- }
+ let Some((entry_id, highlight_entry_id)) = maybe!({
+ let target_worktree = this.project.read(cx).worktree_for_id(selection.worktree_id, cx)?.read(cx);
+ let target_entry = target_worktree.entry_for_path(&path_for_external_paths)?;
+ let highlight_entry_id = this.highlight_entry_for_external_drag(target_entry, target_worktree);
+ Some((target_entry.id, highlight_entry_id))
+ }) else {
+ return;
+ };
- let Some((entry_id, highlight_entry_id)) = maybe!({
- let target_worktree = this.project.read(cx).worktree_for_id(selection.worktree_id, cx)?.read(cx);
- let target_entry = target_worktree.entry_for_path(&path_for_external_paths)?;
- let highlight_entry_id = this.highlight_entry_for_external_drag(target_entry, target_worktree);
- Some((target_entry.id, highlight_entry_id))
- }) else {
- return;
- };
+ this.drag_target_entry = Some(DragTargetEntry {
+ entry_id,
+ highlight_entry_id,
+ });
+ this.marked_entries.clear();
+ },
+ ))
+ .on_drop(cx.listener(
+ move |this, external_paths: &ExternalPaths, window, cx| {
+ this.drag_target_entry = None;
+ this.hover_scroll_task.take();
+ this.drop_external_files(external_paths.paths(), entry_id, window, cx);
+ cx.stop_propagation();
+ },
+ ))
+ .on_drag_move::<DraggedSelection>(cx.listener(
+ move |this, event: &DragMoveEvent<DraggedSelection>, window, cx| {
+ let is_current_target = this.drag_target_entry.as_ref()
+ .map(|entry| entry.entry_id) == Some(entry_id);
+
+ if !event.bounds.contains(&event.event.position) {
+ // Entry responsible for setting drag target is also responsible to
+ // clear it up after drag is out of bounds
+ if is_current_target {
+ this.drag_target_entry = None;
+ }
+ return;
+ }
- this.drag_target_entry = Some(DragTargetEntry {
- entry_id,
- highlight_entry_id,
- });
- this.marked_entries.clear();
- },
- ))
- .on_drop(cx.listener(
- move |this, external_paths: &ExternalPaths, window, cx| {
- this.drag_target_entry = None;
- this.hover_scroll_task.take();
- this.drop_external_files(external_paths.paths(), entry_id, window, cx);
- cx.stop_propagation();
- },
- ))
- .on_drag_move::<DraggedSelection>(cx.listener(
- move |this, event: &DragMoveEvent<DraggedSelection>, window, cx| {
- let is_current_target = this.drag_target_entry.as_ref()
- .map(|entry| entry.entry_id) == Some(entry_id);
-
- if !event.bounds.contains(&event.event.position) {
- // Entry responsible for setting drag target is also responsible to
- // clear it up after drag is out of bounds
if is_current_target {
- this.drag_target_entry = None;
+ return;
}
- return;
- }
-
- if is_current_target {
- return;
- }
- let drag_state = event.drag(cx);
- let Some((entry_id, highlight_entry_id)) = maybe!({
- let target_worktree = this.project.read(cx).worktree_for_id(selection.worktree_id, cx)?.read(cx);
- let target_entry = target_worktree.entry_for_path(&path_for_dragged_selection)?;
- let highlight_entry_id = this.highlight_entry_for_selection_drag(target_entry, target_worktree, drag_state, cx);
- Some((target_entry.id, highlight_entry_id))
- }) else {
- return;
- };
+ let drag_state = event.drag(cx);
+ let Some((entry_id, highlight_entry_id)) = maybe!({
+ let target_worktree = this.project.read(cx).worktree_for_id(selection.worktree_id, cx)?.read(cx);
+ let target_entry = target_worktree.entry_for_path(&path_for_dragged_selection)?;
+ let highlight_entry_id = this.highlight_entry_for_selection_drag(target_entry, target_worktree, drag_state, cx);
+ Some((target_entry.id, highlight_entry_id))
+ }) else {
+ return;
+ };
- this.drag_target_entry = Some(DragTargetEntry {
- entry_id,
- highlight_entry_id,
- });
- if drag_state.items().count() == 1 {
- this.marked_entries.clear();
- this.marked_entries.insert(drag_state.active_selection);
- }
- this.hover_expand_task.take();
+ this.drag_target_entry = Some(DragTargetEntry {
+ entry_id,
+ highlight_entry_id,
+ });
+ if drag_state.items().count() == 1 {
+ this.marked_entries.clear();
+ this.marked_entries.insert(drag_state.active_selection);
+ }
+ this.hover_expand_task.take();
- if !kind.is_dir()
- || this
- .expanded_dir_ids
- .get(&details.worktree_id)
- .map_or(false, |ids| ids.binary_search(&entry_id).is_ok())
- {
- return;
- }
+ if !kind.is_dir()
+ || this
+ .expanded_dir_ids
+ .get(&details.worktree_id)
+ .map_or(false, |ids| ids.binary_search(&entry_id).is_ok())
+ {
+ return;
+ }
- let bounds = event.bounds;
- this.hover_expand_task =
- Some(cx.spawn_in(window, async move |this, cx| {
- cx.background_executor()
- .timer(Duration::from_millis(500))
- .await;
- this.update_in(cx, |this, window, cx| {
- this.hover_expand_task.take();
- if this.drag_target_entry.as_ref().map(|entry| entry.entry_id) == Some(entry_id)
- && bounds.contains(&window.mouse_position())
- {
- this.expand_entry(worktree_id, entry_id, cx);
- this.update_visible_entries(
- Some((worktree_id, entry_id)),
- cx,
- );
- cx.notify();
- }
- })
- .ok();
- }));
- },
- ))
- .on_drag(
- dragged_selection,
- move |selection, click_offset, _window, cx| {
- cx.new(|_| DraggedProjectEntryView {
- details: details.clone(),
- click_offset,
- selection: selection.active_selection,
- selections: selection.marked_selections.clone(),
- })
- },
- )
- .when(is_highlighted && folded_directory_drag_target.is_none(), |this| this.border_color(transparent_white()).bg(item_colors.drag_over))
- .on_drop(
- cx.listener(move |this, selections: &DraggedSelection, window, cx| {
- this.drag_target_entry = None;
- this.hover_scroll_task.take();
- this.hover_expand_task.take();
- if folded_directory_drag_target.is_some() {
- return;
- }
- this.drag_onto(selections, entry_id, kind.is_file(), window, cx);
- }),
- )
+ let bounds = event.bounds;
+ this.hover_expand_task =
+ Some(cx.spawn_in(window, async move |this, cx| {
+ cx.background_executor()
+ .timer(Duration::from_millis(500))
+ .await;
+ this.update_in(cx, |this, window, cx| {
+ this.hover_expand_task.take();
+ if this.drag_target_entry.as_ref().map(|entry| entry.entry_id) == Some(entry_id)
+ && bounds.contains(&window.mouse_position())
+ {
+ this.expand_entry(worktree_id, entry_id, cx);
+ this.update_visible_entries(
+ Some((worktree_id, entry_id)),
+ cx,
+ );
+ cx.notify();
+ }
+ })
+ .ok();
+ }));
+ },
+ ))
+ .on_drag(
+ dragged_selection,
+ move |selection, click_offset, _window, cx| {
+ cx.new(|_| DraggedProjectEntryView {
+ details: details.clone(),
+ click_offset,
+ selection: selection.active_selection,
+ selections: selection.marked_selections.clone(),
+ })
+ },
+ )
+ .on_drop(
+ cx.listener(move |this, selections: &DraggedSelection, window, cx| {
+ this.drag_target_entry = None;
+ this.hover_scroll_task.take();
+ this.hover_expand_task.take();
+ if folded_directory_drag_target.is_some() {
+ return;
+ }
+ this.drag_onto(selections, entry_id, kind.is_file(), window, cx);
+ }),
+ )
+ })
.on_mouse_down(
MouseButton::Left,
cx.listener(move |this, _, _, cx| {
@@ -4140,7 +4107,7 @@ impl ProjectPanel {
current_selection.zip(target_selection)
{
let range_start = source_index.min(target_index);
- let range_end = source_index.max(target_index) + 1; // Make the range inclusive.
+ let range_end = source_index.max(target_index) + 1;
let mut new_selections = BTreeSet::new();
this.for_each_visible_entry(
range_start..range_end,
@@ -4186,6 +4153,16 @@ impl ProjectPanel {
let allow_preview = preview_tabs_enabled && click_count == 1;
this.open_entry(entry_id, focus_opened_item, allow_preview, cx);
}
+
+ if is_sticky {
+ if let Some((_, _, index)) = this.index_for_entry(entry_id, worktree_id) {
+ let strategy = sticky_index
+ .map(ScrollStrategy::ToPosition)
+ .unwrap_or(ScrollStrategy::Top);
+ this.scroll_handle.scroll_to_item(index, strategy);
+ cx.notify();
+ }
+ }
}),
)
.child(
@@ -4300,51 +4277,99 @@ impl ProjectPanel {
let target_entry_id = folded_ancestors.ancestors.get(components_len - 1 - delimiter_target_index).cloned();
this = this.child(
div()
- .on_drop(cx.listener(move |this, selections: &DraggedSelection, window, cx| {
- this.hover_scroll_task.take();
- this.drag_target_entry = None;
- this.folded_directory_drag_target = None;
- if let Some(target_entry_id) = target_entry_id {
- this.drag_onto(selections, target_entry_id, kind.is_file(), window, cx);
- }
- }))
+ .when(!is_sticky, |div| {
+ div
+ .on_drop(cx.listener(move |this, selections: &DraggedSelection, window, cx| {
+ this.hover_scroll_task.take();
+ this.drag_target_entry = None;
+ this.folded_directory_drag_target = None;
+ if let Some(target_entry_id) = target_entry_id {
+ this.drag_onto(selections, target_entry_id, kind.is_file(), window, cx);
+ }
+ }))
+ .on_drag_move(cx.listener(
+ move |this, event: &DragMoveEvent<DraggedSelection>, _, _| {
+ if event.bounds.contains(&event.event.position) {
+ this.folded_directory_drag_target = Some(
+ FoldedDirectoryDragTarget {
+ entry_id,
+ index: delimiter_target_index,
+ is_delimiter_target: true,
+ }
+ );
+ } else {
+ let is_current_target = this.folded_directory_drag_target
+ .map_or(false, |target|
+ target.entry_id == entry_id &&
+ target.index == delimiter_target_index &&
+ target.is_delimiter_target
+ );
+ if is_current_target {
+ this.folded_directory_drag_target = None;
+ }
+ }
+
+ },
+ ))
+ })
+ .child(
+ Label::new(DELIMITER.clone())
+ .single_line()
+ .color(filename_text_color)
+ )
+ );
+ }
+ let id = SharedString::from(format!(
+ "project_panel_path_component_{}_{index}",
+ entry_id.to_usize()
+ ));
+ let label = div()
+ .id(id)
+            .when(!is_sticky, |div| {
+ div
+ .when(index != components_len - 1, |div|{
+ let target_entry_id = folded_ancestors.ancestors.get(components_len - 1 - index).cloned();
+ div
.on_drag_move(cx.listener(
move |this, event: &DragMoveEvent<DraggedSelection>, _, _| {
- if event.bounds.contains(&event.event.position) {
+ if event.bounds.contains(&event.event.position) {
this.folded_directory_drag_target = Some(
FoldedDirectoryDragTarget {
entry_id,
- index: delimiter_target_index,
- is_delimiter_target: true,
+ index,
+ is_delimiter_target: false,
}
);
} else {
let is_current_target = this.folded_directory_drag_target
+ .as_ref()
.map_or(false, |target|
target.entry_id == entry_id &&
- target.index == delimiter_target_index &&
- target.is_delimiter_target
+ target.index == index &&
+ !target.is_delimiter_target
);
if is_current_target {
this.folded_directory_drag_target = None;
}
}
-
},
))
- .child(
- Label::new(DELIMITER.clone())
- .single_line()
- .color(filename_text_color)
- )
- );
- }
- let id = SharedString::from(format!(
- "project_panel_path_component_{}_{index}",
- entry_id.to_usize()
- ));
- let label = div()
- .id(id)
+ .on_drop(cx.listener(move |this, selections: &DraggedSelection, window,cx| {
+ this.hover_scroll_task.take();
+ this.drag_target_entry = None;
+ this.folded_directory_drag_target = None;
+ if let Some(target_entry_id) = target_entry_id {
+ this.drag_onto(selections, target_entry_id, kind.is_file(), window, cx);
+ }
+ }))
+ .when(folded_directory_drag_target.map_or(false, |target|
+ target.entry_id == entry_id &&
+ target.index == index
+ ), |this| {
+ this.bg(item_colors.drag_over)
+ })
+ })
+ })
.on_click(cx.listener(move |this, _, _, cx| {
if index != active_index {
if let Some(folds) =
@@ -4356,48 +4381,6 @@ impl ProjectPanel {
}
}
}))
- .when(index != components_len - 1, |div|{
- let target_entry_id = folded_ancestors.ancestors.get(components_len - 1 - index).cloned();
- div
- .on_drag_move(cx.listener(
- move |this, event: &DragMoveEvent<DraggedSelection>, _, _| {
- if event.bounds.contains(&event.event.position) {
- this.folded_directory_drag_target = Some(
- FoldedDirectoryDragTarget {
- entry_id,
- index,
- is_delimiter_target: false,
- }
- );
- } else {
- let is_current_target = this.folded_directory_drag_target
- .as_ref()
- .map_or(false, |target|
- target.entry_id == entry_id &&
- target.index == index &&
- !target.is_delimiter_target
- );
- if is_current_target {
- this.folded_directory_drag_target = None;
- }
- }
- },
- ))
- .on_drop(cx.listener(move |this, selections: &DraggedSelection, window,cx| {
- this.hover_scroll_task.take();
- this.drag_target_entry = None;
- this.folded_directory_drag_target = None;
- if let Some(target_entry_id) = target_entry_id {
- this.drag_onto(selections, target_entry_id, kind.is_file(), window, cx);
- }
- }))
- .when(folded_directory_drag_target.map_or(false, |target|
- target.entry_id == entry_id &&
- target.index == index
- ), |this| {
- this.bg(item_colors.drag_over)
- })
- })
.child(
Label::new(component)
.single_line()
@@ -4469,6 +4452,108 @@ impl ProjectPanel {
)
}
+ fn details_for_entry(
+ &self,
+ entry: &Entry,
+ worktree_id: WorktreeId,
+ root_name: &OsStr,
+ entries_paths: &HashSet<Arc<Path>>,
+ git_status: GitSummary,
+ sticky: Option<StickyDetails>,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> EntryDetails {
+ let (show_file_icons, show_folder_icons) = {
+ let settings = ProjectPanelSettings::get_global(cx);
+ (settings.file_icons, settings.folder_icons)
+ };
+
+ let expanded_entry_ids = self
+ .expanded_dir_ids
+ .get(&worktree_id)
+ .map(Vec::as_slice)
+ .unwrap_or(&[]);
+ let is_expanded = expanded_entry_ids.binary_search(&entry.id).is_ok();
+
+ let icon = match entry.kind {
+ EntryKind::File => {
+ if show_file_icons {
+ FileIcons::get_icon(&entry.path, cx)
+ } else {
+ None
+ }
+ }
+ _ => {
+ if show_folder_icons {
+ FileIcons::get_folder_icon(is_expanded, cx)
+ } else {
+ FileIcons::get_chevron_icon(is_expanded, cx)
+ }
+ }
+ };
+
+ let (depth, difference) =
+ ProjectPanel::calculate_depth_and_difference(&entry, entries_paths);
+
+ let filename = match difference {
+ diff if diff > 1 => entry
+ .path
+ .iter()
+ .skip(entry.path.components().count() - diff)
+ .collect::<PathBuf>()
+ .to_str()
+ .unwrap_or_default()
+ .to_string(),
+ _ => entry
+ .path
+ .file_name()
+ .map(|name| name.to_string_lossy().into_owned())
+ .unwrap_or_else(|| root_name.to_string_lossy().to_string()),
+ };
+
+ let selection = SelectedEntry {
+ worktree_id,
+ entry_id: entry.id,
+ };
+ let is_marked = self.marked_entries.contains(&selection);
+ let is_selected = self.selection == Some(selection);
+
+ let diagnostic_severity = self
+ .diagnostics
+ .get(&(worktree_id, entry.path.to_path_buf()))
+ .cloned();
+
+ let filename_text_color =
+ entry_git_aware_label_color(git_status, entry.is_ignored, is_marked);
+
+ let is_cut = self
+ .clipboard
+ .as_ref()
+ .map_or(false, |e| e.is_cut() && e.items().contains(&selection));
+
+ EntryDetails {
+ filename,
+ icon,
+ path: entry.path.clone(),
+ depth,
+ kind: entry.kind,
+ is_ignored: entry.is_ignored,
+ is_expanded,
+ is_selected,
+ is_marked,
+ is_editing: false,
+ is_processing: false,
+ is_cut,
+ sticky,
+ filename_text_color,
+ diagnostic_severity,
+ git_status,
+ is_private: entry.is_private,
+ worktree_id,
+ canonical_path: entry.canonical_path.clone(),
+ }
+ }
+
fn render_vertical_scrollbar(&self, cx: &mut Context<Self>) -> Option<Stateful<Div>> {
if !Self::should_show_scrollbar(cx)
|| !(self.show_scrollbar || self.vertical_scrollbar_state.is_dragging())
@@ -4723,6 +4808,156 @@ impl ProjectPanel {
}
None
}
+
+ fn candidate_entries_in_range_for_sticky(
+ &self,
+ range: Range<usize>,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) -> Vec<StickyProjectPanelCandidate> {
+ let mut result = Vec::new();
+ let mut current_offset = 0;
+
+ for (_, visible_worktree_entries, entries_paths) in &self.visible_entries {
+ let worktree_len = visible_worktree_entries.len();
+ let worktree_end_offset = current_offset + worktree_len;
+
+ if current_offset >= range.end {
+ break;
+ }
+
+ if worktree_end_offset > range.start {
+ let local_start = range.start.saturating_sub(current_offset);
+ let local_end = range.end.saturating_sub(current_offset).min(worktree_len);
+
+ let paths = entries_paths.get_or_init(|| {
+ visible_worktree_entries
+ .iter()
+ .map(|e| e.path.clone())
+ .collect()
+ });
+
+ let entries_from_this_worktree = visible_worktree_entries[local_start..local_end]
+ .iter()
+ .enumerate()
+ .map(|(i, entry)| {
+ let (depth, _) = Self::calculate_depth_and_difference(&entry.entry, paths);
+ StickyProjectPanelCandidate {
+ index: current_offset + local_start + i,
+ depth,
+ }
+ });
+
+ result.extend(entries_from_this_worktree);
+ }
+
+ current_offset = worktree_end_offset;
+ }
+
+ result
+ }
+
+ fn render_sticky_entries(
+ &self,
+ child: StickyProjectPanelCandidate,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> SmallVec<[AnyElement; 8]> {
+ let project = self.project.read(cx);
+
+ let Some((worktree_id, entry_ref)) = self.entry_at_index(child.index) else {
+ return SmallVec::new();
+ };
+
+ let Some((_, visible_worktree_entries, entries_paths)) = self
+ .visible_entries
+ .iter()
+ .find(|(id, _, _)| *id == worktree_id)
+ else {
+ return SmallVec::new();
+ };
+
+ let Some(worktree) = project.worktree_for_id(worktree_id, cx) else {
+ return SmallVec::new();
+ };
+ let worktree = worktree.read(cx).snapshot();
+
+ let paths = entries_paths.get_or_init(|| {
+ visible_worktree_entries
+ .iter()
+ .map(|e| e.path.clone())
+ .collect()
+ });
+
+ let mut sticky_parents = Vec::new();
+ let mut current_path = entry_ref.path.clone();
+
+ 'outer: loop {
+ if let Some(parent_path) = current_path.parent() {
+ for ancestor_path in parent_path.ancestors() {
+ if paths.contains(ancestor_path) {
+ if let Some(parent_entry) = worktree.entry_for_path(ancestor_path) {
+ sticky_parents.push(parent_entry.clone());
+ current_path = parent_entry.path.clone();
+ continue 'outer;
+ }
+ }
+ }
+ }
+ break 'outer;
+ }
+
+ sticky_parents.reverse();
+
+ let git_status_enabled = ProjectPanelSettings::get_global(cx).git_status;
+ let root_name = OsStr::new(worktree.root_name());
+
+ let git_summaries_by_id = if git_status_enabled {
+ visible_worktree_entries
+ .iter()
+ .map(|e| (e.id, e.git_summary))
+ .collect::<HashMap<_, _>>()
+ } else {
+ Default::default()
+ };
+
+ sticky_parents
+ .iter()
+ .enumerate()
+ .map(|(index, entry)| {
+ let git_status = git_summaries_by_id
+ .get(&entry.id)
+ .copied()
+ .unwrap_or_default();
+ let sticky_details = Some(StickyDetails {
+ sticky_index: index,
+ });
+ let details = self.details_for_entry(
+ entry,
+ worktree_id,
+ root_name,
+ paths,
+ git_status,
+ sticky_details,
+ window,
+ cx,
+ );
+ self.render_entry(entry.id, details, window, cx).into_any()
+ })
+ .collect()
+ }
+}
+
+#[derive(Clone)]
+struct StickyProjectPanelCandidate {
+ index: usize,
+ depth: usize,
+}
+
+impl StickyCandidate for StickyProjectPanelCandidate {
+ fn depth(&self) -> usize {
+ self.depth
+ }
}
fn item_width_estimate(depth: usize, item_text_chars: usize, is_symlink: bool) -> usize {
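
A hedged, self-contained sketch of the ancestor walk performed by `render_sticky_entries` above, with plain paths standing in for worktree entries (the helper and its signature are illustrative, not part of the change):

```rust
use std::collections::HashSet;
use std::path::{Path, PathBuf};

// Collect each visible ancestor directory of `path`, innermost first, then reverse
// so the outermost directory renders at the top of the sticky header.
fn sticky_parents(path: &Path, visible: &HashSet<PathBuf>) -> Vec<PathBuf> {
    let mut parents = Vec::new();
    let mut current = path.to_path_buf();
    while let Some(parent) = current.parent() {
        match parent.ancestors().find(|a| visible.contains(*a)) {
            Some(ancestor) if !ancestor.as_os_str().is_empty() => {
                parents.push(ancestor.to_path_buf());
                current = ancestor.to_path_buf();
            }
            _ => break,
        }
    }
    parents.reverse();
    parents
}
```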
@@ -4741,6 +4976,7 @@ impl Render for ProjectPanel {
let indent_size = ProjectPanelSettings::get_global(cx).indent_size;
let show_indent_guides =
ProjectPanelSettings::get_global(cx).indent_guides.show == ShowIndentGuides::Always;
+ let show_sticky_scroll = ProjectPanelSettings::get_global(cx).sticky_scroll;
let is_local = project.is_local();
if has_worktree {
@@ -40,6 +40,7 @@ pub struct ProjectPanelSettings {
pub git_status: bool,
pub indent_size: f32,
pub indent_guides: IndentGuidesSettings,
+ pub sticky_scroll: bool,
pub auto_reveal_entries: bool,
pub auto_fold_dirs: bool,
pub scrollbar: ScrollbarSettings,
@@ -150,6 +151,10 @@ pub struct ProjectPanelSettingsContent {
///
/// Default: false
pub hide_root: Option<bool>,
+    /// Whether to stick parent directories at the top of the project panel.
+ ///
+ /// Default: true
+ pub sticky_scroll: Option<bool>,
}
impl Settings for ProjectPanelSettings {
@@ -535,7 +535,7 @@ message DebugScenario {
message SpawnInTerminal {
string label = 1;
- string command = 2;
+ optional string command = 2;
repeated string args = 3;
map<string, string> env = 4;
optional string cwd = 5;
@@ -222,11 +222,13 @@ message Completion {
optional Anchor buffer_word_end = 10;
Anchor old_insert_start = 11;
Anchor old_insert_end = 12;
+ optional string sort_text = 13;
enum Source {
Lsp = 0;
Custom = 1;
BufferWord = 2;
+ Dap = 3;
}
}
@@ -757,6 +759,11 @@ message MultiLspQuery {
GetCodeLens get_code_lens = 8;
GetDocumentDiagnostics get_document_diagnostics = 9;
GetDocumentColor get_document_color = 10;
+ GetDefinition get_definition = 11;
+ GetDeclaration get_declaration = 12;
+ GetTypeDefinition get_type_definition = 13;
+ GetImplementation get_implementation = 14;
+ GetReferences get_references = 15;
}
}
@@ -795,6 +802,11 @@ message LspResponse {
GetCodeLensResponse get_code_lens_response = 4;
GetDocumentDiagnosticsResponse get_document_diagnostics_response = 5;
GetDocumentColorResponse get_document_color_response = 6;
+ GetDefinitionResponse get_definition_response = 8;
+ GetDeclarationResponse get_declaration_response = 9;
+ GetTypeDefinitionResponse get_type_definition_response = 10;
+ GetImplementationResponse get_implementation_response = 11;
+ GetReferencesResponse get_references_response = 12;
}
uint64 server_id = 7;
}
@@ -164,7 +164,7 @@ fn init_panic_hook() {
}),
app_version: format!("remote-server-{version}"),
app_commit_sha: option_env!("ZED_COMMIT_SHA").map(|sha| sha.into()),
- release_channel: release_channel.display_name().into(),
+ release_channel: release_channel.dev_name().into(),
target: env!("TARGET").to_owned().into(),
os_name: telemetry::os_name(),
os_version: Some(telemetry::os_version()),
@@ -656,7 +656,7 @@ impl Render for CodeCell {
// .bg(cx.theme().colors().editor_background)
// .border(px(1.))
// .border_color(cx.theme().colors().border)
- // .shadow_sm()
+ // .shadow_xs()
.children(content)
},
))),
@@ -28,12 +28,19 @@ use nbformat::v4::Metadata as NotebookMetadata;
actions!(
notebook,
[
+ /// Opens a Jupyter notebook file.
OpenNotebook,
+ /// Runs all cells in the notebook.
RunAll,
+ /// Clears all cell outputs.
ClearOutputs,
+ /// Moves the current cell up.
MoveCellUp,
+ /// Moves the current cell down.
MoveCellDown,
+ /// Adds a new markdown cell.
AddMarkdownBlock,
+ /// Adds a new code cell.
AddCodeBlock,
]
);
@@ -16,13 +16,21 @@ use crate::repl_store::ReplStore;
actions!(
repl,
[
+ /// Runs the current cell and advances to the next one.
Run,
+ /// Runs the current cell without advancing.
RunInPlace,
+ /// Clears all outputs in the REPL.
ClearOutputs,
+ /// Opens the REPL sessions panel.
Sessions,
+ /// Interrupts the currently running kernel.
Interrupt,
+ /// Shuts down the current kernel.
Shutdown,
+ /// Restarts the current kernel.
Restart,
+ /// Refreshes the list of available kernelspecs.
RefreshKernelspecs
]
);
@@ -61,7 +61,8 @@ impl ReqwestClient {
})
.ok()
}) {
- client = client.proxy(proxy);
+ // Respect NO_PROXY env var
+ client = client.proxy(proxy.no_proxy(reqwest::NoProxy::from_env()));
client_has_proxy = true;
} else {
client_has_proxy = false;
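
For reference, a hedged standalone sketch of the pattern introduced above; it relies on the `Proxy::no_proxy`/`NoProxy::from_env` calls visible in the diff plus `reqwest::Proxy::all` and the client builder:

```rust
// Route requests through `proxy_url`, but let hosts listed in NO_PROXY bypass it.
fn proxied_client(proxy_url: &str) -> reqwest::Result<reqwest::Client> {
    let proxy = reqwest::Proxy::all(proxy_url)?.no_proxy(reqwest::NoProxy::from_env());
    reqwest::Client::builder().proxy(proxy).build()
}
```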
@@ -37,7 +37,16 @@ pub fn init(cx: &mut App) {
actions!(
rules_library,
- [NewRule, DeleteRule, DuplicateRule, ToggleDefaultRule]
+ [
+ /// Creates a new rule in the rules library.
+ NewRule,
+ /// Deletes the selected rule.
+ DeleteRule,
+ /// Duplicates the selected rule.
+ DuplicateRule,
+ /// Toggles whether the selected rule is a default rule.
+ ToggleDefaultRule
+ ]
);
const BUILT_IN_TOOLTIP_TEXT: &'static str = concat!(
@@ -46,6 +46,7 @@ use registrar::{ForDeployed, ForDismissed, SearchActionsRegistrar, WithResults};
const MAX_BUFFER_SEARCH_HISTORY_SIZE: usize = 50;
+/// Opens the buffer search interface with the specified configuration.
#[derive(PartialEq, Clone, Deserialize, JsonSchema, Action)]
#[action(namespace = buffer_search)]
#[serde(deny_unknown_fields)]
@@ -58,7 +59,17 @@ pub struct Deploy {
pub selection_search_enabled: bool,
}
-actions!(buffer_search, [DeployReplace, Dismiss, FocusEditor]);
+actions!(
+ buffer_search,
+ [
+ /// Deploys the search and replace interface.
+ DeployReplace,
+ /// Dismisses the search bar.
+ Dismiss,
+ /// Focuses back on the editor.
+ FocusEditor
+ ]
+);
impl Deploy {
pub fn find() -> Self {
@@ -101,7 +112,7 @@ pub struct BufferSearchBar {
search_options: SearchOptions,
default_options: SearchOptions,
configured_options: SearchOptions,
- query_contains_error: bool,
+ query_error: Option<String>,
dismissed: bool,
search_history: SearchHistory,
search_history_cursor: SearchHistoryCursor,
@@ -217,7 +228,7 @@ impl Render for BufferSearchBar {
if in_replace {
key_context.add("in_replace");
}
- let editor_border = if self.query_contains_error {
+ let editor_border = if self.query_error.is_some() {
Color::Error.color(cx)
} else {
cx.theme().colors().border
@@ -469,6 +480,14 @@ impl Render for BufferSearchBar {
)
});
+ let query_error_line = self.query_error.as_ref().map(|error| {
+ Label::new(error)
+ .size(LabelSize::Small)
+ .color(Color::Error)
+ .mt_neg_1()
+ .ml_2()
+ });
+
v_flex()
.id("buffer_search")
.gap_2()
@@ -524,6 +543,7 @@ impl Render for BufferSearchBar {
.w_full()
},
))
+ .children(query_error_line)
.children(replace_line)
}
}
@@ -728,7 +748,7 @@ impl BufferSearchBar {
configured_options: search_options,
search_options,
pending_search: None,
- query_contains_error: false,
+ query_error: None,
dismissed: true,
search_history: SearchHistory::new(
Some(MAX_BUFFER_SEARCH_HISTORY_SIZE),
@@ -1230,7 +1250,7 @@ impl BufferSearchBar {
self.pending_search.take();
if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
- self.query_contains_error = false;
+ self.query_error = None;
if query.is_empty() {
self.clear_active_searchable_item_matches(window, cx);
let _ = done_tx.send(());
@@ -1255,8 +1275,8 @@ impl BufferSearchBar {
None,
) {
Ok(query) => query.with_replacement(self.replacement(cx)),
- Err(_) => {
- self.query_contains_error = true;
+ Err(e) => {
+ self.query_error = Some(e.to_string());
self.clear_active_searchable_item_matches(window, cx);
cx.notify();
return done_rx;
@@ -1274,8 +1294,8 @@ impl BufferSearchBar {
None,
) {
Ok(query) => query.with_replacement(self.replacement(cx)),
- Err(_) => {
- self.query_contains_error = true;
+ Err(e) => {
+ self.query_error = Some(e.to_string());
self.clear_active_searchable_item_matches(window, cx);
cx.notify();
return done_rx;
@@ -47,7 +47,16 @@ use workspace::{
actions!(
project_search,
- [SearchInNew, ToggleFocus, NextField, ToggleFilters]
+ [
+ /// Searches in a new project search tab.
+ SearchInNew,
+ /// Toggles focus between the search bar and the search results.
+ ToggleFocus,
+ /// Moves to the next input field.
+ NextField,
+ /// Toggles the search filters panel.
+ ToggleFilters
+ ]
);
#[derive(Default)]
@@ -208,6 +217,7 @@ pub struct ProjectSearchView {
included_opened_only: bool,
regex_language: Option<Arc<Language>>,
_subscriptions: Vec<Subscription>,
+ query_error: Option<String>,
}
#[derive(Debug, Clone)]
@@ -876,6 +886,7 @@ impl ProjectSearchView {
included_opened_only: false,
regex_language: None,
_subscriptions: subscriptions,
+ query_error: None,
};
this.entity_changed(window, cx);
this
@@ -1209,14 +1220,16 @@ impl ProjectSearchView {
if should_unmark_error {
cx.notify();
}
+ self.query_error = None;
Some(query)
}
- Err(_e) => {
+ Err(e) => {
let should_mark_error = self.panels_with_errors.insert(InputPanel::Query);
if should_mark_error {
cx.notify();
}
+ self.query_error = Some(e.to_string());
None
}
@@ -2291,6 +2304,14 @@ impl Render for ProjectSearchBar {
key_context.add("in_replace");
}
+ let query_error_line = search.query_error.as_ref().map(|error| {
+ Label::new(error)
+ .size(LabelSize::Small)
+ .color(Color::Error)
+ .mt_neg_1()
+ .ml_2()
+ });
+
v_flex()
.py(px(1.0))
.key_context(key_context)
@@ -2342,6 +2363,7 @@ impl Render for ProjectSearchBar {
.gap_2()
.w_full()
.child(search_line)
+ .children(query_error_line)
.children(replace_line)
.children(filter_line)
}
@@ -23,19 +23,35 @@ pub fn init(cx: &mut App) {
actions!(
search,
[
+ /// Focuses on the search input field.
FocusSearch,
+ /// Toggles whole word matching.
ToggleWholeWord,
+ /// Toggles case-sensitive search.
ToggleCaseSensitive,
+ /// Toggles searching in ignored files.
ToggleIncludeIgnored,
+ /// Toggles regular expression mode.
ToggleRegex,
+ /// Toggles the replace interface.
ToggleReplace,
+ /// Toggles searching within selection only.
ToggleSelection,
+ /// Selects the next search match.
SelectNextMatch,
+ /// Selects the previous search match.
SelectPreviousMatch,
+ /// Selects all search matches.
SelectAllMatches,
+ /// Cycles through search modes.
+ CycleMode,
+ /// Navigates to the next query in search history.
NextHistoryQuery,
+ /// Navigates to the previous query in search history.
PreviousHistoryQuery,
+ /// Replaces all matches.
ReplaceAll,
+ /// Replaces the next match.
ReplaceNext,
]
);
@@ -3,15 +3,12 @@ use collections::{BTreeMap, HashMap, IndexMap};
use fs::Fs;
use gpui::{
Action, ActionBuildError, App, InvalidKeystrokeError, KEYSTROKE_PARSE_EXPECTED_MESSAGE,
- KeyBinding, KeyBindingContextPredicate, KeyBindingMetaIndex, NoAction,
-};
-use schemars::{
- JsonSchema,
- r#gen::{SchemaGenerator, SchemaSettings},
- schema::{ArrayValidation, InstanceType, Schema, SchemaObject, SubschemaValidation},
+ KeyBinding, KeyBindingContextPredicate, KeyBindingMetaIndex, Keystroke, NoAction, SharedString,
};
+use schemars::{JsonSchema, json_schema};
use serde::Deserialize;
-use serde_json::Value;
+use serde_json::{Value, json};
+use std::borrow::Cow;
use std::{any::TypeId, fmt::Write, rc::Rc, sync::Arc, sync::LazyLock};
use util::{
asset_str,
@@ -123,14 +120,14 @@ impl std::fmt::Display for KeymapAction {
impl JsonSchema for KeymapAction {
/// This is used when generating the JSON schema for the `KeymapAction` type, so that it can
/// reference the keymap action schema.
- fn schema_name() -> String {
+ fn schema_name() -> Cow<'static, str> {
"KeymapAction".into()
}
/// This schema will be replaced with the full action schema in
/// `KeymapFile::generate_json_schema`.
- fn json_schema(_: &mut SchemaGenerator) -> Schema {
- Schema::Bool(true)
+ fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!(true)
}
}
@@ -402,7 +399,13 @@ impl KeymapFile {
},
};
- let key_binding = match KeyBinding::load(keystrokes, action, context, key_equivalents) {
+ let key_binding = match KeyBinding::load(
+ keystrokes,
+ action,
+ context,
+ key_equivalents,
+ action_input_string.map(SharedString::from),
+ ) {
Ok(key_binding) => key_binding,
Err(InvalidKeystrokeError { keystroke }) => {
return Err(format!(
@@ -424,9 +427,11 @@ impl KeymapFile {
}
pub fn generate_json_schema_for_registered_actions(cx: &mut App) -> Value {
- let mut generator = SchemaSettings::draft07()
- .with(|settings| settings.option_add_null_type = false)
- .into_generator();
+ // instead of using DefaultDenyUnknownFields, actions typically use
+ // `#[serde(deny_unknown_fields)]` so that these cases are reported as parse failures. This
+ // is because the rest of the keymap will still load in these cases, whereas other settings
+ // files would not.
+ let mut generator = schemars::generate::SchemaSettings::draft2019_09().into_generator();
let action_schemas = cx.action_schemas(&mut generator);
let deprecations = cx.deprecated_actions_to_preferred_actions();
@@ -440,92 +445,70 @@ impl KeymapFile {
}
fn generate_json_schema(
- generator: SchemaGenerator,
- action_schemas: Vec<(&'static str, Option<Schema>)>,
+ mut generator: schemars::SchemaGenerator,
+ action_schemas: Vec<(&'static str, Option<schemars::Schema>)>,
deprecations: &HashMap<&'static str, &'static str>,
deprecation_messages: &HashMap<&'static str, &'static str>,
) -> serde_json::Value {
- fn set<I, O>(input: I) -> Option<O>
- where
- I: Into<O>,
- {
- Some(input.into())
- }
-
- fn add_deprecation(schema_object: &mut SchemaObject, message: String) {
- schema_object.extensions.insert(
- // deprecationMessage is not part of the JSON Schema spec,
- // but json-language-server recognizes it.
- "deprecationMessage".to_owned(),
+ fn add_deprecation(schema: &mut schemars::Schema, message: String) {
+ schema.insert(
+ // deprecationMessage is not part of the JSON Schema spec, but
+ // json-language-server recognizes it.
+ "deprecationMessage".to_string(),
Value::String(message),
);
}
- fn add_deprecation_preferred_name(schema_object: &mut SchemaObject, new_name: &str) {
- add_deprecation(schema_object, format!("Deprecated, use {new_name}"));
+ fn add_deprecation_preferred_name(schema: &mut schemars::Schema, new_name: &str) {
+ add_deprecation(schema, format!("Deprecated, use {new_name}"));
}
- fn add_description(schema_object: &mut SchemaObject, description: String) {
- schema_object
- .metadata
- .get_or_insert(Default::default())
- .description = Some(description);
+ fn add_description(schema: &mut schemars::Schema, description: String) {
+ schema.insert("description".to_string(), Value::String(description));
}
- let empty_object: SchemaObject = SchemaObject {
- instance_type: set(InstanceType::Object),
- ..Default::default()
- };
+ let empty_object = json_schema!({
+ "type": "object"
+ });
// This is a workaround for a json-language-server issue where it matches the first
// alternative that matches the value's shape and uses that for documentation.
//
// In the case of the array validations, it would even provide an error saying that the name
// must match the name of the first alternative.
- let mut plain_action = SchemaObject {
- instance_type: set(InstanceType::String),
- const_value: Some(Value::String("".to_owned())),
- ..Default::default()
- };
+ let mut plain_action = json_schema!({
+ "type": "string",
+ "const": ""
+ });
let no_action_message = "No action named this.";
add_description(&mut plain_action, no_action_message.to_owned());
add_deprecation(&mut plain_action, no_action_message.to_owned());
- let mut matches_action_name = SchemaObject {
- const_value: Some(Value::String("".to_owned())),
- ..Default::default()
- };
- let no_action_message = "No action named this that takes input.";
- add_description(&mut matches_action_name, no_action_message.to_owned());
- add_deprecation(&mut matches_action_name, no_action_message.to_owned());
- let action_with_input = SchemaObject {
- instance_type: set(InstanceType::Array),
- array: set(ArrayValidation {
- items: set(vec![
- matches_action_name.into(),
- // Accept any value, as we want this to be the preferred match when there is a
- // typo in the name.
- Schema::Bool(true),
- ]),
- min_items: Some(2),
- max_items: Some(2),
- ..Default::default()
- }),
- ..Default::default()
- };
- let mut keymap_action_alternatives = vec![plain_action.into(), action_with_input.into()];
- for (name, action_schema) in action_schemas.into_iter() {
- let schema = if let Some(Schema::Object(schema)) = action_schema {
- Some(schema)
- } else {
- None
- };
+ let mut matches_action_name = json_schema!({
+ "const": ""
+ });
+ let no_action_message_input = "No action named this that takes input.";
+ add_description(&mut matches_action_name, no_action_message_input.to_owned());
+ add_deprecation(&mut matches_action_name, no_action_message_input.to_owned());
+
+ let action_with_input = json_schema!({
+ "type": "array",
+ "items": [
+ matches_action_name,
+ true
+ ],
+ "minItems": 2,
+ "maxItems": 2
+ });
+ let mut keymap_action_alternatives = vec![plain_action, action_with_input];
- let description = schema.as_ref().and_then(|schema| {
+ for (name, action_schema) in action_schemas.into_iter() {
+ let description = action_schema.as_ref().and_then(|schema| {
schema
- .metadata
- .as_ref()
- .and_then(|metadata| metadata.description.clone())
+ .as_object()
+ .and_then(|obj| obj.get("description"))
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string())
});
let deprecation = if name == NoAction.name() {
@@ -535,84 +518,64 @@ impl KeymapFile {
};
// Add an alternative for plain action names.
- let mut plain_action = SchemaObject {
- instance_type: set(InstanceType::String),
- const_value: Some(Value::String(name.to_string())),
- ..Default::default()
- };
+ let mut plain_action = json_schema!({
+ "type": "string",
+ "const": name
+ });
if let Some(message) = deprecation_messages.get(name) {
add_deprecation(&mut plain_action, message.to_string());
} else if let Some(new_name) = deprecation {
add_deprecation_preferred_name(&mut plain_action, new_name);
}
- if let Some(description) = description.clone() {
- add_description(&mut plain_action, description);
+ if let Some(desc) = description.clone() {
+ add_description(&mut plain_action, desc);
}
- keymap_action_alternatives.push(plain_action.into());
+ keymap_action_alternatives.push(plain_action);
// Add an alternative for actions with data specified as a [name, data] array.
//
- // When a struct with no deserializable fields is added with impl_actions! /
- // impl_actions_as! an empty object schema is produced. The action should be invoked
- // without data in this case.
- if let Some(schema) = schema {
+ // When a struct with no deserializable fields is added by deriving `Action`, an empty
+ // object schema is produced. The action should be invoked without data in this case.
+ if let Some(schema) = action_schema {
if schema != empty_object {
- let mut matches_action_name = SchemaObject {
- const_value: Some(Value::String(name.to_string())),
- ..Default::default()
- };
- if let Some(description) = description.clone() {
- add_description(&mut matches_action_name, description);
+ let mut matches_action_name = json_schema!({
+ "const": name
+ });
+ if let Some(desc) = description.clone() {
+ add_description(&mut matches_action_name, desc);
}
if let Some(message) = deprecation_messages.get(name) {
add_deprecation(&mut matches_action_name, message.to_string());
} else if let Some(new_name) = deprecation {
add_deprecation_preferred_name(&mut matches_action_name, new_name);
}
- let action_with_input = SchemaObject {
- instance_type: set(InstanceType::Array),
- array: set(ArrayValidation {
- items: set(vec![matches_action_name.into(), schema.into()]),
- min_items: Some(2),
- max_items: Some(2),
- ..Default::default()
- }),
- ..Default::default()
- };
- keymap_action_alternatives.push(action_with_input.into());
+ let action_with_input = json_schema!({
+ "type": "array",
+ "items": [matches_action_name, schema],
+ "minItems": 2,
+ "maxItems": 2
+ });
+ keymap_action_alternatives.push(action_with_input);
}
}
}
// Placing null first causes json-language-server to default assuming actions should be
// null, so place it last.
- keymap_action_alternatives.push(
- SchemaObject {
- instance_type: set(InstanceType::Null),
- ..Default::default()
- }
- .into(),
- );
+ keymap_action_alternatives.push(json_schema!({
+ "type": "null"
+ }));
- let action_schema = SchemaObject {
- subschemas: set(SubschemaValidation {
- one_of: Some(keymap_action_alternatives),
- ..Default::default()
+ // The `KeymapSection` schema will reference the `KeymapAction` schema by name, so setting
+ // the definition of `KeymapAction` results in the full action schema being used.
+ generator.definitions_mut().insert(
+ KeymapAction::schema_name().to_string(),
+ json!({
+ "oneOf": keymap_action_alternatives
}),
- ..Default::default()
- }
- .into();
+ );
- // The `KeymapSection` schema will reference the `KeymapAction` schema by name, so replacing
- // the definition of `KeymapAction` results in the full action schema being used.
- let mut root_schema = generator.into_root_schema_for::<KeymapFile>();
- root_schema
- .definitions
- .insert(KeymapAction::schema_name(), action_schema);
-
- // This and other json schemas can be viewed via `dev: open language server logs` ->
- // `json-language-server` -> `Server Info`.
- serde_json::to_value(root_schema).unwrap()
+ generator.root_schema_for::<KeymapFile>().to_value()
}
pub fn sections(&self) -> impl DoubleEndedIterator<Item = &KeymapSection> {
@@ -673,6 +636,13 @@ impl KeymapFile {
continue;
};
for (keystrokes, action) in bindings {
+ let Ok(keystrokes) = keystrokes
+ .split_whitespace()
+ .map(Keystroke::parse)
+ .collect::<Result<Vec<_>, _>>()
+ else {
+ continue;
+ };
if keystrokes != target.keystrokes {
continue;
}
@@ -687,9 +657,9 @@ impl KeymapFile {
if let Some(index) = found_index {
let (replace_range, replace_value) = replace_top_level_array_value_in_json_text(
&keymap_contents,
- &["bindings", target.keystrokes],
+ &["bindings", &target.keystrokes_unparsed()],
Some(&source_action_value),
- Some(source.keystrokes),
+ Some(&source.keystrokes_unparsed()),
index,
tab_size,
)
@@ -721,7 +691,7 @@ impl KeymapFile {
value.insert("bindings".to_string(), {
let mut bindings = serde_json::Map::new();
let action = keybinding.action_value()?;
- bindings.insert(keybinding.keystrokes.into(), action);
+ bindings.insert(keybinding.keystrokes_unparsed(), action);
bindings.into()
});
@@ -748,11 +718,11 @@ pub enum KeybindUpdateOperation<'a> {
}
pub struct KeybindUpdateTarget<'a> {
- context: Option<&'a str>,
- keystrokes: &'a str,
- action_name: &'a str,
- use_key_equivalents: bool,
- input: Option<&'a str>,
+ pub context: Option<&'a str>,
+ pub keystrokes: &'a [Keystroke],
+ pub action_name: &'a str,
+ pub use_key_equivalents: bool,
+ pub input: Option<&'a str>,
}
impl<'a> KeybindUpdateTarget<'a> {
@@ -768,6 +738,16 @@ impl<'a> KeybindUpdateTarget<'a> {
};
return Ok(value);
}
+
+ fn keystrokes_unparsed(&self) -> String {
+ let mut keystrokes = String::with_capacity(self.keystrokes.len() * 8);
+ for keystroke in self.keystrokes {
+ keystrokes.push_str(&keystroke.unparse());
+ keystrokes.push(' ');
+ }
+ keystrokes.pop();
+ keystrokes
+ }
}
#[derive(Clone, Copy, PartialEq, Eq)]
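
A brief hedged sketch of the parse/unparse round-trip the new `&[Keystroke]` representation relies on, using only the gpui calls that appear above (`Keystroke::parse`, `Keystroke::unparse`); the helper itself is illustrative:

```rust
use gpui::Keystroke;

// Parse a space-separated binding into keystrokes, then serialize it back to the
// textual form that gets written into the keymap file.
fn roundtrip(binding: &str) -> Option<String> {
    let keystrokes = binding
        .split_whitespace()
        .map(|s| Keystroke::parse(s).ok())
        .collect::<Option<Vec<_>>>()?;
    Some(
        keystrokes
            .iter()
            .map(|keystroke| keystroke.unparse())
            .collect::<Vec<_>>()
            .join(" "),
    )
}
```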
@@ -804,10 +784,10 @@ impl KeybindSource {
pub fn from_meta(index: KeyBindingMetaIndex) -> Self {
match index {
- _ if index == Self::USER => KeybindSource::User,
- _ if index == Self::USER => KeybindSource::Base,
- _ if index == Self::DEFAULT => KeybindSource::Default,
- _ if index == Self::VIM => KeybindSource::Vim,
+ Self::USER => KeybindSource::User,
+ Self::BASE => KeybindSource::Base,
+ Self::DEFAULT => KeybindSource::Default,
+ Self::VIM => KeybindSource::Vim,
_ => unreachable!(),
}
}
@@ -851,6 +831,8 @@ mod tests {
#[test]
fn keymap_update() {
+ use gpui::Keystroke;
+
zlog::init_test();
#[track_caller]
fn check_keymap_update(
@@ -863,10 +845,18 @@ mod tests {
pretty_assertions::assert_eq!(expected.to_string(), result);
}
+ #[track_caller]
+ fn parse_keystrokes(keystrokes: &str) -> Vec<Keystroke> {
+ return keystrokes
+ .split(' ')
+ .map(|s| Keystroke::parse(s).expect("Keystrokes valid"))
+ .collect();
+ }
+
check_keymap_update(
"[]",
KeybindUpdateOperation::Add(KeybindUpdateTarget {
- keystrokes: "ctrl-a",
+ keystrokes: &parse_keystrokes("ctrl-a"),
action_name: "zed::SomeAction",
context: None,
use_key_equivalents: false,
@@ -892,7 +882,7 @@ mod tests {
]"#
.unindent(),
KeybindUpdateOperation::Add(KeybindUpdateTarget {
- keystrokes: "ctrl-b",
+ keystrokes: &parse_keystrokes("ctrl-b"),
action_name: "zed::SomeOtherAction",
context: None,
use_key_equivalents: false,
@@ -923,7 +913,7 @@ mod tests {
]"#
.unindent(),
KeybindUpdateOperation::Add(KeybindUpdateTarget {
- keystrokes: "ctrl-b",
+ keystrokes: &parse_keystrokes("ctrl-b"),
action_name: "zed::SomeOtherAction",
context: None,
use_key_equivalents: false,
@@ -959,7 +949,7 @@ mod tests {
]"#
.unindent(),
KeybindUpdateOperation::Add(KeybindUpdateTarget {
- keystrokes: "ctrl-b",
+ keystrokes: &parse_keystrokes("ctrl-b"),
action_name: "zed::SomeOtherAction",
context: Some("Zed > Editor && some_condition = true"),
use_key_equivalents: true,
@@ -998,14 +988,14 @@ mod tests {
.unindent(),
KeybindUpdateOperation::Replace {
target: KeybindUpdateTarget {
- keystrokes: "ctrl-a",
+ keystrokes: &parse_keystrokes("ctrl-a"),
action_name: "zed::SomeAction",
context: None,
use_key_equivalents: false,
input: None,
},
source: KeybindUpdateTarget {
- keystrokes: "ctrl-b",
+ keystrokes: &parse_keystrokes("ctrl-b"),
action_name: "zed::SomeOtherAction",
context: None,
use_key_equivalents: false,
@@ -1044,14 +1034,14 @@ mod tests {
.unindent(),
KeybindUpdateOperation::Replace {
target: KeybindUpdateTarget {
- keystrokes: "ctrl-a",
+ keystrokes: &parse_keystrokes("ctrl-a"),
action_name: "zed::SomeAction",
context: None,
use_key_equivalents: false,
input: None,
},
source: KeybindUpdateTarget {
- keystrokes: "ctrl-b",
+ keystrokes: &parse_keystrokes("ctrl-b"),
action_name: "zed::SomeOtherAction",
context: None,
use_key_equivalents: false,
@@ -1085,14 +1075,14 @@ mod tests {
.unindent(),
KeybindUpdateOperation::Replace {
target: KeybindUpdateTarget {
- keystrokes: "ctrl-a",
+ keystrokes: &parse_keystrokes("ctrl-a"),
action_name: "zed::SomeNonexistentAction",
context: None,
use_key_equivalents: false,
input: None,
},
source: KeybindUpdateTarget {
- keystrokes: "ctrl-b",
+ keystrokes: &parse_keystrokes("ctrl-b"),
action_name: "zed::SomeOtherAction",
context: None,
use_key_equivalents: false,
@@ -1128,14 +1118,14 @@ mod tests {
.unindent(),
KeybindUpdateOperation::Replace {
target: KeybindUpdateTarget {
- keystrokes: "ctrl-a",
+ keystrokes: &parse_keystrokes("ctrl-a"),
action_name: "zed::SomeAction",
context: None,
use_key_equivalents: false,
input: None,
},
source: KeybindUpdateTarget {
- keystrokes: "ctrl-b",
+ keystrokes: &parse_keystrokes("ctrl-b"),
action_name: "zed::SomeOtherAction",
context: None,
use_key_equivalents: false,
@@ -14,8 +14,8 @@ use util::asset_str;
pub use editable_setting_control::*;
pub use key_equivalents::*;
pub use keymap_file::{
- KeyBindingValidator, KeyBindingValidatorRegistration, KeybindSource, KeymapFile,
- KeymapFileLoadResult,
+ KeyBindingValidator, KeyBindingValidatorRegistration, KeybindSource, KeybindUpdateOperation,
+ KeybindUpdateTarget, KeymapFile, KeymapFileLoadResult,
};
pub use settings_file::*;
pub use settings_json::*;
@@ -1,84 +1,24 @@
-use std::{ops::Range, sync::LazyLock};
-
use anyhow::Result;
-use schemars::schema::{
- ArrayValidation, InstanceType, RootSchema, Schema, SchemaObject, SingleOrVec,
-};
+use gpui::App;
use serde::{Serialize, de::DeserializeOwned};
use serde_json::Value;
+use std::{ops::Range, sync::LazyLock};
use tree_sitter::{Query, StreamingIterator as _};
use util::RangeExt;
+/// Parameters that are used when generating some JSON schemas at runtime.
pub struct SettingsJsonSchemaParams<'a> {
pub language_names: &'a [String],
pub font_names: &'a [String],
}
-impl SettingsJsonSchemaParams<'_> {
- pub fn font_family_schema(&self) -> Schema {
- let available_fonts: Vec<_> = self.font_names.iter().cloned().map(Value::String).collect();
-
- SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- enum_values: Some(available_fonts),
- ..Default::default()
- }
- .into()
- }
-
- pub fn font_fallback_schema(&self) -> Schema {
- SchemaObject {
- instance_type: Some(SingleOrVec::Vec(vec![
- InstanceType::Array,
- InstanceType::Null,
- ])),
- array: Some(Box::new(ArrayValidation {
- items: Some(schemars::schema::SingleOrVec::Single(Box::new(
- self.font_family_schema(),
- ))),
- unique_items: Some(true),
- ..Default::default()
- })),
- ..Default::default()
- }
- .into()
- }
+/// A registered value that specifies JSON schemas generated at runtime.
+pub struct ParameterizedJsonSchema {
+ pub add_and_get_ref:
+ fn(&mut schemars::SchemaGenerator, &SettingsJsonSchemaParams, &App) -> schemars::Schema,
}
-type PropertyName<'a> = &'a str;
-type ReferencePath<'a> = &'a str;
-
-/// Modifies the provided [`RootSchema`] by adding references to all of the specified properties.
-///
-/// # Examples
-///
-/// ```
-/// # let root_schema = RootSchema::default();
-/// add_references_to_properties(&mut root_schema, &[
-/// ("property_a", "#/definitions/DefinitionA"),
-/// ("property_b", "#/definitions/DefinitionB"),
-/// ])
-/// ```
-pub fn add_references_to_properties(
- root_schema: &mut RootSchema,
- properties_with_references: &[(PropertyName, ReferencePath)],
-) {
- for (property, definition) in properties_with_references {
- let Some(schema) = root_schema.schema.object().properties.get_mut(*property) else {
- log::warn!("property '{property}' not found in JSON schema");
- continue;
- };
-
- match schema {
- Schema::Object(schema) => {
- schema.reference = Some(definition.to_string());
- }
- Schema::Bool(_) => {
- // Boolean schemas can't have references.
- }
- }
- }
-}
+inventory::collect!(ParameterizedJsonSchema);
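
For orientation, a minimal sketch of how a crate might register one of these runtime schemas. The `font_families_schema` function is invented for illustration, and the module paths and the `add_new_subschema` signature are assumptions inferred from how the helper is used later in this diff:

use gpui::App;
use settings::{ParameterizedJsonSchema, SettingsJsonSchemaParams};
use util::schemars::add_new_subschema;

fn font_families_schema(
    generator: &mut schemars::SchemaGenerator,
    params: &SettingsJsonSchemaParams,
    _cx: &App,
) -> schemars::Schema {
    // Build an enum schema from font names that are only known at runtime and
    // register it as a named definition that other schemas can `$ref`.
    add_new_subschema(
        generator,
        "FontFamilies",
        serde_json::json!({ "type": "string", "enum": params.font_names }),
    )
}

inventory::submit! {
    ParameterizedJsonSchema {
        add_and_get_ref: font_families_schema,
    }
}
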
pub fn update_value_in_json_text<'a>(
text: &mut String,
@@ -6,7 +6,7 @@ use futures::{FutureExt, StreamExt, channel::mpsc, future::LocalBoxFuture};
use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal};
use paths::{EDITORCONFIG_NAME, local_settings_file_relative_path, task_file_name};
-use schemars::{JsonSchema, r#gen::SchemaGenerator, schema::RootSchema};
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use serde_json::{Value, json};
use smallvec::SmallVec;
@@ -18,14 +18,16 @@ use std::{
str::{self, FromStr},
sync::Arc,
};
-
-use util::{ResultExt as _, merge_non_null_json_value_into};
+use util::{
+ ResultExt as _, merge_non_null_json_value_into,
+ schemars::{DefaultDenyUnknownFields, add_new_subschema},
+};
pub type EditorconfigProperties = ec4rs::Properties;
use crate::{
- SettingsJsonSchemaParams, VsCodeSettings, WorktreeId, parse_json_with_comments,
- update_value_in_json_text,
+ ParameterizedJsonSchema, SettingsJsonSchemaParams, VsCodeSettings, WorktreeId,
+ parse_json_with_comments, update_value_in_json_text,
};
/// A value that can be defined as a user setting.
@@ -57,14 +59,6 @@ pub trait Settings: 'static + Send + Sync {
where
Self: Sized;
- fn json_schema(
- generator: &mut SchemaGenerator,
- _: &SettingsJsonSchemaParams,
- _: &App,
- ) -> RootSchema {
- generator.root_schema_for::<Self::FileContent>()
- }
-
fn missing_default() -> anyhow::Error {
anyhow::anyhow!("missing default")
}
@@ -253,12 +247,7 @@ trait AnySettingValue: 'static + Send + Sync {
fn all_local_values(&self) -> Vec<(WorktreeId, Arc<Path>, &dyn Any)>;
fn set_global_value(&mut self, value: Box<dyn Any>);
fn set_local_value(&mut self, root_id: WorktreeId, path: Arc<Path>, value: Box<dyn Any>);
- fn json_schema(
- &self,
- generator: &mut SchemaGenerator,
- _: &SettingsJsonSchemaParams,
- cx: &App,
- ) -> RootSchema;
+ fn json_schema(&self, generator: &mut schemars::SchemaGenerator) -> schemars::Schema;
fn edits_for_update(
&self,
raw_settings: &serde_json::Value,
@@ -276,11 +265,11 @@ impl SettingsStore {
let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded();
Self {
setting_values: Default::default(),
- raw_default_settings: serde_json::json!({}),
+ raw_default_settings: json!({}),
raw_global_settings: None,
- raw_user_settings: serde_json::json!({}),
+ raw_user_settings: json!({}),
raw_server_settings: None,
- raw_extension_settings: serde_json::json!({}),
+ raw_extension_settings: json!({}),
raw_local_settings: Default::default(),
raw_editorconfig_settings: BTreeMap::default(),
tab_size_callback: Default::default(),
@@ -631,7 +620,7 @@ impl SettingsStore {
));
}
- fn json_tab_size(&self) -> usize {
+ pub fn json_tab_size(&self) -> usize {
const DEFAULT_JSON_TAB_SIZE: usize = 2;
if let Some((setting_type_id, callback)) = &self.tab_size_callback {
@@ -877,128 +866,186 @@ impl SettingsStore {
}
pub fn json_schema(&self, schema_params: &SettingsJsonSchemaParams, cx: &App) -> Value {
- use schemars::{
- r#gen::SchemaSettings,
- schema::{Schema, SchemaObject},
- };
-
- let settings = SchemaSettings::draft07().with(|settings| {
- settings.option_add_null_type = true;
+ let mut generator = schemars::generate::SchemaSettings::draft2019_09()
+ .with_transform(DefaultDenyUnknownFields)
+ .into_generator();
+ let mut combined_schema = json!({
+ "type": "object",
+ "properties": {}
});
- let mut generator = SchemaGenerator::new(settings);
- let mut combined_schema = RootSchema::default();
+ // Merge the settings schemas together, similar to JSON Schema's "allOf". The merge is
+ // recursive, though at the time of writing the recursion is rarely exercised. One example
+ // is the `jupyter` schema, which receives contributions from both `EditorSettings` and
+ // `JupyterSettings`.
+ //
+ // This logic could be replaced with "allOf", but then there would be no opportunity to
+ // validate and fully control the merge.
for setting_value in self.setting_values.values() {
- let setting_schema = setting_value.json_schema(&mut generator, schema_params, cx);
- combined_schema
- .definitions
- .extend(setting_schema.definitions);
-
- let target_schema = if let Some(key) = setting_value.key() {
- let key_schema = combined_schema
- .schema
- .object()
- .properties
- .entry(key.to_string())
- .or_insert_with(|| Schema::Object(SchemaObject::default()));
- if let Schema::Object(key_schema) = key_schema {
- key_schema
- } else {
- continue;
+ let mut setting_schema = setting_value.json_schema(&mut generator);
+
+ if let Some(key) = setting_value.key() {
+ if let Some(properties) = combined_schema.get_mut("properties") {
+ if let Some(properties_obj) = properties.as_object_mut() {
+ if let Some(target) = properties_obj.get_mut(key) {
+ merge_schema(target, setting_schema.to_value());
+ } else {
+ properties_obj.insert(key.to_string(), setting_schema.to_value());
+ }
+ }
}
} else {
- &mut combined_schema.schema
- };
-
- merge_schema(target_schema, setting_schema.schema);
+ setting_schema.remove("description");
+ setting_schema.remove("additionalProperties");
+ merge_schema(&mut combined_schema, setting_schema.to_value());
+ }
}
- fn merge_schema(target: &mut SchemaObject, mut source: SchemaObject) {
- let source_subschemas = source.subschemas();
- let target_subschemas = target.subschemas();
- if let Some(all_of) = source_subschemas.all_of.take() {
- target_subschemas
- .all_of
- .get_or_insert(Vec::new())
- .extend(all_of);
- }
- if let Some(any_of) = source_subschemas.any_of.take() {
- target_subschemas
- .any_of
- .get_or_insert(Vec::new())
- .extend(any_of);
- }
- if let Some(one_of) = source_subschemas.one_of.take() {
- target_subschemas
- .one_of
- .get_or_insert(Vec::new())
- .extend(one_of);
- }
+ fn merge_schema(target: &mut serde_json::Value, source: serde_json::Value) {
+ let (Some(target_obj), serde_json::Value::Object(source_obj)) =
+ (target.as_object_mut(), source)
+ else {
+ return;
+ };
- if let Some(source) = source.object {
- let target_properties = &mut target.object().properties;
- for (key, value) in source.properties {
- match target_properties.entry(key) {
- btree_map::Entry::Vacant(e) => {
- e.insert(value);
+ for (source_key, source_value) in source_obj {
+ match source_key.as_str() {
+ "properties" => {
+ let serde_json::Value::Object(source_properties) = source_value else {
+ log::error!(
+ "bug: expected object for `{}` json schema field, but got: {}",
+ source_key,
+ source_value
+ );
+ continue;
+ };
+ let target_properties =
+ target_obj.entry(source_key.clone()).or_insert(json!({}));
+ let Some(target_properties) = target_properties.as_object_mut() else {
+ log::error!(
+ "bug: expected object for `{}` json schema field, but got: {}",
+ source_key,
+ target_properties
+ );
+ continue;
+ };
+ for (key, value) in source_properties {
+ if let Some(existing) = target_properties.get_mut(&key) {
+ merge_schema(existing, value);
+ } else {
+ target_properties.insert(key, value);
+ }
}
- btree_map::Entry::Occupied(e) => {
- if let (Schema::Object(target), Schema::Object(src)) =
- (e.into_mut(), value)
- {
- merge_schema(target, src);
+ }
+ "allOf" | "anyOf" | "oneOf" => {
+ let serde_json::Value::Array(source_array) = source_value else {
+ log::error!(
+ "bug: expected array for `{}` json schema field, but got: {}",
+ source_key,
+ source_value,
+ );
+ continue;
+ };
+ let target_array =
+ target_obj.entry(source_key.clone()).or_insert(json!([]));
+ let Some(target_array) = target_array.as_array_mut() else {
+ log::error!(
+ "bug: expected array for `{}` json schema field, but got: {}",
+ source_key,
+ target_array,
+ );
+ continue;
+ };
+ target_array.extend(source_array);
+ }
+ "type"
+ | "$ref"
+ | "enum"
+ | "minimum"
+ | "maximum"
+ | "pattern"
+ | "description"
+ | "additionalProperties" => {
+ if let Some(old_value) =
+ target_obj.insert(source_key.clone(), source_value.clone())
+ {
+ if old_value != source_value {
+ log::error!(
+ "bug: while merging JSON schemas, \
+ mismatch `\"{}\": {}` (before was `{}`)",
+ source_key,
+ old_value,
+ source_value
+ );
}
}
}
+ _ => {
+ log::error!(
+ "bug: while merging settings JSON schemas, \
+ encountered unexpected `\"{}\": {}`",
+ source_key,
+ source_value
+ );
+ }
}
}
+ }
- overwrite(&mut target.instance_type, source.instance_type);
- overwrite(&mut target.string, source.string);
- overwrite(&mut target.number, source.number);
- overwrite(&mut target.reference, source.reference);
- overwrite(&mut target.array, source.array);
- overwrite(&mut target.enum_values, source.enum_values);
-
- fn overwrite<T>(target: &mut Option<T>, source: Option<T>) {
- if let Some(source) = source {
- *target = Some(source);
- }
- }
+ // add schemas which are determined at runtime
+ for parameterized_json_schema in inventory::iter::<ParameterizedJsonSchema>() {
+ (parameterized_json_schema.add_and_get_ref)(&mut generator, schema_params, cx);
}
+ // add merged settings schema to the definitions
const ZED_SETTINGS: &str = "ZedSettings";
- let RootSchema {
- meta_schema,
- schema: zed_settings_schema,
- mut definitions,
- } = combined_schema;
- definitions.insert(ZED_SETTINGS.to_string(), zed_settings_schema.into());
- let zed_settings_ref = Schema::new_ref(format!("#/definitions/{ZED_SETTINGS}"));
-
- // settings file contents matches ZedSettings + overrides for each release stage
- let mut root_schema = json!({
+ let zed_settings_ref = add_new_subschema(&mut generator, ZED_SETTINGS, combined_schema);
+
+ // add `ZedReleaseStageSettings` which is the same as `ZedSettings` except that unknown
+ // fields are rejected.
+ let mut zed_release_stage_settings = zed_settings_ref.clone();
+ zed_release_stage_settings.insert("unevaluatedProperties".to_string(), false.into());
+ let zed_release_stage_settings_ref = add_new_subschema(
+ &mut generator,
+ "ZedReleaseStageSettings",
+ zed_release_stage_settings.to_value(),
+ );
+
+ // Remove `"additionalProperties": false` added by `DefaultDenyUnknownFields` so that
+ // unknown fields can be handled by the root schema and `ZedReleaseStageSettings`.
+ let mut definitions = generator.take_definitions(true);
+ definitions
+ .get_mut(ZED_SETTINGS)
+ .unwrap()
+ .as_object_mut()
+ .unwrap()
+ .remove("additionalProperties");
+
+ let meta_schema = generator
+ .settings()
+ .meta_schema
+ .as_ref()
+ .expect("meta_schema should be present in schemars settings")
+ .to_string();
+
+ json!({
+ "$schema": meta_schema,
+ "title": "Zed Settings",
+ "unevaluatedProperties": false,
+ // ZedSettings + settings overrides for each release stage
"allOf": [
zed_settings_ref,
{
"properties": {
- "dev": zed_settings_ref,
- "nightly": zed_settings_ref,
- "stable": zed_settings_ref,
- "preview": zed_settings_ref,
+ "dev": zed_release_stage_settings_ref,
+ "nightly": zed_release_stage_settings_ref,
+ "stable": zed_release_stage_settings_ref,
+ "preview": zed_release_stage_settings_ref,
}
}
],
- "definitions": definitions,
- });
-
- if let Some(meta_schema) = meta_schema {
- if let Some(root_schema_object) = root_schema.as_object_mut() {
- root_schema_object.insert("$schema".to_string(), meta_schema.into());
- }
- }
-
- root_schema
+ "$defs": definitions,
+ })
}
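
To make the semantics of the `merge_schema` helper above concrete, a small illustrative sketch; the property names are invented, and because `merge_schema` is private to `json_schema`, the expected result is written out by hand rather than computed:

use serde_json::json;

// Two settings types each contribute part of the schema under the same key.
let from_editor_settings = json!({
    "type": "object",
    "properties": { "kernel_selection": { "type": "string" } }
});
let from_jupyter_settings = json!({
    "type": "object",
    "properties": { "dock": { "type": "string" } }
});

// Merging unions the "properties" maps recursively, while scalar fields such
// as "type" are overwritten (a mismatch is logged as a bug).
let merged = json!({
    "type": "object",
    "properties": {
        "kernel_selection": { "type": "string" },
        "dock": { "type": "string" }
    }
});
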
fn recompute_values(
@@ -1311,13 +1358,8 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
}
}
- fn json_schema(
- &self,
- generator: &mut SchemaGenerator,
- params: &SettingsJsonSchemaParams,
- cx: &App,
- ) -> RootSchema {
- T::json_schema(generator, params, cx)
+ fn json_schema(&self, generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ T::FileContent::json_schema(generator)
}
fn edits_for_update(
@@ -1912,7 +1954,6 @@ mod tests {
}
#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
- #[schemars(deny_unknown_fields)]
struct UserSettingsContent {
name: Option<String>,
age: Option<u32>,
@@ -1955,7 +1996,6 @@ mod tests {
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
- #[schemars(deny_unknown_fields)]
struct MultiKeySettingsJson {
key1: Option<String>,
key2: Option<String>,
@@ -1994,7 +2034,6 @@ mod tests {
}
#[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
- #[schemars(deny_unknown_fields)]
struct JournalSettingsJson {
pub path: Option<String>,
pub hour_format: Option<HourFormat>,
@@ -2089,7 +2128,6 @@ mod tests {
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
- #[schemars(deny_unknown_fields)]
struct LanguageSettingEntry {
language_setting_1: Option<bool>,
language_setting_2: Option<bool>,
@@ -12,16 +12,29 @@ workspace = true
path = "src/settings_ui.rs"
[dependencies]
+anyhow.workspace = true
+collections.workspace = true
+command_palette.workspace = true
command_palette_hooks.workspace = true
+component.workspace = true
+db.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
+fuzzy.workspace = true
gpui.workspace = true
+language.workspace = true
log.workspace = true
+menu.workspace = true
+paths.workspace = true
+project.workspace = true
schemars.workspace = true
+search.workspace = true
serde.workspace = true
settings.workspace = true
theme.workspace = true
+tree-sitter-json.workspace = true
+tree-sitter-rust.workspace = true
ui.workspace = true
util.workspace = true
workspace-hack.workspace = true
@@ -2,7 +2,9 @@ use std::sync::Arc;
use gpui::{App, FontFeatures, FontWeight};
use settings::{EditableSettingControl, Settings};
-use theme::{FontFamilyCache, SystemAppearance, ThemeMode, ThemeRegistry, ThemeSettings};
+use theme::{
+ FontFamilyCache, FontFamilyName, SystemAppearance, ThemeMode, ThemeRegistry, ThemeSettings,
+};
use ui::{
CheckboxWithLabel, ContextMenu, DropdownMenu, NumericStepper, SettingsContainer, SettingsGroup,
ToggleButton, prelude::*,
@@ -189,7 +191,7 @@ impl EditableSettingControl for UiFontFamilyControl {
value: Self::Value,
_cx: &App,
) {
- settings.ui_font_family = Some(value.to_string());
+ settings.ui_font_family = Some(FontFamilyName(value.into()));
}
}
@@ -0,0 +1,1279 @@
+use std::{ops::Range, sync::Arc};
+
+use anyhow::{Context as _, anyhow};
+use collections::HashSet;
+use editor::{Editor, EditorEvent};
+use feature_flags::FeatureFlagViewExt;
+use fs::Fs;
+use fuzzy::{StringMatch, StringMatchCandidate};
+use gpui::{
+ AppContext as _, AsyncApp, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
+ Global, KeyContext, Keystroke, ModifiersChangedEvent, ScrollStrategy, StyledText, Subscription,
+ WeakEntity, actions, div, transparent_black,
+};
+use language::{Language, LanguageConfig};
+use settings::KeybindSource;
+
+use util::ResultExt;
+
+use ui::{
+ ActiveTheme as _, App, BorrowAppContext, ContextMenu, ParentElement as _, Render, SharedString,
+ Styled as _, Tooltip, Window, prelude::*, right_click_menu,
+};
+use workspace::{Item, ModalView, SerializableItem, Workspace, register_serializable_item};
+
+use crate::{
+ SettingsUiFeatureFlag,
+ keybindings::persistence::KEYBINDING_EDITORS,
+ ui_components::table::{Table, TableInteractionState},
+};
+
+actions!(
+ zed,
+ [
+ /// Opens the keymap editor.
+ OpenKeymapEditor
+ ]
+);
+
+const KEYMAP_EDITOR_NAMESPACE: &'static str = "keymap_editor";
+actions!(
+ keymap_editor,
+ [
+ /// Edits the selected key binding.
+ EditBinding,
+ /// Copies the action name to clipboard.
+ CopyAction,
+ /// Copies the context predicate to clipboard.
+ CopyContext
+ ]
+);
+
+pub fn init(cx: &mut App) {
+ let keymap_event_channel = KeymapEventChannel::new();
+ cx.set_global(keymap_event_channel);
+
+ cx.on_action(|_: &OpenKeymapEditor, cx| {
+ workspace::with_active_or_new_workspace(cx, move |workspace, window, cx| {
+ let existing = workspace
+ .active_pane()
+ .read(cx)
+ .items()
+ .find_map(|item| item.downcast::<KeymapEditor>());
+
+ if let Some(existing) = existing {
+ workspace.activate_item(&existing, true, true, window, cx);
+ } else {
+ let keymap_editor =
+ cx.new(|cx| KeymapEditor::new(workspace.weak_handle(), window, cx));
+ workspace.add_item_to_active_pane(Box::new(keymap_editor), None, true, window, cx);
+ }
+ });
+ });
+
+ cx.observe_new(|_workspace: &mut Workspace, window, cx| {
+ let Some(window) = window else { return };
+
+ let keymap_ui_actions = [std::any::TypeId::of::<OpenKeymapEditor>()];
+
+ command_palette_hooks::CommandPaletteFilter::update_global(cx, |filter, _cx| {
+ filter.hide_action_types(&keymap_ui_actions);
+ filter.hide_namespace(KEYMAP_EDITOR_NAMESPACE);
+ });
+
+ cx.observe_flag::<SettingsUiFeatureFlag, _>(
+ window,
+ move |is_enabled, _workspace, _, cx| {
+ if is_enabled {
+ command_palette_hooks::CommandPaletteFilter::update_global(
+ cx,
+ |filter, _cx| {
+ filter.show_action_types(keymap_ui_actions.iter());
+ filter.show_namespace(KEYMAP_EDITOR_NAMESPACE);
+ },
+ );
+ } else {
+ command_palette_hooks::CommandPaletteFilter::update_global(
+ cx,
+ |filter, _cx| {
+ filter.hide_action_types(&keymap_ui_actions);
+ filter.hide_namespace(KEYMAP_EDITOR_NAMESPACE);
+ },
+ );
+ }
+ },
+ )
+ .detach();
+ })
+ .detach();
+
+ register_serializable_item::<KeymapEditor>(cx);
+}
+
+pub struct KeymapEventChannel {}
+
+impl Global for KeymapEventChannel {}
+
+impl KeymapEventChannel {
+ fn new() -> Self {
+ Self {}
+ }
+
+ pub fn trigger_keymap_changed(cx: &mut App) {
+ let Some(_event_channel) = cx.try_global::<Self>() else {
+ // Don't panic if no global is defined; this usually happens in tests.
+ return;
+ };
+ cx.update_global(|_event_channel: &mut Self, _| {
+ /* triggers observers in KeymapEditors */
+ });
+ }
+}
+
+struct KeymapEditor {
+ workspace: WeakEntity<Workspace>,
+ focus_handle: FocusHandle,
+ _keymap_subscription: Subscription,
+ keybindings: Vec<ProcessedKeybinding>,
+ // corresponds 1 to 1 with keybindings
+ string_match_candidates: Arc<Vec<StringMatchCandidate>>,
+ matches: Vec<StringMatch>,
+ table_interaction_state: Entity<TableInteractionState>,
+ filter_editor: Entity<Editor>,
+ selected_index: Option<usize>,
+}
+
+impl EventEmitter<()> for KeymapEditor {}
+
+impl Focusable for KeymapEditor {
+ fn focus_handle(&self, cx: &App) -> gpui::FocusHandle {
+ return self.filter_editor.focus_handle(cx);
+ }
+}
+
+impl KeymapEditor {
+ fn new(workspace: WeakEntity<Workspace>, window: &mut Window, cx: &mut Context<Self>) -> Self {
+ let focus_handle = cx.focus_handle();
+
+ let _keymap_subscription =
+ cx.observe_global::<KeymapEventChannel>(Self::update_keybindings);
+ let table_interaction_state = TableInteractionState::new(window, cx);
+
+ let filter_editor = cx.new(|cx| {
+ let mut editor = Editor::single_line(window, cx);
+ editor.set_placeholder_text("Filter action names…", cx);
+ editor
+ });
+
+ cx.subscribe(&filter_editor, |this, _, e: &EditorEvent, cx| {
+ if !matches!(e, EditorEvent::BufferEdited) {
+ return;
+ }
+
+ this.update_matches(cx);
+ })
+ .detach();
+
+ let mut this = Self {
+ workspace,
+ keybindings: vec![],
+ string_match_candidates: Arc::new(vec![]),
+ matches: vec![],
+ focus_handle: focus_handle.clone(),
+ _keymap_subscription,
+ table_interaction_state,
+ filter_editor,
+ selected_index: None,
+ };
+
+ this.update_keybindings(cx);
+
+ this
+ }
+
+ fn current_query(&self, cx: &mut Context<Self>) -> String {
+ self.filter_editor.read(cx).text(cx)
+ }
+
+ fn update_matches(&self, cx: &mut Context<Self>) {
+ let query = self.current_query(cx);
+
+ cx.spawn(async move |this, cx| Self::process_query(this, query, cx).await)
+ .detach();
+ }
+
+ async fn process_query(
+ this: WeakEntity<Self>,
+ query: String,
+ cx: &mut AsyncApp,
+ ) -> anyhow::Result<()> {
+ let query = command_palette::normalize_action_query(&query);
+ let (string_match_candidates, keybind_count) = this.read_with(cx, |this, _| {
+ (this.string_match_candidates.clone(), this.keybindings.len())
+ })?;
+ let executor = cx.background_executor().clone();
+ let mut matches = fuzzy::match_strings(
+ &string_match_candidates,
+ &query,
+ true,
+ true,
+ keybind_count,
+ &Default::default(),
+ executor,
+ )
+ .await;
+ this.update(cx, |this, cx| {
+ if query.is_empty() {
+ // apply default sort
+ // sorts by source precedence, and alphabetically by action name within each source
+ matches.sort_by_key(|match_item| {
+ let keybind = &this.keybindings[match_item.candidate_id];
+ let source = keybind.source.as_ref().map(|s| s.0);
+ use KeybindSource::*;
+ let source_precedence = match source {
+ Some(User) => 0,
+ Some(Vim) => 1,
+ Some(Base) => 2,
+ Some(Default) => 3,
+ None => 4,
+ };
+ return (source_precedence, keybind.action.as_ref());
+ });
+ }
+ this.selected_index.take();
+ this.scroll_to_item(0, ScrollStrategy::Top, cx);
+ this.matches = matches;
+ cx.notify();
+ })
+ }
+
+ fn process_bindings(
+ json_language: Arc<Language>,
+ rust_language: Arc<Language>,
+ cx: &mut App,
+ ) -> (Vec<ProcessedKeybinding>, Vec<StringMatchCandidate>) {
+ let key_bindings_ptr = cx.key_bindings();
+ let lock = key_bindings_ptr.borrow();
+ let key_bindings = lock.bindings();
+ let mut unmapped_action_names =
+ HashSet::from_iter(cx.all_action_names().into_iter().copied());
+ let action_documentation = cx.action_documentation();
+
+ let mut processed_bindings = Vec::new();
+ let mut string_match_candidates = Vec::new();
+
+ for key_binding in key_bindings {
+ let source = key_binding.meta().map(settings::KeybindSource::from_meta);
+
+ let keystroke_text = ui::text_for_keystrokes(key_binding.keystrokes(), cx);
+ let ui_key_binding = Some(
+ ui::KeyBinding::new_from_gpui(key_binding.clone(), cx)
+ .vim_mode(source == Some(settings::KeybindSource::Vim)),
+ );
+
+ let context = key_binding
+ .predicate()
+ .map(|predicate| {
+ KeybindContextString::Local(predicate.to_string().into(), rust_language.clone())
+ })
+ .unwrap_or(KeybindContextString::Global);
+
+ let source = source.map(|source| (source, source.name().into()));
+
+ let action_name = key_binding.action().name();
+ unmapped_action_names.remove(&action_name);
+ let action_input = key_binding
+ .action_input()
+ .map(|input| SyntaxHighlightedText::new(input, json_language.clone()));
+ let action_docs = action_documentation.get(action_name).copied();
+
+ let index = processed_bindings.len();
+ let string_match_candidate = StringMatchCandidate::new(index, &action_name);
+ processed_bindings.push(ProcessedKeybinding {
+ keystroke_text: keystroke_text.into(),
+ ui_key_binding,
+ action: action_name.into(),
+ action_input,
+ action_docs,
+ context: Some(context),
+ source,
+ });
+ string_match_candidates.push(string_match_candidate);
+ }
+
+ let empty = SharedString::new_static("");
+ for action_name in unmapped_action_names.into_iter() {
+ let index = processed_bindings.len();
+ let string_match_candidate = StringMatchCandidate::new(index, &action_name);
+ processed_bindings.push(ProcessedKeybinding {
+ keystroke_text: empty.clone(),
+ ui_key_binding: None,
+ action: action_name.into(),
+ action_input: None,
+ action_docs: action_documentation.get(action_name).copied(),
+ context: None,
+ source: None,
+ });
+ string_match_candidates.push(string_match_candidate);
+ }
+
+ (processed_bindings, string_match_candidates)
+ }
+
+ fn update_keybindings(&mut self, cx: &mut Context<KeymapEditor>) {
+ let workspace = self.workspace.clone();
+ cx.spawn(async move |this, cx| {
+ let json_language = Self::load_json_language(workspace.clone(), cx).await;
+ let rust_language = Self::load_rust_language(workspace.clone(), cx).await;
+
+ let query = this.update(cx, |this, cx| {
+ let (key_bindings, string_match_candidates) =
+ Self::process_bindings(json_language, rust_language, cx);
+ this.keybindings = key_bindings;
+ this.string_match_candidates = Arc::new(string_match_candidates);
+ this.matches = this
+ .string_match_candidates
+ .iter()
+ .enumerate()
+ .map(|(ix, candidate)| StringMatch {
+ candidate_id: ix,
+ score: 0.0,
+ positions: vec![],
+ string: candidate.string.clone(),
+ })
+ .collect();
+ this.current_query(cx)
+ })?;
+ // calls cx.notify
+ Self::process_query(this, query, cx).await
+ })
+ .detach_and_log_err(cx);
+ }
+
+ async fn load_json_language(
+ workspace: WeakEntity<Workspace>,
+ cx: &mut AsyncApp,
+ ) -> Arc<Language> {
+ let json_language_task = workspace
+ .read_with(cx, |workspace, cx| {
+ workspace
+ .project()
+ .read(cx)
+ .languages()
+ .language_for_name("JSON")
+ })
+ .context("Failed to load JSON language")
+ .log_err();
+ let json_language = match json_language_task {
+ Some(task) => task.await.context("Failed to load JSON language").log_err(),
+ None => None,
+ };
+ return json_language.unwrap_or_else(|| {
+ Arc::new(Language::new(
+ LanguageConfig {
+ name: "JSON".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_json::LANGUAGE.into()),
+ ))
+ });
+ }
+
+ async fn load_rust_language(
+ workspace: WeakEntity<Workspace>,
+ cx: &mut AsyncApp,
+ ) -> Arc<Language> {
+ let rust_language_task = workspace
+ .read_with(cx, |workspace, cx| {
+ workspace
+ .project()
+ .read(cx)
+ .languages()
+ .language_for_name("Rust")
+ })
+ .context("Failed to load Rust language")
+ .log_err();
+ let rust_language = match rust_language_task {
+ Some(task) => task.await.context("Failed to load Rust language").log_err(),
+ None => None,
+ };
+ return rust_language.unwrap_or_else(|| {
+ Arc::new(Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::LANGUAGE.into()),
+ ))
+ });
+ }
+
+ fn dispatch_context(&self, _window: &Window, _cx: &Context<Self>) -> KeyContext {
+ let mut dispatch_context = KeyContext::new_with_defaults();
+ dispatch_context.add("KeymapEditor");
+ dispatch_context.add("menu");
+
+ dispatch_context
+ }
+
+ fn scroll_to_item(&self, index: usize, strategy: ScrollStrategy, cx: &mut App) {
+ let index = usize::min(index, self.matches.len().saturating_sub(1));
+ self.table_interaction_state.update(cx, |this, _cx| {
+ this.scroll_handle.scroll_to_item(index, strategy);
+ });
+ }
+
+ fn focus_search(
+ &mut self,
+ _: &search::FocusSearch,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if !self
+ .filter_editor
+ .focus_handle(cx)
+ .contains_focused(window, cx)
+ {
+ window.focus(&self.filter_editor.focus_handle(cx));
+ } else {
+ self.filter_editor.update(cx, |editor, cx| {
+ editor.select_all(&Default::default(), window, cx);
+ });
+ }
+ self.selected_index.take();
+ }
+
+ fn selected_binding(&self) -> Option<&ProcessedKeybinding> {
+ self.selected_index
+ .and_then(|match_index| self.matches.get(match_index))
+ .map(|r#match| r#match.candidate_id)
+ .and_then(|keybind_index| self.keybindings.get(keybind_index))
+ }
+
+ fn select_next(&mut self, _: &menu::SelectNext, window: &mut Window, cx: &mut Context<Self>) {
+ if let Some(selected) = self.selected_index {
+ let selected = selected + 1;
+ if selected >= self.matches.len() {
+ self.select_last(&Default::default(), window, cx);
+ } else {
+ self.selected_index = Some(selected);
+ self.scroll_to_item(selected, ScrollStrategy::Center, cx);
+ cx.notify();
+ }
+ } else {
+ self.select_first(&Default::default(), window, cx);
+ }
+ }
+
+ fn select_previous(
+ &mut self,
+ _: &menu::SelectPrevious,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(selected) = self.selected_index {
+ if selected == 0 {
+ return;
+ }
+
+ let selected = selected - 1;
+
+ if selected >= self.matches.len() {
+ self.select_last(&Default::default(), window, cx);
+ } else {
+ self.selected_index = Some(selected);
+ self.scroll_to_item(selected, ScrollStrategy::Center, cx);
+ cx.notify();
+ }
+ } else {
+ self.select_last(&Default::default(), window, cx);
+ }
+ }
+
+ fn select_first(
+ &mut self,
+ _: &menu::SelectFirst,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self.matches.get(0).is_some() {
+ self.selected_index = Some(0);
+ self.scroll_to_item(0, ScrollStrategy::Center, cx);
+ cx.notify();
+ }
+ }
+
+ fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context<Self>) {
+ if self.matches.last().is_some() {
+ let index = self.matches.len() - 1;
+ self.selected_index = Some(index);
+ self.scroll_to_item(index, ScrollStrategy::Center, cx);
+ cx.notify();
+ }
+ }
+
+ fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
+ self.edit_selected_keybinding(window, cx);
+ }
+
+ fn edit_selected_keybinding(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ let Some(keybind) = self.selected_binding() else {
+ return;
+ };
+ self.workspace
+ .update(cx, |workspace, cx| {
+ let fs = workspace.app_state().fs.clone();
+ workspace.toggle_modal(window, cx, |window, cx| {
+ let modal = KeybindingEditorModal::new(keybind.clone(), fs, window, cx);
+ window.focus(&modal.focus_handle(cx));
+ modal
+ });
+ })
+ .log_err();
+ }
+
+ fn edit_binding(&mut self, _: &EditBinding, window: &mut Window, cx: &mut Context<Self>) {
+ self.edit_selected_keybinding(window, cx);
+ }
+
+ fn copy_context_to_clipboard(
+ &mut self,
+ _: &CopyContext,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let context = self
+ .selected_binding()
+ .and_then(|binding| binding.context.as_ref())
+ .and_then(KeybindContextString::local_str)
+ .map(|context| context.to_string());
+ let Some(context) = context else {
+ return;
+ };
+ cx.write_to_clipboard(gpui::ClipboardItem::new_string(context.clone()));
+ }
+
+ fn copy_action_to_clipboard(
+ &mut self,
+ _: &CopyAction,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let action = self
+ .selected_binding()
+ .map(|binding| binding.action.to_string());
+ let Some(action) = action else {
+ return;
+ };
+ cx.write_to_clipboard(gpui::ClipboardItem::new_string(action.clone()));
+ }
+}
+
+#[derive(Clone)]
+struct ProcessedKeybinding {
+ keystroke_text: SharedString,
+ ui_key_binding: Option<ui::KeyBinding>,
+ action: SharedString,
+ action_input: Option<SyntaxHighlightedText>,
+ action_docs: Option<&'static str>,
+ context: Option<KeybindContextString>,
+ source: Option<(KeybindSource, SharedString)>,
+}
+
+#[derive(Clone, Debug, IntoElement)]
+enum KeybindContextString {
+ Global,
+ Local(SharedString, Arc<Language>),
+}
+
+impl KeybindContextString {
+ const GLOBAL: SharedString = SharedString::new_static("<global>");
+
+ pub fn local(&self) -> Option<&SharedString> {
+ match self {
+ KeybindContextString::Global => None,
+ KeybindContextString::Local(name, _) => Some(name),
+ }
+ }
+
+ pub fn local_str(&self) -> Option<&str> {
+ match self {
+ KeybindContextString::Global => None,
+ KeybindContextString::Local(name, _) => Some(name),
+ }
+ }
+}
+
+impl RenderOnce for KeybindContextString {
+ fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
+ match self {
+ KeybindContextString::Global => StyledText::new(KeybindContextString::GLOBAL.clone())
+ .with_highlights([(
+ 0..KeybindContextString::GLOBAL.len(),
+ gpui::HighlightStyle::color(_cx.theme().colors().text_muted),
+ )])
+ .into_any_element(),
+ KeybindContextString::Local(name, language) => {
+ SyntaxHighlightedText::new(name, language).into_any_element()
+ }
+ }
+ }
+}
+
+impl Item for KeymapEditor {
+ type Event = ();
+
+ fn tab_content_text(&self, _detail: usize, _cx: &App) -> ui::SharedString {
+ "Keymap Editor".into()
+ }
+}
+
+impl Render for KeymapEditor {
+ fn render(&mut self, window: &mut Window, cx: &mut ui::Context<Self>) -> impl ui::IntoElement {
+ let row_count = self.matches.len();
+ let theme = cx.theme();
+
+ v_flex()
+ .id("keymap-editor")
+ .track_focus(&self.focus_handle)
+ .key_context(self.dispatch_context(window, cx))
+ .on_action(cx.listener(Self::select_next))
+ .on_action(cx.listener(Self::select_previous))
+ .on_action(cx.listener(Self::select_first))
+ .on_action(cx.listener(Self::select_last))
+ .on_action(cx.listener(Self::focus_search))
+ .on_action(cx.listener(Self::confirm))
+ .on_action(cx.listener(Self::edit_binding))
+ .on_action(cx.listener(Self::copy_action_to_clipboard))
+ .on_action(cx.listener(Self::copy_context_to_clipboard))
+ .size_full()
+ .p_2()
+ .gap_1()
+ .bg(theme.colors().editor_background)
+ .child(
+ h_flex()
+ .key_context({
+ let mut context = KeyContext::new_with_defaults();
+ context.add("BufferSearchBar");
+ context
+ })
+ .h_8()
+ .pl_2()
+ .pr_1()
+ .py_1()
+ .border_1()
+ .border_color(theme.colors().border)
+ .rounded_lg()
+ .child(self.filter_editor.clone()),
+ )
+ .child(
+ Table::new()
+ .interactable(&self.table_interaction_state)
+ .striped()
+ .column_widths([rems(16.), rems(16.), rems(16.), rems(32.), rems(8.)])
+ .header(["Action", "Arguments", "Keystrokes", "Context", "Source"])
+ .uniform_list(
+ "keymap-editor-table",
+ row_count,
+ cx.processor(move |this, range: Range<usize>, _window, _cx| {
+ range
+ .filter_map(|index| {
+ let candidate_id = this.matches.get(index)?.candidate_id;
+ let binding = &this.keybindings[candidate_id];
+
+ let action = div()
+ .child(binding.action.clone())
+ .id(("keymap action", index))
+ .tooltip({
+ let action_name = binding.action.clone();
+ let action_docs = binding.action_docs;
+ move |_, cx| {
+ let action_tooltip = Tooltip::new(
+ command_palette::humanize_action_name(
+ &action_name,
+ ),
+ );
+ let action_tooltip = match action_docs {
+ Some(docs) => action_tooltip.meta(docs),
+ None => action_tooltip,
+ };
+ cx.new(|_| action_tooltip).into()
+ }
+ })
+ .into_any_element();
+ let keystrokes = binding.ui_key_binding.clone().map_or(
+ binding.keystroke_text.clone().into_any_element(),
+ IntoElement::into_any_element,
+ );
+ let action_input = binding
+ .action_input
+ .clone()
+ .map_or(gpui::Empty.into_any_element(), |input| {
+ input.into_any_element()
+ });
+ let context = binding
+ .context
+ .clone()
+ .map_or(gpui::Empty.into_any_element(), |context| {
+ context.into_any_element()
+ });
+ let source = binding
+ .source
+ .clone()
+ .map(|(_source, name)| name)
+ .unwrap_or_default()
+ .into_any_element();
+ Some([action, action_input, keystrokes, context, source])
+ })
+ .collect()
+ }),
+ )
+ .map_row(
+ cx.processor(|this, (row_index, row): (usize, Div), _window, cx| {
+ let is_selected = this.selected_index == Some(row_index);
+ let row = row
+ .id(("keymap-table-row", row_index))
+ .on_click(cx.listener(move |this, _event, _window, _cx| {
+ this.selected_index = Some(row_index);
+ }))
+ .border_2()
+ .border_color(transparent_black())
+ .when(is_selected, |row| {
+ row.border_color(cx.theme().colors().panel_focused_border)
+ });
+
+ right_click_menu(("keymap-table-row-menu", row_index))
+ .trigger({
+ let this = cx.weak_entity();
+ move |is_menu_open: bool, _window, cx| {
+ if is_menu_open {
+ this.update(cx, |this, cx| {
+ if this.selected_index != Some(row_index) {
+ this.selected_index = Some(row_index);
+ cx.notify();
+ }
+ })
+ .ok();
+ }
+ row
+ }
+ })
+ .menu({
+ let this = cx.weak_entity();
+ move |window, cx| build_keybind_context_menu(&this, window, cx)
+ })
+ .into_any_element()
+ }),
+ ),
+ )
+ }
+}
+
+#[derive(Debug, Clone, IntoElement)]
+struct SyntaxHighlightedText {
+ text: SharedString,
+ language: Arc<Language>,
+}
+
+impl SyntaxHighlightedText {
+ pub fn new(text: impl Into<SharedString>, language: Arc<Language>) -> Self {
+ Self {
+ text: text.into(),
+ language,
+ }
+ }
+}
+
+impl RenderOnce for SyntaxHighlightedText {
+ fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
+ let text_style = window.text_style();
+ let syntax_theme = cx.theme().syntax();
+
+ let text = self.text.clone();
+
+ let highlights = self
+ .language
+ .highlight_text(&text.as_ref().into(), 0..text.len());
+ let mut runs = Vec::with_capacity(highlights.len());
+ let mut offset = 0;
+
+ for (highlight_range, highlight_id) in highlights {
+ // Add un-highlighted text before the current highlight
+ if highlight_range.start > offset {
+ runs.push(text_style.to_run(highlight_range.start - offset));
+ }
+
+ let mut run_style = text_style.clone();
+ if let Some(highlight_style) = highlight_id.style(syntax_theme) {
+ run_style = run_style.highlight(highlight_style);
+ }
+ // add the highlighted range
+ runs.push(run_style.to_run(highlight_range.len()));
+ offset = highlight_range.end;
+ }
+
+ // Add any remaining un-highlighted text
+ if offset < text.len() {
+ runs.push(text_style.to_run(text.len() - offset));
+ }
+
+ return StyledText::new(text).with_runs(runs);
+ }
+}
+
+struct KeybindingEditorModal {
+ editing_keybind: ProcessedKeybinding,
+ keybind_editor: Entity<KeystrokeInput>,
+ fs: Arc<dyn Fs>,
+ error: Option<String>,
+}
+
+impl ModalView for KeybindingEditorModal {}
+
+impl EventEmitter<DismissEvent> for KeybindingEditorModal {}
+
+impl Focusable for KeybindingEditorModal {
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ self.keybind_editor.focus_handle(cx)
+ }
+}
+
+impl KeybindingEditorModal {
+ pub fn new(
+ editing_keybind: ProcessedKeybinding,
+ fs: Arc<dyn Fs>,
+ _window: &mut Window,
+ cx: &mut App,
+ ) -> Self {
+ let keybind_editor = cx.new(KeystrokeInput::new);
+ Self {
+ editing_keybind,
+ fs,
+ keybind_editor,
+ error: None,
+ }
+ }
+}
+
+impl Render for KeybindingEditorModal {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let theme = cx.theme().colors();
+
+ return v_flex()
+ .w(rems(34.))
+ .elevation_3(cx)
+ .child(
+ v_flex()
+ .p_3()
+ .gap_2()
+ .child(
+ v_flex().child(Label::new("Edit Keystroke")).child(
+ Label::new(
+ "Input the desired keystroke for the selected action and hit save.",
+ )
+ .color(Color::Muted),
+ ),
+ )
+ .child(self.keybind_editor.clone()),
+ )
+ .child(
+ h_flex()
+ .p_2()
+ .w_full()
+ .gap_1()
+ .justify_end()
+ .border_t_1()
+ .border_color(cx.theme().colors().border_variant)
+ .child(
+ Button::new("cancel", "Cancel")
+ .on_click(cx.listener(|_, _, _, cx| cx.emit(DismissEvent))),
+ )
+ .child(Button::new("save-btn", "Save").on_click(cx.listener(
+ |this, _event, _window, cx| {
+ let existing_keybind = this.editing_keybind.clone();
+ let fs = this.fs.clone();
+ let new_keystrokes = this
+ .keybind_editor
+ .read_with(cx, |editor, _| editor.keystrokes.clone());
+ if new_keystrokes.is_empty() {
+ this.error = Some("Keystrokes cannot be empty".to_string());
+ cx.notify();
+ return;
+ }
+ let tab_size = cx.global::<settings::SettingsStore>().json_tab_size();
+ cx.spawn(async move |this, cx| {
+ if let Err(err) = save_keybinding_update(
+ existing_keybind,
+ &new_keystrokes,
+ &fs,
+ tab_size,
+ )
+ .await
+ {
+ this.update(cx, |this, cx| {
+ this.error = Some(err.to_string());
+ cx.notify();
+ })
+ .log_err();
+ }
+ })
+ .detach();
+ },
+ ))),
+ )
+ .when_some(self.error.clone(), |this, error| {
+ this.child(
+ div()
+ .bg(theme.background)
+ .border_color(theme.border)
+ .border_2()
+ .rounded_md()
+ .child(error),
+ )
+ });
+ }
+}
+
+async fn save_keybinding_update(
+ existing: ProcessedKeybinding,
+ new_keystrokes: &[Keystroke],
+ fs: &Arc<dyn Fs>,
+ tab_size: usize,
+) -> anyhow::Result<()> {
+ let keymap_contents = settings::KeymapFile::load_keymap_file(fs)
+ .await
+ .context("Failed to load keymap file")?;
+
+ let existing_keystrokes = existing
+ .ui_key_binding
+ .as_ref()
+ .map(|keybinding| keybinding.keystrokes.as_slice())
+ .unwrap_or_default();
+
+ let context = existing
+ .context
+ .as_ref()
+ .and_then(KeybindContextString::local_str);
+
+ let input = existing
+ .action_input
+ .as_ref()
+ .map(|input| input.text.as_ref());
+
+ let operation = if existing.ui_key_binding.is_some() {
+ settings::KeybindUpdateOperation::Replace {
+ target: settings::KeybindUpdateTarget {
+ context,
+ keystrokes: existing_keystrokes,
+ action_name: &existing.action,
+ use_key_equivalents: false,
+ input,
+ },
+ target_source: existing
+ .source
+ .map(|(source, _name)| source)
+ .unwrap_or(KeybindSource::User),
+ source: settings::KeybindUpdateTarget {
+ context,
+ keystrokes: new_keystrokes,
+ action_name: &existing.action,
+ use_key_equivalents: false,
+ input,
+ },
+ }
+ } else {
+ anyhow::bail!("Adding new bindings not implemented yet");
+ };
+ let updated_keymap_contents =
+ settings::KeymapFile::update_keybinding(operation, keymap_contents, tab_size)
+ .context("Failed to update keybinding")?;
+ fs.atomic_write(paths::keymap_file().clone(), updated_keymap_contents)
+ .await
+ .context("Failed to write keymap file")?;
+ Ok(())
+}
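
Conceptually, for a binding that already lives in the user keymap file, the `Replace` operation built above rewrites the keystroke key of the matching entry. The keystrokes, action name, and context below are illustrative only; the actual rewrite, including bindings that originate from the default or vim keymaps, is performed by `KeymapFile::update_keybinding`:

// before: [{ "context": "Editor", "bindings": { "ctrl-a": "zed::SomeAction" } }]
// after:  [{ "context": "Editor", "bindings": { "ctrl-b": "zed::SomeAction" } }]
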
+
+struct KeystrokeInput {
+ keystrokes: Vec<Keystroke>,
+ focus_handle: FocusHandle,
+}
+
+impl KeystrokeInput {
+ fn new(cx: &mut Context<Self>) -> Self {
+ let focus_handle = cx.focus_handle();
+ Self {
+ keystrokes: Vec::new(),
+ focus_handle,
+ }
+ }
+
+ fn on_modifiers_changed(
+ &mut self,
+ event: &ModifiersChangedEvent,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(last) = self.keystrokes.last_mut()
+ && last.key.is_empty()
+ {
+ if !event.modifiers.modified() {
+ self.keystrokes.pop();
+ } else {
+ last.modifiers = event.modifiers;
+ }
+ } else {
+ self.keystrokes.push(Keystroke {
+ modifiers: event.modifiers,
+ key: "".to_string(),
+ key_char: None,
+ });
+ }
+ cx.stop_propagation();
+ cx.notify();
+ }
+
+ fn on_key_down(
+ &mut self,
+ event: &gpui::KeyDownEvent,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if event.is_held {
+ return;
+ }
+ if let Some(last) = self.keystrokes.last_mut()
+ && last.key.is_empty()
+ {
+ *last = event.keystroke.clone();
+ } else {
+ self.keystrokes.push(event.keystroke.clone());
+ }
+ cx.stop_propagation();
+ cx.notify();
+ }
+
+ fn on_key_up(
+ &mut self,
+ event: &gpui::KeyUpEvent,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(last) = self.keystrokes.last_mut()
+ && !last.key.is_empty()
+ && last.modifiers == event.keystroke.modifiers
+ {
+ self.keystrokes.push(Keystroke {
+ modifiers: event.keystroke.modifiers,
+ key: "".to_string(),
+ key_char: None,
+ });
+ }
+ cx.stop_propagation();
+ cx.notify();
+ }
+}
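
The three handlers above form a small state machine in which a trailing `Keystroke` with an empty `key` means "modifiers currently held". A plausible capture sequence for `ctrl-shift-p`, illustrative rather than taken from this diff:

// modifiers changed (ctrl held)        -> keystrokes == [ctrl-]                       pending entry pushed
// modifiers changed (ctrl+shift held)  -> keystrokes == [ctrl-shift-]                 pending entry updated in place
// key down "p"                         -> keystrokes == [ctrl-shift-p]                pending entry completed
// key up "p" (modifiers still held)    -> keystrokes == [ctrl-shift-p, ctrl-shift-]   new pending entry pushed
// modifiers released                   -> keystrokes == [ctrl-shift-p]                empty pending entry popped
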
+
+impl Focusable for KeystrokeInput {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl Render for KeystrokeInput {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let colors = cx.theme().colors();
+
+ return h_flex()
+ .id("keybinding_input")
+ .track_focus(&self.focus_handle)
+ .on_modifiers_changed(cx.listener(Self::on_modifiers_changed))
+ .on_key_down(cx.listener(Self::on_key_down))
+ .on_key_up(cx.listener(Self::on_key_up))
+ .focus(|mut style| {
+ style.border_color = Some(colors.border_focused);
+ style
+ })
+ .py_2()
+ .px_3()
+ .gap_2()
+ .min_h_8()
+ .w_full()
+ .justify_between()
+ .bg(colors.editor_background)
+ .border_1()
+ .rounded_md()
+ .flex_1()
+ .overflow_hidden()
+ .child(
+ h_flex()
+ .w_full()
+ .min_w_0()
+ .justify_center()
+ .flex_wrap()
+ .gap(ui::DynamicSpacing::Base04.rems(cx))
+ .children(self.keystrokes.iter().map(|keystroke| {
+ h_flex().children(ui::render_keystroke(
+ keystroke,
+ None,
+ Some(rems(0.875).into()),
+ ui::PlatformStyle::platform(),
+ false,
+ ))
+ })),
+ )
+ .child(
+ h_flex()
+ .gap_0p5()
+ .flex_none()
+ .child(
+ IconButton::new("backspace-btn", IconName::Delete)
+ .tooltip(Tooltip::text("Delete Keystroke"))
+ .on_click(cx.listener(|this, _event, _window, cx| {
+ this.keystrokes.pop();
+ cx.notify();
+ })),
+ )
+ .child(
+ IconButton::new("clear-btn", IconName::Eraser)
+ .tooltip(Tooltip::text("Clear Keystrokes"))
+ .on_click(cx.listener(|this, _event, _window, cx| {
+ this.keystrokes.clear();
+ cx.notify();
+ })),
+ ),
+ );
+ }
+}
+
+fn build_keybind_context_menu(
+ this: &WeakEntity<KeymapEditor>,
+ window: &mut Window,
+ cx: &mut App,
+) -> Entity<ContextMenu> {
+ ContextMenu::build(window, cx, |menu, _window, cx| {
+ let Some(this) = this.upgrade() else {
+ return menu;
+ };
+ let selected_binding = this.read_with(cx, |this, _cx| this.selected_binding().cloned());
+ let Some(selected_binding) = selected_binding else {
+ return menu;
+ };
+
+ let selected_binding_has_context = selected_binding
+ .context
+ .as_ref()
+ .and_then(KeybindContextString::local)
+ .is_some();
+
+ menu.action("Edit Binding", Box::new(EditBinding))
+ .action("Copy action", Box::new(CopyAction))
+ .action_disabled_when(
+ !selected_binding_has_context,
+ "Copy Context",
+ Box::new(CopyContext),
+ )
+ })
+}
+
+impl SerializableItem for KeymapEditor {
+ fn serialized_item_kind() -> &'static str {
+ "KeymapEditor"
+ }
+
+ fn cleanup(
+ workspace_id: workspace::WorkspaceId,
+ alive_items: Vec<workspace::ItemId>,
+ _window: &mut Window,
+ cx: &mut App,
+ ) -> gpui::Task<gpui::Result<()>> {
+ workspace::delete_unloaded_items(
+ alive_items,
+ workspace_id,
+ "keybinding_editors",
+ &KEYBINDING_EDITORS,
+ cx,
+ )
+ }
+
+ fn deserialize(
+ _project: Entity<project::Project>,
+ workspace: WeakEntity<Workspace>,
+ workspace_id: workspace::WorkspaceId,
+ item_id: workspace::ItemId,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> gpui::Task<gpui::Result<Entity<Self>>> {
+ window.spawn(cx, async move |cx| {
+ if KEYBINDING_EDITORS
+ .get_keybinding_editor(item_id, workspace_id)?
+ .is_some()
+ {
+ cx.update(|window, cx| cx.new(|cx| KeymapEditor::new(workspace, window, cx)))
+ } else {
+ Err(anyhow!("No keybinding editor to deserialize"))
+ }
+ })
+ }
+
+ fn serialize(
+ &mut self,
+ workspace: &mut Workspace,
+ item_id: workspace::ItemId,
+ _closing: bool,
+ _window: &mut Window,
+ cx: &mut ui::Context<Self>,
+ ) -> Option<gpui::Task<gpui::Result<()>>> {
+ let workspace_id = workspace.database_id()?;
+ Some(cx.background_spawn(async move {
+ KEYBINDING_EDITORS
+ .save_keybinding_editor(item_id, workspace_id)
+ .await
+ }))
+ }
+
+ fn should_serialize(&self, _event: &Self::Event) -> bool {
+ false
+ }
+}
+
+mod persistence {
+ use db::{define_connection, query, sqlez_macros::sql};
+ use workspace::WorkspaceDb;
+
+ define_connection! {
+ pub static ref KEYBINDING_EDITORS: KeybindingEditorDb<WorkspaceDb> =
+ &[sql!(
+ CREATE TABLE keybinding_editors (
+ workspace_id INTEGER,
+ item_id INTEGER UNIQUE,
+
+ PRIMARY KEY(workspace_id, item_id),
+ FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ ON DELETE CASCADE
+ ) STRICT;
+ )];
+ }
+
+ impl KeybindingEditorDb {
+ query! {
+ pub async fn save_keybinding_editor(
+ item_id: workspace::ItemId,
+ workspace_id: workspace::WorkspaceId
+ ) -> Result<()> {
+ INSERT OR REPLACE INTO keybinding_editors(item_id, workspace_id)
+ VALUES (?, ?)
+ }
+ }
+
+ query! {
+ pub fn get_keybinding_editor(
+ item_id: workspace::ItemId,
+ workspace_id: workspace::WorkspaceId
+ ) -> Result<Option<workspace::ItemId>> {
+ SELECT item_id
+ FROM keybinding_editors
+ WHERE item_id = ? AND workspace_id = ?
+ }
+ }
+ }
+}
@@ -20,26 +20,39 @@ use workspace::{Workspace, with_active_or_new_workspace};
use crate::appearance_settings_controls::AppearanceSettingsControls;
+pub mod keybindings;
+pub mod ui_components;
+
pub struct SettingsUiFeatureFlag;
impl FeatureFlag for SettingsUiFeatureFlag {
const NAME: &'static str = "settings-ui";
}
+/// Imports settings from Visual Studio Code.
#[derive(Copy, Clone, Debug, Default, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct ImportVsCodeSettings {
#[serde(default)]
pub skip_prompt: bool,
}
+/// Imports settings from Cursor editor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct ImportCursorSettings {
#[serde(default)]
pub skip_prompt: bool,
}
-actions!(zed, [OpenSettingsEditor]);
+actions!(
+ zed,
+ [
+ /// Opens the settings editor.
+ OpenSettingsEditor
+ ]
+);
pub fn init(cx: &mut App) {
cx.on_action(|_: &OpenSettingsEditor, cx| {
@@ -121,6 +134,8 @@ pub fn init(cx: &mut App) {
.detach();
})
.detach();
+
+ keybindings::init(cx);
}
async fn handle_import_vscode_settings(
@@ -0,0 +1 @@
+pub mod table;
@@ -0,0 +1,864 @@
+use std::{ops::Range, rc::Rc, time::Duration};
+
+use editor::{EditorSettings, ShowScrollbar, scroll::ScrollbarAutoHide};
+use gpui::{
+ AppContext, Axis, Context, Entity, FocusHandle, Length, ListHorizontalSizingBehavior,
+ ListSizingBehavior, MouseButton, Task, UniformListScrollHandle, WeakEntity, transparent_black,
+ uniform_list,
+};
+use settings::Settings as _;
+use ui::{
+ ActiveTheme as _, AnyElement, App, Button, ButtonCommon as _, ButtonStyle, Color, Component,
+ ComponentScope, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator,
+ InteractiveElement as _, IntoElement, ParentElement, Pixels, RegisterComponent, RenderOnce,
+ Scrollbar, ScrollbarState, StatefulInteractiveElement as _, Styled, StyledExt as _,
+ StyledTypography, Window, div, example_group_with_title, h_flex, px, single_example, v_flex,
+};
+
+struct UniformListData<const COLS: usize> {
+ render_item_fn: Box<dyn Fn(Range<usize>, &mut Window, &mut App) -> Vec<[AnyElement; COLS]>>,
+ element_id: ElementId,
+ row_count: usize,
+}
+
+enum TableContents<const COLS: usize> {
+ Vec(Vec<[AnyElement; COLS]>),
+ UniformList(UniformListData<COLS>),
+}
+
+impl<const COLS: usize> TableContents<COLS> {
+ fn rows_mut(&mut self) -> Option<&mut Vec<[AnyElement; COLS]>> {
+ match self {
+ TableContents::Vec(rows) => Some(rows),
+ TableContents::UniformList(_) => None,
+ }
+ }
+
+ fn len(&self) -> usize {
+ match self {
+ TableContents::Vec(rows) => rows.len(),
+ TableContents::UniformList(data) => data.row_count,
+ }
+ }
+}
+
+pub struct TableInteractionState {
+ pub focus_handle: FocusHandle,
+ pub scroll_handle: UniformListScrollHandle,
+ pub horizontal_scrollbar: ScrollbarProperties,
+ pub vertical_scrollbar: ScrollbarProperties,
+}
+
+impl TableInteractionState {
+ pub fn new(window: &mut Window, cx: &mut App) -> Entity<Self> {
+ cx.new(|cx| {
+ let focus_handle = cx.focus_handle();
+
+ cx.on_focus_out(&focus_handle, window, |this: &mut Self, _, window, cx| {
+ this.hide_scrollbars(window, cx);
+ })
+ .detach();
+
+ let scroll_handle = UniformListScrollHandle::new();
+ let vertical_scrollbar = ScrollbarProperties {
+ axis: Axis::Vertical,
+ state: ScrollbarState::new(scroll_handle.clone()).parent_entity(&cx.entity()),
+ show_scrollbar: false,
+ show_track: false,
+ auto_hide: false,
+ hide_task: None,
+ };
+
+ let horizontal_scrollbar = ScrollbarProperties {
+ axis: Axis::Horizontal,
+ state: ScrollbarState::new(scroll_handle.clone()).parent_entity(&cx.entity()),
+ show_scrollbar: false,
+ show_track: false,
+ auto_hide: false,
+ hide_task: None,
+ };
+
+ let mut this = Self {
+ focus_handle,
+ scroll_handle,
+ horizontal_scrollbar,
+ vertical_scrollbar,
+ };
+
+ this.update_scrollbar_visibility(cx);
+ this
+ })
+ }
+
+ fn update_scrollbar_visibility(&mut self, cx: &mut Context<Self>) {
+ let show_setting = EditorSettings::get_global(cx).scrollbar.show;
+
+ let scroll_handle = self.scroll_handle.0.borrow();
+
+ let autohide = |show: ShowScrollbar, cx: &mut Context<Self>| match show {
+ ShowScrollbar::Auto => true,
+ ShowScrollbar::System => cx
+ .try_global::<ScrollbarAutoHide>()
+ .map_or_else(|| cx.should_auto_hide_scrollbars(), |autohide| autohide.0),
+ ShowScrollbar::Always => false,
+ ShowScrollbar::Never => false,
+ };
+
+ let longest_item_width = scroll_handle.last_item_size.and_then(|size| {
+ (size.contents.width > size.item.width).then_some(size.contents.width)
+ });
+
+ // is there an item long enough that we should show a horizontal scrollbar?
+ let item_wider_than_container = if let Some(longest_item_width) = longest_item_width {
+ longest_item_width > px(scroll_handle.base_handle.bounds().size.width.0)
+ } else {
+ true
+ };
+
+ let show_scrollbar = match show_setting {
+ ShowScrollbar::Auto | ShowScrollbar::System | ShowScrollbar::Always => true,
+ ShowScrollbar::Never => false,
+ };
+ let show_vertical = show_scrollbar;
+
+ let show_horizontal = item_wider_than_container && show_scrollbar;
+
+ let show_horizontal_track =
+ show_horizontal && matches!(show_setting, ShowScrollbar::Always);
+
+ // TODO: we probably should hide the scroll track when the list doesn't need to scroll
+ let show_vertical_track = show_vertical && matches!(show_setting, ShowScrollbar::Always);
+
+ self.vertical_scrollbar = ScrollbarProperties {
+ axis: self.vertical_scrollbar.axis,
+ state: self.vertical_scrollbar.state.clone(),
+ show_scrollbar: show_vertical,
+ show_track: show_vertical_track,
+ auto_hide: autohide(show_setting, cx),
+ hide_task: None,
+ };
+
+ self.horizontal_scrollbar = ScrollbarProperties {
+ axis: self.horizontal_scrollbar.axis,
+ state: self.horizontal_scrollbar.state.clone(),
+ show_scrollbar: show_horizontal,
+ show_track: show_horizontal_track,
+ auto_hide: autohide(show_setting, cx),
+ hide_task: None,
+ };
+
+ cx.notify();
+ }
+
+ fn hide_scrollbars(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.horizontal_scrollbar.hide(window, cx);
+ self.vertical_scrollbar.hide(window, cx);
+ }
+
+ pub fn listener<E: ?Sized>(
+ this: &Entity<Self>,
+ f: impl Fn(&mut Self, &E, &mut Window, &mut Context<Self>) + 'static,
+ ) -> impl Fn(&E, &mut Window, &mut App) + 'static {
+ let view = this.downgrade();
+ move |e: &E, window: &mut Window, cx: &mut App| {
+ view.update(cx, |view, cx| f(view, e, window, cx)).ok();
+ }
+ }
+
+ fn render_vertical_scrollbar_track(
+ this: &Entity<Self>,
+ parent: Div,
+ scroll_track_size: Pixels,
+ cx: &mut App,
+ ) -> Div {
+ if !this.read(cx).vertical_scrollbar.show_track {
+ return parent;
+ }
+ let child = v_flex()
+ .h_full()
+ .flex_none()
+ .w(scroll_track_size)
+ .bg(cx.theme().colors().background)
+ .child(
+ div()
+ .size_full()
+ .flex_1()
+ .border_l_1()
+ .border_color(cx.theme().colors().border),
+ );
+ parent.child(child)
+ }
+
+ fn render_vertical_scrollbar(this: &Entity<Self>, parent: Div, cx: &mut App) -> Div {
+ if !this.read(cx).vertical_scrollbar.show_scrollbar {
+ return parent;
+ }
+ let child = div()
+ .id(("table-vertical-scrollbar", this.entity_id()))
+ .occlude()
+ .flex_none()
+ .h_full()
+ .cursor_default()
+ .absolute()
+ .right_0()
+ .top_0()
+ .bottom_0()
+ .w(px(12.))
+ .on_mouse_move(Self::listener(this, |_, _, _, cx| {
+ cx.notify();
+ cx.stop_propagation()
+ }))
+ .on_hover(|_, _, cx| {
+ cx.stop_propagation();
+ })
+ .on_mouse_up(
+ MouseButton::Left,
+ Self::listener(this, |this, _, window, cx| {
+ if !this.vertical_scrollbar.state.is_dragging()
+ && !this.focus_handle.contains_focused(window, cx)
+ {
+ this.vertical_scrollbar.hide(window, cx);
+ cx.notify();
+ }
+
+ cx.stop_propagation();
+ }),
+ )
+ .on_any_mouse_down(|_, _, cx| {
+ cx.stop_propagation();
+ })
+ .on_scroll_wheel(Self::listener(&this, |_, _, _, cx| {
+ cx.notify();
+ }))
+ .children(Scrollbar::vertical(
+ this.read(cx).vertical_scrollbar.state.clone(),
+ ));
+ parent.child(child)
+ }
+
+ /// Renders the horizontal scrollbar.
+ ///
+ /// The right offset determines how far to the right the scrollbar should
+ /// extend, which is useful for ensuring it doesn't collide with the
+ /// vertical scrollbar when that is visible.
+ fn render_horizontal_scrollbar(
+ this: &Entity<Self>,
+ parent: Div,
+ right_offset: Pixels,
+ cx: &mut App,
+ ) -> Div {
+ if !this.read(cx).horizontal_scrollbar.show_scrollbar {
+ return parent;
+ }
+ let child = div()
+ .id(("table-horizontal-scrollbar", this.entity_id()))
+ .occlude()
+ .flex_none()
+ .w_full()
+ .cursor_default()
+ .absolute()
+ .bottom_neg_px()
+ .left_0()
+ .right_0()
+ .pr(right_offset)
+ .on_mouse_move(Self::listener(this, |_, _, _, cx| {
+ cx.notify();
+ cx.stop_propagation()
+ }))
+ .on_hover(|_, _, cx| {
+ cx.stop_propagation();
+ })
+ .on_any_mouse_down(|_, _, cx| {
+ cx.stop_propagation();
+ })
+ .on_mouse_up(
+ MouseButton::Left,
+ Self::listener(this, |this, _, window, cx| {
+ if !this.horizontal_scrollbar.state.is_dragging()
+ && !this.focus_handle.contains_focused(window, cx)
+ {
+ this.horizontal_scrollbar.hide(window, cx);
+ cx.notify();
+ }
+
+ cx.stop_propagation();
+ }),
+ )
+ .on_scroll_wheel(Self::listener(this, |_, _, _, cx| {
+ cx.notify();
+ }))
+ .children(Scrollbar::horizontal(
+ this.read(cx).horizontal_scrollbar.state.clone(),
+ ));
+ parent.child(child)
+ }
+
+ fn render_horizontal_scrollbar_track(
+ this: &Entity<Self>,
+ parent: Div,
+ scroll_track_size: Pixels,
+ cx: &mut App,
+ ) -> Div {
+ if !this.read(cx).horizontal_scrollbar.show_track {
+ return parent;
+ }
+ let child = h_flex()
+ .w_full()
+ .h(scroll_track_size)
+ .flex_none()
+ .relative()
+ .child(
+ div()
+ .w_full()
+ .flex_1()
+ // for some reason the horizontal scrollbar is 1px
+ // taller than the vertical scrollbar??
+ .h(scroll_track_size - px(1.))
+ .bg(cx.theme().colors().background)
+ .border_t_1()
+ .border_color(cx.theme().colors().border),
+ )
+ .when(this.read(cx).vertical_scrollbar.show_track, |parent| {
+ parent
+ .child(
+ div()
+ .flex_none()
+ // -1px prevents a missing pixel between the two container borders
+ .w(scroll_track_size - px(1.))
+ .h_full(),
+ )
+ .child(
+ // HACK: Fill the missing 1px 🥲
+ div()
+ .absolute()
+ .right(scroll_track_size - px(1.))
+ .bottom(scroll_track_size - px(1.))
+ .size_px()
+ .bg(cx.theme().colors().border),
+ )
+ });
+
+ parent.child(child)
+ }
+}
+
+/// A table component
+#[derive(RegisterComponent, IntoElement)]
+pub struct Table<const COLS: usize = 3> {
+ striped: bool,
+ width: Option<Length>,
+ headers: Option<[AnyElement; COLS]>,
+ rows: TableContents<COLS>,
+ interaction_state: Option<WeakEntity<TableInteractionState>>,
+ column_widths: Option<[Length; COLS]>,
+ map_row: Option<Rc<dyn Fn((usize, Div), &mut Window, &mut App) -> AnyElement>>,
+}
+
+impl<const COLS: usize> Table<COLS> {
+ /// Creates a new, empty table with `COLS` columns, matching the number of headers provided.
+ pub fn new() -> Self {
+ Table {
+ striped: false,
+ width: None,
+ headers: None,
+ rows: TableContents::Vec(Vec::new()),
+ interaction_state: None,
+ column_widths: None,
+ map_row: None,
+ }
+ }
+
+ /// Enables uniform list rendering.
+ /// The provided function will be passed directly to the `uniform_list` element.
+ /// Therefore, if this method is called, any calls to [`Table::row`] made before or
+ /// after it will be ignored.
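+ ///
+ /// A minimal sketch of the expected callback shape (illustrative names, not from this change):
+ ///
+ /// ```ignore
+ /// let rows: Vec<[String; 2]> = vec![
+ ///     ["Alpha".into(), "1".into()],
+ ///     ["Beta".into(), "2".into()],
+ /// ];
+ /// let table = Table::<2>::new().uniform_list("example-rows", rows.len(), move |range, _window, _cx| {
+ ///     rows[range]
+ ///         .iter()
+ ///         .map(|[name, value]| {
+ ///             [
+ ///                 Label::new(name.clone()).into_any_element(),
+ ///                 Label::new(value.clone()).into_any_element(),
+ ///             ]
+ ///         })
+ ///         .collect()
+ /// });
+ /// ```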
+ pub fn uniform_list(
+ mut self,
+ id: impl Into<ElementId>,
+ row_count: usize,
+ render_item_fn: impl Fn(Range<usize>, &mut Window, &mut App) -> Vec<[AnyElement; COLS]>
+ + 'static,
+ ) -> Self {
+ self.rows = TableContents::UniformList(UniformListData {
+ element_id: id.into(),
+ row_count,
+ render_item_fn: Box::new(render_item_fn),
+ });
+ self
+ }
+
+ /// Enables row striping.
+ pub fn striped(mut self) -> Self {
+ self.striped = true;
+ self
+ }
+
+ /// Sets the width of the table.
+ /// Will enable horizontal scrolling if [`Self::interactable`] is also called.
+ pub fn width(mut self, width: impl Into<Length>) -> Self {
+ self.width = Some(width.into());
+ self
+ }
+
+ /// Enables interaction (primarily scrolling) with the table.
+ ///
+ /// Vertical scrolling will be enabled by default if the table is taller than its container.
+ ///
+ /// Horizontal scrolling will only be enabled if [`Self::width`] is also called; otherwise
+ /// the list will always shrink the table columns to fit their contents. That is, if [`Self::uniform_list`]
+ /// is used without a width but with [`Self::interactable`], the [`ListHorizontalSizingBehavior`] will
+ /// be set to [`ListHorizontalSizingBehavior::FitList`].
+ pub fn interactable(mut self, interaction_state: &Entity<TableInteractionState>) -> Self {
+ self.interaction_state = Some(interaction_state.downgrade());
+ self
+ }
+
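+ /// Sets the header cells, one element per column.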
+ pub fn header(mut self, headers: [impl IntoElement; COLS]) -> Self {
+ self.headers = Some(headers.map(IntoElement::into_any_element));
+ self
+ }
+
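+ /// Appends a row of cells; ignored when [`Self::uniform_list`] is used.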
+ pub fn row(mut self, items: [impl IntoElement; COLS]) -> Self {
+ if let Some(rows) = self.rows.rows_mut() {
+ rows.push(items.map(IntoElement::into_any_element));
+ }
+ self
+ }
+
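+ /// Sets an explicit width for each column; if not set, columns flex to share the available space.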
+ pub fn column_widths(mut self, widths: [impl Into<Length>; COLS]) -> Self {
+ self.column_widths = Some(widths.map(Into::into));
+ self
+ }
+
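+ /// Wraps each rendered row with the given callback, which receives the row index and the
+ /// row's `Div` and returns the element to render in its place; useful for per-row
+ /// interactivity or styling.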
+ pub fn map_row(
+ mut self,
+ callback: impl Fn((usize, Div), &mut Window, &mut App) -> AnyElement + 'static,
+ ) -> Self {
+ self.map_row = Some(Rc::new(callback));
+ self
+ }
+}
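+
+// Illustrative wiring of an interactive, uniformly rendered table (a sketch with assumed
+// names; constructing the `TableInteractionState` entity is not shown in this change):
+//
+//     let table = Table::<2>::new()
+//         .width(px(600.))
+//         .interactable(&interaction_state)
+//         .header(["Key", "Value"])
+//         .uniform_list("settings-rows", row_count, render_rows);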
+
+fn base_cell_style(width: Option<Length>, cx: &App) -> Div {
+ div()
+ .px_1p5()
+ .when_some(width, |this, width| this.w(width))
+ .when(width.is_none(), |this| this.flex_1())
+ .justify_start()
+ .text_ui(cx)
+ .whitespace_nowrap()
+ .text_ellipsis()
+ .overflow_hidden()
+}
+
+pub fn render_row<const COLS: usize>(
+ row_index: usize,
+ items: [impl IntoElement; COLS],
+ table_context: TableRenderContext<COLS>,
+ window: &mut Window,
+ cx: &mut App,
+) -> AnyElement {
+ let is_striped = table_context.striped;
+ let is_last = row_index == table_context.total_row_count - 1;
+ let bg = if row_index % 2 == 1 && is_striped {
+ Some(cx.theme().colors().text.opacity(0.05))
+ } else {
+ None
+ };
+ let column_widths = table_context
+ .column_widths
+ .map_or([None; COLS], |widths| widths.map(Some));
+
+ let row = div().w_full().child(
+ h_flex()
+ .id("table_row")
+ .w_full()
+ .justify_between()
+ .px_1p5()
+ .py_1()
+ .when_some(bg, |row, bg| row.bg(bg))
+ .when(!is_striped, |row| {
+ row.border_b_1()
+ .border_color(transparent_black())
+ .when(!is_last, |row| row.border_color(cx.theme().colors().border))
+ })
+ .children(
+ items
+ .map(IntoElement::into_any_element)
+ .into_iter()
+ .zip(column_widths)
+ .map(|(cell, width)| base_cell_style(width, cx).child(cell)),
+ ),
+ );
+
+ if let Some(map_row) = table_context.map_row {
+ map_row((row_index, row), window, cx)
+ } else {
+ row.into_any_element()
+ }
+}
+
+pub fn render_header<const COLS: usize>(
+ headers: [impl IntoElement; COLS],
+ table_context: TableRenderContext<COLS>,
+ cx: &mut App,
+) -> impl IntoElement {
+ let column_widths = table_context
+ .column_widths
+ .map_or([None; COLS], |widths| widths.map(Some));
+ div()
+ .flex()
+ .flex_row()
+ .items_center()
+ .justify_between()
+ .w_full()
+ .p_2()
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .children(
+ headers
+ .into_iter()
+ .zip(column_widths)
+ .map(|(h, width)| base_cell_style(width, cx).child(h)),
+ )
+}
+
+#[derive(Clone)]
+pub struct TableRenderContext<const COLS: usize> {
+ pub striped: bool,
+ pub total_row_count: usize,
+ pub column_widths: Option<[Length; COLS]>,
+ pub map_row: Option<Rc<dyn Fn((usize, Div), &mut Window, &mut App) -> AnyElement>>,
+}
+
+impl<const COLS: usize> TableRenderContext<COLS> {
+ fn new(table: &Table<COLS>) -> Self {
+ Self {
+ striped: table.striped,
+ total_row_count: table.rows.len(),
+ column_widths: table.column_widths,
+ map_row: table.map_row.clone(),
+ }
+ }
+}
+
+impl<const COLS: usize> RenderOnce for Table<COLS> {
+ fn render(mut self, window: &mut Window, cx: &mut App) -> impl IntoElement {
+ let table_context = TableRenderContext::new(&self);
+ let interaction_state = self.interaction_state.and_then(|state| state.upgrade());
+
+ let scroll_track_size = px(16.);
+ let h_scroll_offset = if interaction_state
+ .as_ref()
+ .is_some_and(|state| state.read(cx).vertical_scrollbar.show_scrollbar)
+ {
+ // magic number: offsets the horizontal scrollbar so it doesn't collide with the vertical one
+ px(3.)
+ } else {
+ px(0.)
+ };
+
+ let width = self.width;
+
+ let table = div()
+ .when_some(width, |this, width| this.w(width))
+ .h_full()
+ .v_flex()
+ .when_some(self.headers.take(), |this, headers| {
+ this.child(render_header(headers, table_context.clone(), cx))
+ })
+ .child(
+ div()
+ .flex_grow()
+ .w_full()
+ .relative()
+ .overflow_hidden()
+ .map(|parent| match self.rows {
+ TableContents::Vec(items) => {
+ parent.children(items.into_iter().enumerate().map(|(index, row)| {
+ render_row(index, row, table_context.clone(), window, cx)
+ }))
+ }
+ TableContents::UniformList(uniform_list_data) => parent.child(
+ uniform_list(
+ uniform_list_data.element_id,
+ uniform_list_data.row_count,
+ {
+ let render_item_fn = uniform_list_data.render_item_fn;
+ move |range: Range<usize>, window, cx| {
+ let elements = render_item_fn(range.clone(), window, cx);
+ elements
+ .into_iter()
+ .zip(range)
+ .map(|(row, row_index)| {
+ render_row(
+ row_index,
+ row,
+ table_context.clone(),
+ window,
+ cx,
+ )
+ })
+ .collect()
+ }
+ },
+ )
+ .size_full()
+ .flex_grow()
+ .with_sizing_behavior(ListSizingBehavior::Auto)
+ .with_horizontal_sizing_behavior(if width.is_some() {
+ ListHorizontalSizingBehavior::Unconstrained
+ } else {
+ ListHorizontalSizingBehavior::FitList
+ })
+ .when_some(
+ interaction_state.as_ref(),
+ |this, state| {
+ this.track_scroll(
+ state.read_with(cx, |s, _| s.scroll_handle.clone()),
+ )
+ },
+ ),
+ ),
+ })
+ .when_some(interaction_state.as_ref(), |this, interaction_state| {
+ this.map(|this| {
+ TableInteractionState::render_vertical_scrollbar_track(
+ interaction_state,
+ this,
+ scroll_track_size,
+ cx,
+ )
+ })
+ .map(|this| {
+ TableInteractionState::render_vertical_scrollbar(
+ interaction_state,
+ this,
+ cx,
+ )
+ })
+ }),
+ )
+ .when_some(
+ width.and(interaction_state.as_ref()),
+ |this, interaction_state| {
+ this.map(|this| {
+ TableInteractionState::render_horizontal_scrollbar_track(
+ interaction_state,
+ this,
+ scroll_track_size,
+ cx,
+ )
+ })
+ .map(|this| {
+ TableInteractionState::render_horizontal_scrollbar(
+ interaction_state,
+ this,
+ h_scroll_offset,
+ cx,
+ )
+ })
+ },
+ );
+
+ if let Some(interaction_state) = interaction_state.as_ref() {
+ table
+ .track_focus(&interaction_state.read(cx).focus_handle)
+ .id(("table", interaction_state.entity_id()))
+ .on_hover({
+ let interaction_state = interaction_state.downgrade();
+ move |hovered, window, cx| {
+ interaction_state
+ .update(cx, |interaction_state, cx| {
+ if *hovered {
+ interaction_state.horizontal_scrollbar.show(cx);
+ interaction_state.vertical_scrollbar.show(cx);
+ cx.notify();
+ } else if !interaction_state
+ .focus_handle
+ .contains_focused(window, cx)
+ {
+ interaction_state.hide_scrollbars(window, cx);
+ }
+ })
+ .ok();
+ }
+ })
+ .into_any_element()
+ } else {
+ table.into_any_element()
+ }
+ }
+}
+
+// Computed state describing how to render the scrollbars, one per axis.
+// On render we just read this off the interaction state; we update it when
+// - settings change
+// - focus moves in or out, on hover, etc.
+#[derive(Debug)]
+pub struct ScrollbarProperties {
+ axis: Axis,
+ show_scrollbar: bool,
+ show_track: bool,
+ auto_hide: bool,
+ hide_task: Option<Task<()>>,
+ state: ScrollbarState,
+}
+
+impl ScrollbarProperties {
+ // Shows the scrollbar and cancels any pending hide task
+ fn show(&mut self, cx: &mut Context<TableInteractionState>) {
+ if !self.auto_hide {
+ return;
+ }
+ self.show_scrollbar = true;
+ self.hide_task.take();
+ cx.notify();
+ }
+
+ fn hide(&mut self, window: &mut Window, cx: &mut Context<TableInteractionState>) {
+ const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);
+
+ if !self.auto_hide {
+ return;
+ }
+
+ let axis = self.axis;
+ self.hide_task = Some(cx.spawn_in(window, async move |state, cx| {
+     cx.background_executor()
+         .timer(SCROLLBAR_SHOW_INTERVAL)
+         .await;
+
+     if let Some(state) = state.upgrade() {
+         state
+             .update(cx, |state, cx| {
+                 match axis {
+                     Axis::Vertical => state.vertical_scrollbar.show_scrollbar = false,
+                     Axis::Horizontal => state.horizontal_scrollbar.show_scrollbar = false,
+                 }
+                 cx.notify();
+             })
+             .ok();
+     }
+ }));
+ }
+}
+
+impl Component for Table<3> {
+ fn scope() -> ComponentScope {
+ ComponentScope::Layout
+ }
+
+ fn description() -> Option<&'static str> {
+ Some("A table component for displaying data in rows and columns with optional styling.")
+ }
+
+ fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
+ Some(
+ v_flex()
+ .gap_6()
+ .children(vec![
+ example_group_with_title(
+ "Basic Tables",
+ vec![
+ single_example(
+ "Simple Table",
+ Table::new()
+ .width(px(400.))
+ .header(["Name", "Age", "City"])
+ .row(["Alice", "28", "New York"])
+ .row(["Bob", "32", "San Francisco"])
+ .row(["Charlie", "25", "London"])
+ .into_any_element(),
+ ),
+ single_example(
+ "Two Column Table",
+ Table::new()
+ .header(["Category", "Value"])
+ .width(px(300.))
+ .row(["Revenue", "$100,000"])
+ .row(["Expenses", "$75,000"])
+ .row(["Profit", "$25,000"])
+ .into_any_element(),
+ ),
+ ],
+ ),
+ example_group_with_title(
+ "Styled Tables",
+ vec![
+ single_example(
+ "Default",
+ Table::new()
+ .width(px(400.))
+ .header(["Product", "Price", "Stock"])
+ .row(["Laptop", "$999", "In Stock"])
+ .row(["Phone", "$599", "Low Stock"])
+ .row(["Tablet", "$399", "Out of Stock"])
+ .into_any_element(),
+ ),
+ single_example(
+ "Striped",
+ Table::new()
+ .width(px(400.))
+ .striped()
+ .header(["Product", "Price", "Stock"])
+ .row(["Laptop", "$999", "In Stock"])
+ .row(["Phone", "$599", "Low Stock"])
+ .row(["Tablet", "$399", "Out of Stock"])
+ .row(["Headphones", "$199", "In Stock"])
+ .into_any_element(),
+ ),
+ ],
+ ),
+ example_group_with_title(
+ "Mixed Content Table",
+ vec![single_example(
+ "Table with Elements",
+ Table::new()
+ .width(px(840.))
+ .header(["Status", "Name", "Priority", "Deadline", "Action"])
+ .row([
+ Indicator::dot().color(Color::Success).into_any_element(),
+ "Project A".into_any_element(),
+ "High".into_any_element(),
+ "2023-12-31".into_any_element(),
+ Button::new("view_a", "View")
+ .style(ButtonStyle::Filled)
+ .full_width()
+ .into_any_element(),
+ ])
+ .row([
+ Indicator::dot().color(Color::Warning).into_any_element(),
+ "Project B".into_any_element(),
+ "Medium".into_any_element(),
+ "2024-03-15".into_any_element(),
+ Button::new("view_b", "View")
+ .style(ButtonStyle::Filled)
+ .full_width()
+ .into_any_element(),
+ ])
+ .row([
+ Indicator::dot().color(Color::Error).into_any_element(),
+ "Project C".into_any_element(),
+ "Low".into_any_element(),
+ "2024-06-30".into_any_element(),
+ Button::new("view_c", "View")
+ .style(ButtonStyle::Filled)
+ .full_width()
+ .into_any_element(),
+ ])
+ .into_any_element(),
+ )],
+ ),
+ ])
+ .into_any_element(),
+ )
+ }
+}
@@ -1,11 +1,9 @@
use collections::HashMap;
-use schemars::{
- JsonSchema,
- r#gen::SchemaSettings,
- schema::{ObjectValidation, Schema, SchemaObject},
-};
+use schemars::{JsonSchema, json_schema};
use serde::Deserialize;
use serde_json_lenient::Value;
+use std::borrow::Cow;
+use util::schemars::DefaultDenyUnknownFields;
#[derive(Deserialize)]
pub struct VsSnippetsFile {
@@ -15,29 +13,26 @@ pub struct VsSnippetsFile {
impl VsSnippetsFile {
pub fn generate_json_schema() -> Value {
- let schema = SchemaSettings::draft07()
- .with(|settings| settings.option_add_null_type = false)
+ let schema = schemars::generate::SchemaSettings::draft2019_09()
+ .with_transform(DefaultDenyUnknownFields)
.into_generator()
- .into_root_schema_for::<Self>();
+ .root_schema_for::<Self>();
serde_json_lenient::to_value(schema).unwrap()
}
}
impl JsonSchema for VsSnippetsFile {
- fn schema_name() -> String {
+ fn schema_name() -> Cow<'static, str> {
"VsSnippetsFile".into()
}
- fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
- SchemaObject {
- object: Some(Box::new(ObjectValidation {
- additional_properties: Some(Box::new(r#gen.subschema_for::<VsCodeSnippet>())),
- ..Default::default()
- })),
- ..Default::default()
- }
- .into()
+ fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ let snippet_schema = generator.subschema_for::<VsCodeSnippet>();
+ json_schema!({
+ "type": "object",
+ "additionalProperties": snippet_schema
+ })
}
}
@@ -54,7 +54,15 @@ impl From<ScopeFileName> for ScopeName {
}
}
-actions!(snippets, [ConfigureSnippets, OpenFolder]);
+actions!(
+ snippets,
+ [
+ /// Opens the snippets configuration file.
+ ConfigureSnippets,
+ /// Opens the snippets folder in the file manager.
+ OpenFolder
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(register).detach();
@@ -1,6 +1,7 @@
mod auto_height_editor;
mod cursor;
mod focus;
+mod indent_guides;
mod kitchen_sink;
mod overflow_scroll;
mod picker;
@@ -12,6 +13,7 @@ mod with_rem_size;
pub use auto_height_editor::*;
pub use cursor::*;
pub use focus::*;
+pub use indent_guides::*;
pub use kitchen_sink::*;
pub use overflow_scroll::*;
pub use picker::*;
@@ -1,13 +1,10 @@
-use std::fmt::format;
+use std::ops::Range;
+
+use gpui::{Entity, Render, div, uniform_list};
+use gpui::{prelude::*, *};
+use ui::{AbsoluteLength, Color, DefiniteLength, Label, LabelCommon, px, v_flex};
-use gpui::{
- DefaultColor, DefaultThemeAppearance, Hsla, Render, colors, div, prelude::*, uniform_list,
-};
use story::Story;
-use strum::IntoEnumIterator;
-use ui::{
- AbsoluteLength, ActiveTheme, Color, DefiniteLength, Label, LabelCommon, h_flex, px, v_flex,
-};
const LENGTH: usize = 100;
@@ -16,7 +13,7 @@ pub struct IndentGuidesStory {
}
impl IndentGuidesStory {
- pub fn model(window: &mut Window, cx: &mut AppContext) -> Model<Self> {
+ pub fn model(_window: &mut Window, cx: &mut App) -> Entity<Self> {
let mut depths = Vec::new();
depths.push(0);
depths.push(1);
@@ -33,16 +30,15 @@ impl IndentGuidesStory {
}
impl Render for IndentGuidesStory {
- fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
Story::container(cx)
- .child(Story::title("Indent guides"))
+ .child(Story::title("Indent guides", cx))
.child(
v_flex().size_full().child(
uniform_list(
- cx.entity().clone(),
"some-list",
self.depths.len(),
- |this, range, cx| {
+ cx.processor(move |this, range: Range<usize>, _window, _cx| {
this.depths
.iter()
.enumerate()
@@ -56,7 +52,7 @@ impl Render for IndentGuidesStory {
.child(Label::new(format!("Item {}", i)).color(Color::Info))
})
.collect()
- },
+ }),
)
.with_sizing_behavior(gpui::ListSizingBehavior::Infer)
.with_decoration(ui::indent_guides(
@@ -64,10 +60,10 @@ impl Render for IndentGuidesStory {
px(16.),
ui::IndentGuideColors {
default: Color::Info.color(cx),
- hovered: Color::Accent.color(cx),
+ hover: Color::Accent.color(cx),
active: Color::Accent.color(cx),
},
- |this, range, cx| {
+ |this, range, _cx, _context| {
this.depths
.iter()
.skip(range.start)
@@ -31,6 +31,7 @@ pub enum ComponentStory {
ToggleButton,
ViewportUnits,
WithRemSize,
+ IndentGuides,
}
impl ComponentStory {
@@ -60,6 +61,7 @@ impl ComponentStory {
Self::ToggleButton => cx.new(|_| ui::ToggleButtonStory).into(),
Self::ViewportUnits => cx.new(|_| crate::stories::ViewportUnitsStory).into(),
Self::WithRemSize => cx.new(|_| crate::stories::WithRemSizeStory).into(),
+ Self::IndentGuides => crate::stories::IndentGuidesStory::model(window, cx).into(),
}
}
}
@@ -9,7 +9,9 @@ use std::sync::Arc;
use clap::Parser;
use dialoguer::FuzzySelect;
use gpui::{
- AnyView, App, Bounds, Context, Render, Window, WindowBounds, WindowOptions, div, px, size,
+ AnyView, App, Bounds, Context, Render, Window, WindowBounds, WindowOptions,
+ colors::{Colors, GlobalColors},
+ div, px, size,
};
use log::LevelFilter;
use project::Project;
@@ -68,6 +70,8 @@ fn main() {
gpui::Application::new().with_assets(Assets).run(move |cx| {
load_embedded_fonts(cx).unwrap();
+ cx.set_global(GlobalColors(Arc::new(Colors::default())));
+
let http_client = ReqwestClient::user_agent("zed_storybook").unwrap();
cx.set_http_client(Arc::new(http_client));
@@ -25,7 +25,13 @@ use std::{path::PathBuf, process::Stdio, sync::Arc};
use ui::prelude::*;
use util::ResultExt;
-actions!(supermaven, [SignOut]);
+actions!(
+ supermaven,
+ [
+ /// Signs out of Supermaven.
+ SignOut
+ ]
+);
pub fn init(client: Arc<Client>, cx: &mut App) {
let supermaven = cx.new(|_| Supermaven::Starting);
@@ -5,7 +5,14 @@ pub mod svg_preview_view;
actions!(
svg,
- [OpenPreview, OpenPreviewToTheSide, OpenFollowingPreview]
+ [
+ /// Opens an SVG preview for the current file.
+ OpenPreview,
+ /// Opens an SVG preview in a split pane.
+ OpenPreviewToTheSide,
+ /// Opens a following SVG preview that syncs with the editor.
+ OpenFollowingPreview
+ ]
);
pub fn init(cx: &mut App) {
@@ -25,6 +25,7 @@ use workspace::{
const PANEL_WIDTH_REMS: f32 = 28.;
+/// Toggles the tab switcher interface.
#[derive(PartialEq, Clone, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = tab_switcher)]
#[serde(deny_unknown_fields)]
@@ -32,7 +33,15 @@ pub struct Toggle {
#[serde(default)]
pub select_last: bool,
}
-actions!(tab_switcher, [CloseSelectedItem, ToggleAll]);
+actions!(
+ tab_switcher,
+ [
+ /// Closes the selected item in the tab switcher.
+ CloseSelectedItem,
+ /// Toggles between showing all tabs or just the current pane's tabs.
+ ToggleAll
+ ]
+);
pub struct TabSwitcher {
picker: Entity<Picker<TabSwitcherDelegate>>,
@@ -11,6 +11,10 @@ test-support = [
"util/test-support"
]
+[lib]
+path = "src/task.rs"
+doctest = false
+
[lints]
workspace = true
@@ -1,10 +1,8 @@
-use anyhow::Result;
use gpui::SharedString;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use serde_json::json;
-/// Represents a schema for a specific adapter
+/// JSON schema for a specific adapter
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
pub struct AdapterSchema {
/// The adapter name identifier
@@ -16,47 +14,3 @@ pub struct AdapterSchema {
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(transparent)]
pub struct AdapterSchemas(pub Vec<AdapterSchema>);
-
-impl AdapterSchemas {
- pub fn generate_json_schema(&self) -> Result<serde_json_lenient::Value> {
- let adapter_conditions = self
- .0
- .iter()
- .map(|adapter_schema| {
- let adapter_name = adapter_schema.adapter.to_string();
- json!({
- "if": {
- "properties": {
- "adapter": { "const": adapter_name }
- }
- },
- "then": adapter_schema.schema
- })
- })
- .collect::<Vec<_>>();
-
- let schema = serde_json_lenient::json!({
- "$schema": "http://json-schema.org/draft-07/schema#",
- "title": "Debug Adapter Configurations",
- "description": "Configuration for debug adapters. Schema changes based on the selected adapter.",
- "type": "array",
- "items": {
- "type": "object",
- "required": ["adapter", "label"],
- "properties": {
- "adapter": {
- "type": "string",
- "description": "The name of the debug adapter"
- },
- "label": {
- "type": "string",
- "description": "The name of the debug configuration"
- },
- },
- "allOf": adapter_conditions
- }
- });
-
- Ok(serde_json_lenient::to_value(schema)?)
- }
-}
@@ -6,7 +6,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::net::Ipv4Addr;
use std::path::PathBuf;
-use util::debug_panic;
+use util::{debug_panic, schemars::add_new_subschema};
use crate::{TaskTemplate, adapter_schema::AdapterSchemas};
@@ -243,7 +243,7 @@ pub enum Request {
Attach,
}
-/// This struct represent a user created debug task from the new session modal
+/// This struct represents a user-created debug task from the new process modal
#[derive(Deserialize, Serialize, PartialEq, Eq, Clone, Debug, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct ZedDebugConfig {
@@ -286,10 +286,10 @@ pub struct DebugScenario {
pub struct DebugTaskFile(pub Vec<DebugScenario>);
impl DebugTaskFile {
- pub fn generate_json_schema(schemas: &AdapterSchemas) -> serde_json_lenient::Value {
- let build_task_schema = schemars::schema_for!(BuildTaskDefinition);
- let mut build_task_value =
- serde_json_lenient::to_value(&build_task_schema).unwrap_or_default();
+ pub fn generate_json_schema(schemas: &AdapterSchemas) -> serde_json::Value {
+ let mut generator = schemars::generate::SchemaSettings::draft2019_09().into_generator();
+
+ let mut build_task_value = BuildTaskDefinition::json_schema(&mut generator).to_value();
if let Some(template_object) = build_task_value
.get_mut("anyOf")
@@ -300,7 +300,12 @@ impl DebugTaskFile {
.get_mut("properties")
.and_then(|value| value.as_object_mut())
{
- properties.remove("label");
+ if properties.remove("label").is_none() {
+ debug_panic!(
+ "Generated TaskTemplate json schema did not have expected 'label' field. \
+ Schema of 2nd alternative is: {template_object:?}"
+ );
+ }
}
if let Some(arr) = template_object
@@ -310,38 +315,60 @@ impl DebugTaskFile {
arr.retain(|v| v.as_str() != Some("label"));
}
} else {
- debug_panic!("Task Template schema in debug scenario's needs to be updated");
+ debug_panic!(
+ "Generated TaskTemplate json schema did not match expectations. \
+ Schema is: {build_task_value:?}"
+ );
}
- let task_definitions = build_task_value
- .get("definitions")
- .cloned()
- .unwrap_or_default();
-
let adapter_conditions = schemas
.0
.iter()
.map(|adapter_schema| {
let adapter_name = adapter_schema.adapter.to_string();
- serde_json::json!({
- "if": {
- "properties": {
- "adapter": { "const": adapter_name }
- }
- },
- "then": adapter_schema.schema
- })
+ add_new_subschema(
+ &mut generator,
+ &format!("{adapter_name}DebugSettings"),
+ serde_json::json!({
+ "if": {
+ "properties": {
+ "adapter": { "const": adapter_name }
+ }
+ },
+ "then": adapter_schema.schema
+ }),
+ )
})
.collect::<Vec<_>>();
- serde_json_lenient::json!({
- "$schema": "http://json-schema.org/draft-07/schema#",
+ let build_task_definition_ref = add_new_subschema(
+ &mut generator,
+ BuildTaskDefinition::schema_name().as_ref(),
+ build_task_value,
+ );
+
+ let meta_schema = generator
+ .settings()
+ .meta_schema
+ .as_ref()
+ .expect("meta_schema should be present in schemars settings")
+ .to_string();
+
+ serde_json::json!({
+ "$schema": meta_schema,
"title": "Debug Configurations",
"description": "Configuration for debug scenarios",
"type": "array",
"items": {
"type": "object",
"required": ["adapter", "label"],
+ // TODO: Uncommenting this will cause json-language-server to provide warnings for
+ // unrecognized properties. It should be enabled if/when there's an adapter JSON
+ // schema that's comprehensive. In order to not get warnings for the other schemas,
+ // `additionalProperties` or `unevaluatedProperties` (to handle "allOf" etc style
+ // schema combinations) could be set to `true` for that schema.
+ //
+ // "unevaluatedProperties": false,
"properties": {
"adapter": {
"type": "string",
@@ -351,7 +378,7 @@ impl DebugTaskFile {
"type": "string",
"description": "The name of the debug configuration"
},
- "build": build_task_value,
+ "build": build_task_definition_ref,
"tcp_connection": {
"type": "object",
"description": "Optional TCP connection information for connecting to an already running debug adapter",
@@ -374,7 +401,7 @@ impl DebugTaskFile {
},
"allOf": adapter_conditions
},
- "definitions": task_definitions
+ "$defs": generator.take_definitions(true),
})
}
}
@@ -1,33 +1,6 @@
-use schemars::{
- SchemaGenerator,
- schema::{ArrayValidation, InstanceType, Schema, SchemaObject, SingleOrVec, StringValidation},
-};
use serde::de::{self, Deserializer, Visitor};
use std::fmt;
-/// Generates a JSON schema for a non-empty string array.
-pub fn non_empty_string_vec_json_schema(_: &mut SchemaGenerator) -> Schema {
- Schema::Object(SchemaObject {
- instance_type: Some(InstanceType::Array.into()),
- array: Some(Box::new(ArrayValidation {
- unique_items: Some(true),
- items: Some(SingleOrVec::Single(Box::new(Schema::Object(
- SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- string: Some(Box::new(StringValidation {
- min_length: Some(1), // Ensures string in the array is non-empty
- ..Default::default()
- })),
- ..Default::default()
- },
- )))),
- ..Default::default()
- })),
- format: Some("vec-of-non-empty-strings".to_string()), // Use a custom format keyword
- ..Default::default()
- })
-}
-
/// Deserializes a non-empty string array.
pub fn non_empty_string_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
@@ -0,0 +1,172 @@
+use crate::Shell;
+
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
+enum ShellKind {
+ #[default]
+ Posix,
+ Powershell,
+ Cmd,
+}
+
+impl ShellKind {
+ fn new(program: &str) -> Self {
+ if program == "powershell"
+ || program.ends_with("powershell.exe")
+ || program == "pwsh"
+ || program.ends_with("pwsh.exe")
+ {
+ ShellKind::Powershell
+ } else if program == "cmd" || program.ends_with("cmd.exe") {
+ ShellKind::Cmd
+ } else {
+ // Someother shell detected, the user might install and use a
+ // unix-like shell.
+ ShellKind::Posix
+ }
+ }
+
+ fn to_shell_variable(&self, input: &str) -> String {
+ match self {
+ Self::Powershell => Self::to_powershell_variable(input),
+ Self::Cmd => Self::to_cmd_variable(input),
+ Self::Posix => input.to_owned(),
+ }
+ }
+
+ fn to_cmd_variable(input: &str) -> String {
+ if let Some(var_str) = input.strip_prefix("${") {
+ if var_str.find(':').is_none() {
+ // If the input starts with "${", remove the trailing "}"
+ format!("%{}%", &var_str[..var_str.len() - 1])
+ } else {
+ // `${SOME_VAR:-SOME_DEFAULT}`, we currently do not handle this situation,
+ // which will result in the task failing to run in such cases.
+ input.into()
+ }
+ } else if let Some(var_str) = input.strip_prefix('$') {
+ // If the input starts with "$", directly append to "$env:"
+ format!("%{}%", var_str)
+ } else {
+ // If no prefix is found, return the input as is
+ input.into()
+ }
+ }
+ fn to_powershell_variable(input: &str) -> String {
+ if let Some(var_str) = input.strip_prefix("${") {
+ if var_str.find(':').is_none() {
+ // If the input starts with "${", remove the trailing "}"
+ format!("$env:{}", &var_str[..var_str.len() - 1])
+ } else {
+ // `${SOME_VAR:-SOME_DEFAULT}`, we currently do not handle this situation,
+ // which will result in the task failing to run in such cases.
+ input.into()
+ }
+ } else if let Some(var_str) = input.strip_prefix('$') {
+ // If the input starts with "$", directly append to "$env:"
+ format!("$env:{}", var_str)
+ } else {
+ // If no prefix is found, return the input as is
+ input.into()
+ }
+ }
+
+ fn args_for_shell(&self, interactive: bool, combined_command: String) -> Vec<String> {
+ match self {
+ ShellKind::Powershell => vec!["-C".to_owned(), combined_command],
+ ShellKind::Cmd => vec!["/C".to_owned(), combined_command],
+ ShellKind::Posix => interactive
+ .then(|| "-i".to_owned())
+ .into_iter()
+ .chain(["-c".to_owned(), combined_command])
+ .collect(),
+ }
+ }
+}
+
+fn system_shell() -> String {
+ if cfg!(target_os = "windows") {
+ // `alacritty_terminal` uses this as default on Windows. See:
+ // https://github.com/alacritty/alacritty/blob/0d4ab7bca43213d96ddfe40048fc0f922543c6f8/alacritty_terminal/src/tty/windows/mod.rs#L130
+ // We could use `util::get_windows_system_shell()` here, but since we are only running tasks,
+ // leaving it as `powershell.exe` should be okay.
+ "powershell.exe".to_string()
+ } else {
+ std::env::var("SHELL").unwrap_or("/bin/sh".to_string())
+ }
+}
+
+/// ShellBuilder is used to turn a user-requested task into a
+/// program that can be executed by the shell.
+pub struct ShellBuilder {
+ /// The shell to run
+ program: String,
+ args: Vec<String>,
+ interactive: bool,
+ kind: ShellKind,
+}
+
+pub static DEFAULT_REMOTE_SHELL: &str = "\"${SHELL:-sh}\"";
+
+impl ShellBuilder {
+ /// Create a new ShellBuilder as configured.
+ pub fn new(is_local: bool, shell: &Shell) -> Self {
+ let (program, args) = match shell {
+ Shell::System => {
+ if is_local {
+ (system_shell(), Vec::new())
+ } else {
+ (DEFAULT_REMOTE_SHELL.to_string(), Vec::new())
+ }
+ }
+ Shell::Program(shell) => (shell.clone(), Vec::new()),
+ Shell::WithArguments { program, args, .. } => (program.clone(), args.clone()),
+ };
+ let kind = ShellKind::new(&program);
+ Self {
+ program,
+ args,
+ interactive: true,
+ kind,
+ }
+ }
+ pub fn non_interactive(mut self) -> Self {
+ self.interactive = false;
+ self
+ }
+ /// Returns the label to show in the terminal tab
+ pub fn command_label(&self, command_label: &str) -> String {
+ match self.kind {
+ ShellKind::Powershell => {
+ format!("{} -C '{}'", self.program, command_label)
+ }
+ ShellKind::Cmd => {
+ format!("{} /C '{}'", self.program, command_label)
+ }
+ ShellKind::Posix => {
+ let interactivity = self.interactive.then_some("-i ").unwrap_or_default();
+ format!("{} {interactivity}-c '{}'", self.program, command_label)
+ }
+ }
+ }
+ /// Returns the program and arguments to run this task in a shell.
+ pub fn build(
+ mut self,
+ task_command: Option<String>,
+ task_args: &Vec<String>,
+ ) -> (String, Vec<String>) {
+ if let Some(task_command) = task_command {
+ let combined_command = task_args
+ .into_iter()
+ .fold(task_command, |mut command, arg| {
+ command.push(' ');
+ command.push_str(&self.kind.to_shell_variable(arg));
+ command
+ });
+
+ self.args
+ .extend(self.kind.args_for_shell(self.interactive, combined_command));
+ }
+
+ (self.program, self.args)
+ }
+}
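+
+#[cfg(test)]
+mod shell_builder_examples {
+    // A minimal sketch (added for illustration, not part of the original change) showing how
+    // `ShellKind::to_shell_variable` rewrites task variables per shell and how
+    // `ShellBuilder::build` combines a command with its arguments.
+    use super::{ShellBuilder, ShellKind};
+    use crate::Shell;
+
+    #[test]
+    fn rewrites_variables_per_shell_kind() {
+        assert_eq!(ShellKind::Cmd.to_shell_variable("${ZED_FILE}"), "%ZED_FILE%");
+        assert_eq!(ShellKind::Powershell.to_shell_variable("$ZED_FILE"), "$env:ZED_FILE");
+        assert_eq!(ShellKind::Posix.to_shell_variable("$ZED_FILE"), "$ZED_FILE");
+    }
+
+    #[test]
+    fn builds_a_non_interactive_posix_command() {
+        let builder = ShellBuilder::new(true, &Shell::Program("bash".to_string())).non_interactive();
+        let (program, args) = builder.build(Some("echo".to_string()), &vec!["$ZED_FILE".to_string()]);
+        assert_eq!(program, "bash");
+        assert_eq!(args, vec!["-c".to_string(), "echo $ZED_FILE".to_string()]);
+    }
+}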
@@ -3,6 +3,7 @@
mod adapter_schema;
mod debug_format;
mod serde_helpers;
+mod shell_builder;
pub mod static_source;
mod task_template;
mod vscode_debug_format;
@@ -21,6 +22,7 @@ pub use debug_format::{
AttachRequest, BuildTaskDefinition, DebugRequest, DebugScenario, DebugTaskFile, LaunchRequest,
Request, TcpArgumentsTemplate, ZedDebugConfig,
};
+pub use shell_builder::{DEFAULT_REMOTE_SHELL, ShellBuilder};
pub use task_template::{
DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates,
substitute_variables_in_map, substitute_variables_in_str,
@@ -44,7 +46,7 @@ pub struct SpawnInTerminal {
/// Human readable name of the terminal tab.
pub label: String,
/// Executable command to spawn.
- pub command: String,
+ pub command: Option<String>,
/// Arguments to the command, potentially unsubstituted,
/// to let the shell that spawns the command to do the substitution, if needed.
pub args: Vec<String>,
@@ -334,191 +336,6 @@ pub enum Shell {
},
}
-#[cfg(target_os = "windows")]
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-enum WindowsShellType {
- Powershell,
- Cmd,
- Other,
-}
-
-/// ShellBuilder is used to turn a user-requested task into a
-/// program that can be executed by the shell.
-pub struct ShellBuilder {
- program: String,
- args: Vec<String>,
- interactive: bool,
-}
-
-pub static DEFAULT_REMOTE_SHELL: &str = "\"${SHELL:-sh}\"";
-
-impl ShellBuilder {
- /// Create a new ShellBuilder as configured.
- pub fn new(is_local: bool, shell: &Shell) -> Self {
- let (program, args) = match shell {
- Shell::System => {
- if is_local {
- (Self::system_shell(), Vec::new())
- } else {
- (DEFAULT_REMOTE_SHELL.to_string(), Vec::new())
- }
- }
- Shell::Program(shell) => (shell.clone(), Vec::new()),
- Shell::WithArguments { program, args, .. } => (program.clone(), args.clone()),
- };
- Self {
- program,
- args,
- interactive: true,
- }
- }
- pub fn non_interactive(mut self) -> Self {
- self.interactive = false;
- self
- }
-}
-
-#[cfg(not(target_os = "windows"))]
-impl ShellBuilder {
- /// Returns the label to show in the terminal tab
- pub fn command_label(&self, command_label: &str) -> String {
- let interactivity = self.interactive.then_some("-i ").unwrap_or_default();
- format!("{} {interactivity}-c '{}'", self.program, command_label)
- }
-
- /// Returns the program and arguments to run this task in a shell.
- pub fn build(mut self, task_command: String, task_args: &Vec<String>) -> (String, Vec<String>) {
- let combined_command = task_args
- .into_iter()
- .fold(task_command, |mut command, arg| {
- command.push(' ');
- command.push_str(&arg);
- command
- });
- self.args.extend(
- self.interactive
- .then(|| "-i".to_owned())
- .into_iter()
- .chain(["-c".to_owned(), combined_command]),
- );
-
- (self.program, self.args)
- }
-
- fn system_shell() -> String {
- std::env::var("SHELL").unwrap_or("/bin/sh".to_string())
- }
-}
-
-#[cfg(target_os = "windows")]
-impl ShellBuilder {
- /// Returns the label to show in the terminal tab
- pub fn command_label(&self, command_label: &str) -> String {
- match self.windows_shell_type() {
- WindowsShellType::Powershell => {
- format!("{} -C '{}'", self.program, command_label)
- }
- WindowsShellType::Cmd => {
- format!("{} /C '{}'", self.program, command_label)
- }
- WindowsShellType::Other => {
- format!("{} -i -c '{}'", self.program, command_label)
- }
- }
- }
-
- /// Returns the program and arguments to run this task in a shell.
- pub fn build(mut self, task_command: String, task_args: &Vec<String>) -> (String, Vec<String>) {
- let combined_command = task_args
- .into_iter()
- .fold(task_command, |mut command, arg| {
- command.push(' ');
- command.push_str(&self.to_windows_shell_variable(arg.to_string()));
- command
- });
-
- match self.windows_shell_type() {
- WindowsShellType::Powershell => self.args.extend(["-C".to_owned(), combined_command]),
- WindowsShellType::Cmd => self.args.extend(["/C".to_owned(), combined_command]),
- WindowsShellType::Other => {
- self.args
- .extend(["-i".to_owned(), "-c".to_owned(), combined_command])
- }
- }
-
- (self.program, self.args)
- }
- fn windows_shell_type(&self) -> WindowsShellType {
- if self.program == "powershell"
- || self.program.ends_with("powershell.exe")
- || self.program == "pwsh"
- || self.program.ends_with("pwsh.exe")
- {
- WindowsShellType::Powershell
- } else if self.program == "cmd" || self.program.ends_with("cmd.exe") {
- WindowsShellType::Cmd
- } else {
- // Someother shell detected, the user might install and use a
- // unix-like shell.
- WindowsShellType::Other
- }
- }
-
- // `alacritty_terminal` uses this as default on Windows. See:
- // https://github.com/alacritty/alacritty/blob/0d4ab7bca43213d96ddfe40048fc0f922543c6f8/alacritty_terminal/src/tty/windows/mod.rs#L130
- // We could use `util::get_windows_system_shell()` here, but we are running tasks here, so leave it to `powershell.exe`
- // should be okay.
- fn system_shell() -> String {
- "powershell.exe".to_string()
- }
-
- fn to_windows_shell_variable(&self, input: String) -> String {
- match self.windows_shell_type() {
- WindowsShellType::Powershell => Self::to_powershell_variable(input),
- WindowsShellType::Cmd => Self::to_cmd_variable(input),
- WindowsShellType::Other => input,
- }
- }
-
- fn to_cmd_variable(input: String) -> String {
- if let Some(var_str) = input.strip_prefix("${") {
- if var_str.find(':').is_none() {
- // If the input starts with "${", remove the trailing "}"
- format!("%{}%", &var_str[..var_str.len() - 1])
- } else {
- // `${SOME_VAR:-SOME_DEFAULT}`, we currently do not handle this situation,
- // which will result in the task failing to run in such cases.
- input
- }
- } else if let Some(var_str) = input.strip_prefix('$') {
- // If the input starts with "$", directly append to "$env:"
- format!("%{}%", var_str)
- } else {
- // If no prefix is found, return the input as is
- input
- }
- }
-
- fn to_powershell_variable(input: String) -> String {
- if let Some(var_str) = input.strip_prefix("${") {
- if var_str.find(':').is_none() {
- // If the input starts with "${", remove the trailing "}"
- format!("$env:{}", &var_str[..var_str.len() - 1])
- } else {
- // `${SOME_VAR:-SOME_DEFAULT}`, we currently do not handle this situation,
- // which will result in the task failing to run in such cases.
- input
- }
- } else if let Some(var_str) = input.strip_prefix('$') {
- // If the input starts with "$", directly append to "$env:"
- format!("$env:{}", var_str)
- } else {
- // If no prefix is found, return the input as is
- input
- }
- }
-}
-
type VsCodeEnvVariable = String;
type ZedEnvVariable = String;
@@ -1,16 +1,16 @@
use anyhow::{Context as _, bail};
use collections::{HashMap, HashSet};
-use schemars::{JsonSchema, r#gen::SchemaSettings};
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::path::PathBuf;
+use util::schemars::DefaultDenyUnknownFields;
use util::serde::default_true;
use util::{ResultExt, truncate_and_remove_front};
use crate::{
AttachRequest, ResolvedTask, RevealTarget, Shell, SpawnInTerminal, TaskContext, TaskId,
- VariableName, ZED_VARIABLE_NAME_PREFIX,
- serde_helpers::{non_empty_string_vec, non_empty_string_vec_json_schema},
+ VariableName, ZED_VARIABLE_NAME_PREFIX, serde_helpers::non_empty_string_vec,
};
/// A template definition of a Zed task to run.
@@ -61,7 +61,7 @@ pub struct TaskTemplate {
/// Represents the tags which this template attaches to.
/// Adding this removes this task from other UI and gives you ability to run it by tag.
#[serde(default, deserialize_with = "non_empty_string_vec")]
- #[schemars(schema_with = "non_empty_string_vec_json_schema")]
+ #[schemars(length(min = 1))]
pub tags: Vec<String>,
/// Which shell to use when spawning the task.
#[serde(default)]
@@ -116,10 +116,10 @@ pub struct TaskTemplates(pub Vec<TaskTemplate>);
impl TaskTemplates {
/// Generates JSON schema of Tasks JSON template format.
pub fn generate_json_schema() -> serde_json_lenient::Value {
- let schema = SchemaSettings::draft07()
- .with(|settings| settings.option_add_null_type = false)
+ let schema = schemars::generate::SchemaSettings::draft2019_09()
+ .with_transform(DefaultDenyUnknownFields)
.into_generator()
- .into_root_schema_for::<Self>();
+ .root_schema_for::<Self>();
serde_json_lenient::to_value(schema).unwrap()
}
@@ -255,7 +255,7 @@ impl TaskTemplate {
command_label
},
),
- command,
+ command: Some(command),
args: self.args.clone(),
env,
use_new_terminal: self.use_new_terminal,
@@ -635,7 +635,7 @@ mod tests {
"Human-readable label should have long substitutions trimmed"
);
assert_eq!(
- spawn_in_terminal.command,
+ spawn_in_terminal.command.clone().unwrap(),
format!("echo test_file {long_value}"),
"Command should be substituted with variables and those should not be shortened"
);
@@ -652,7 +652,7 @@ mod tests {
spawn_in_terminal.command_label,
format!(
"{} arg1 test_selected_text arg2 5678 arg3 {long_value}",
- spawn_in_terminal.command
+ spawn_in_terminal.command.clone().unwrap()
),
"Command label args should be substituted with variables and those should not be shortened"
);
@@ -711,7 +711,7 @@ mod tests {
assert_substituted_variables(&resolved_task, Vec::new());
let resolved = resolved_task.resolved;
assert_eq!(resolved.label, task.label);
- assert_eq!(resolved.command, task.command);
+ assert_eq!(resolved.command, Some(task.command));
assert_eq!(resolved.args, task.args);
}
@@ -93,7 +93,7 @@ fn task_type_to_adapter_name(task_type: &str) -> String {
"php" => "PHP",
"cppdbg" | "lldb" => "CodeLLDB",
"debugpy" => "Debugpy",
- "rdbg" => "Ruby",
+ "rdbg" => "rdbg",
_ => task_type,
}
.to_owned()
@@ -47,7 +47,10 @@ impl VsCodeTaskDefinition {
replacer: &EnvVariableReplacer,
) -> anyhow::Result<Option<TaskTemplate>> {
if self.other_attributes.contains_key("dependsOn") {
- log::warn!("Skipping deserializing of a task with the unsupported `dependsOn` key");
+ log::warn!(
+ "Skipping deserializing of a task `{}` with the unsupported `dependsOn` key",
+ self.label
+ );
return Ok(None);
}
// `type` might not be set in e.g. tasks that use `dependsOn`; we still want to deserialize the whole object though (hence command is an Option),
@@ -56,7 +56,7 @@ pub fn to_esc_str(
("tab", AlacModifiers::None) => Some("\x09"),
("escape", AlacModifiers::None) => Some("\x1b"),
("enter", AlacModifiers::None) => Some("\x0d"),
- ("enter", AlacModifiers::Shift) => Some("\x0d"),
+ ("enter", AlacModifiers::Shift) => Some("\x0a"),
("enter", AlacModifiers::Alt) => Some("\x1b\x0d"),
("backspace", AlacModifiers::None) => Some("\x7f"),
//Interesting escape codes
@@ -406,6 +406,22 @@ mod test {
}
}
+ #[test]
+ fn test_shift_enter_newline() {
+ let shift_enter = Keystroke::parse("shift-enter").unwrap();
+ let regular_enter = Keystroke::parse("enter").unwrap();
+ let mode = TermMode::NONE;
+
+ // Shift-enter should send line feed (newline)
+ assert_eq!(to_esc_str(&shift_enter, &mode, false), Some("\x0a".into()));
+
+ // Regular enter should still send carriage return
+ assert_eq!(
+ to_esc_str(&regular_enter, &mode, false),
+ Some("\x0d".into())
+ );
+ }
+
#[test]
fn test_modifier_code_calc() {
// Code Modifiers
@@ -73,18 +73,36 @@ use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str};
actions!(
terminal,
[
+ /// Clears the terminal screen.
Clear,
+ /// Copies selected text to the clipboard.
Copy,
+ /// Pastes from the clipboard.
Paste,
+ /// Shows the character palette for special characters.
ShowCharacterPalette,
+ /// Searches for text in the terminal.
SearchTest,
+ /// Scrolls up by one line.
ScrollLineUp,
+ /// Scrolls down by one line.
ScrollLineDown,
+ /// Scrolls up by one page.
ScrollPageUp,
+ /// Scrolls down by one page.
ScrollPageDown,
+ /// Scrolls up by half a page.
+ ScrollHalfPageUp,
+ /// Scrolls down by half a page.
+ ScrollHalfPageDown,
+ /// Scrolls to the top of the terminal buffer.
ScrollToTop,
+ /// Scrolls to the bottom of the terminal buffer.
ScrollToBottom,
+ /// Toggles vi mode in the terminal.
ToggleViMode,
+ /// Selects all text in the terminal.
+ SelectAll,
]
);
@@ -880,7 +898,13 @@ impl Terminal {
InternalEvent::Copy => {
if let Some(txt) = term.selection_to_string() {
- cx.write_to_clipboard(ClipboardItem::new_string(txt))
+ cx.write_to_clipboard(ClipboardItem::new_string(txt));
+
+ let settings = TerminalSettings::get_global(cx);
+
+ if !settings.keep_selection_on_copy {
+ self.events.push_back(InternalEvent::SetSelection(None));
+ }
}
}
InternalEvent::ScrollToAlacPoint(point) => {
@@ -2,14 +2,14 @@ use alacritty_terminal::vte::ansi::{
CursorShape as AlacCursorShape, CursorStyle as AlacCursorStyle,
};
use collections::HashMap;
-use gpui::{
- AbsoluteLength, App, FontFallbacks, FontFeatures, FontWeight, Pixels, SharedString, px,
-};
-use schemars::{JsonSchema, r#gen::SchemaGenerator, schema::RootSchema};
+use gpui::{AbsoluteLength, App, FontFallbacks, FontFeatures, FontWeight, Pixels, px};
+use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
-use settings::{SettingsJsonSchemaParams, SettingsSources, add_references_to_properties};
+
+use settings::SettingsSources;
use std::path::PathBuf;
use task::Shell;
+use theme::FontFamilyName;
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
@@ -29,7 +29,7 @@ pub struct TerminalSettings {
pub shell: Shell,
pub working_directory: WorkingDirectory,
pub font_size: Option<Pixels>,
- pub font_family: Option<SharedString>,
+ pub font_family: Option<FontFamilyName>,
pub font_fallbacks: Option<FontFallbacks>,
pub font_features: Option<FontFeatures>,
pub font_weight: Option<FontWeight>,
@@ -40,6 +40,7 @@ pub struct TerminalSettings {
pub alternate_scroll: AlternateScroll,
pub option_as_meta: bool,
pub copy_on_select: bool,
+ pub keep_selection_on_copy: bool,
pub button: bool,
pub dock: TerminalDockPosition,
pub default_width: Pixels,
@@ -147,13 +148,14 @@ pub struct TerminalSettingsContent {
///
/// If this option is not included,
/// the terminal will default to matching the buffer's font family.
- pub font_family: Option<String>,
+ pub font_family: Option<FontFamilyName>,
/// Sets the terminal's font fallbacks.
///
/// If this option is not included,
/// the terminal will default to matching the buffer's font fallbacks.
- pub font_fallbacks: Option<Vec<String>>,
+ #[schemars(extend("uniqueItems" = true))]
+ pub font_fallbacks: Option<Vec<FontFamilyName>>,
/// Sets the terminal's line height.
///
@@ -192,6 +194,10 @@ pub struct TerminalSettingsContent {
///
/// Default: false
pub copy_on_select: Option<bool>,
+ /// Whether to keep the text selection after copying it to the clipboard.
+ ///
+ /// Default: false
+ pub keep_selection_on_copy: Option<bool>,
/// Whether to show the terminal button in the status bar.
///
/// Default: true
@@ -234,33 +240,13 @@ impl settings::Settings for TerminalSettings {
sources.json_merge()
}
- fn json_schema(
- generator: &mut SchemaGenerator,
- params: &SettingsJsonSchemaParams,
- _: &App,
- ) -> RootSchema {
- let mut root_schema = generator.root_schema_for::<Self::FileContent>();
- root_schema.definitions.extend([
- ("FontFamilies".into(), params.font_family_schema()),
- ("FontFallbacks".into(), params.font_fallback_schema()),
- ]);
-
- add_references_to_properties(
- &mut root_schema,
- &[
- ("font_family", "#/definitions/FontFamilies"),
- ("font_fallbacks", "#/definitions/FontFallbacks"),
- ],
- );
-
- root_schema
- }
-
fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
let name = |s| format!("terminal.integrated.{s}");
vscode.f32_setting(&name("fontSize"), &mut current.font_size);
- vscode.string_setting(&name("fontFamily"), &mut current.font_family);
+ if let Some(font_family) = vscode.read_string(&name("fontFamily")) {
+ current.font_family = Some(FontFamilyName(font_family.into()));
+ }
vscode.bool_setting(&name("copyOnSelection"), &mut current.copy_on_select);
vscode.bool_setting("macOptionIsMeta", &mut current.option_as_meta);
vscode.usize_setting("scrollback", &mut current.max_scroll_history_lines);
@@ -196,7 +196,6 @@ impl TerminalElement {
interactivity: Default::default(),
}
.track_focus(&focus)
- .element
}
//Vec<Range<AlacPoint>> -> Clip out the parts of the ranges
@@ -682,11 +681,10 @@ impl Element for TerminalElement {
let terminal_settings = TerminalSettings::get_global(cx);
- let font_family = terminal_settings
- .font_family
- .as_ref()
- .unwrap_or(&settings.buffer_font.family)
- .clone();
+ let font_family = terminal_settings.font_family.as_ref().map_or_else(
+ || settings.buffer_font.family.clone(),
+ |font_family| font_family.0.clone().into(),
+ );
let font_fallbacks = terminal_settings
.font_fallbacks
@@ -46,7 +46,13 @@ use zed_actions::assistant::InlineAssist;
const TERMINAL_PANEL_KEY: &str = "TerminalPanel";
-actions!(terminal_panel, [ToggleFocus]);
+actions!(
+ terminal_panel,
+ [
+ /// Toggles focus on the terminal panel.
+ ToggleFocus
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(
@@ -499,7 +505,7 @@ impl TerminalPanel {
let task = SpawnInTerminal {
command_label,
- command,
+ command: Some(command),
args,
..task.clone()
};
@@ -70,15 +70,23 @@ const GIT_DIFF_PATH_PREFIXES: &[&str] = &["a", "b"];
#[derive(Clone, Debug, PartialEq)]
pub struct ScrollTerminal(pub i32);
+/// Sends the specified text directly to the terminal.
#[derive(Clone, Debug, Default, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = terminal)]
pub struct SendText(String);
+/// Sends a keystroke sequence to the terminal.
#[derive(Clone, Debug, Default, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = terminal)]
pub struct SendKeystroke(String);
-actions!(terminal, [RerunTask]);
+actions!(
+ terminal,
+ [
+ /// Reruns the last executed task in the terminal.
+ RerunTask
+ ]
+);
pub fn init(cx: &mut App) {
assistant_slash_command::init(cx);
@@ -815,6 +823,11 @@ impl TerminalView {
};
dispatch_context.set("mouse_format", format);
};
+
+ if self.terminal.read(cx).last_content.selection.is_some() {
+ dispatch_context.add("selection");
+ }
+
dispatch_context
}
@@ -24,6 +24,7 @@ fs.workspace = true
futures.workspace = true
gpui.workspace = true
indexmap.workspace = true
+inventory.workspace = true
log.workspace = true
palette = { workspace = true, default-features = false, features = ["std"] }
parking_lot.workspace = true
@@ -4,12 +4,11 @@ use anyhow::Result;
use gpui::{FontStyle, FontWeight, HighlightStyle, Hsla, WindowBackgroundAppearance};
use indexmap::IndexMap;
use palette::FromColor;
-use schemars::JsonSchema;
-use schemars::r#gen::SchemaGenerator;
-use schemars::schema::{Schema, SchemaObject};
+use schemars::{JsonSchema, json_schema};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use serde_repr::{Deserialize_repr, Serialize_repr};
+use std::borrow::Cow;
use crate::{StatusColorsRefinement, ThemeColorsRefinement};
@@ -1502,30 +1501,15 @@ pub enum FontWeightContent {
}
impl JsonSchema for FontWeightContent {
- fn schema_name() -> String {
- "FontWeightContent".to_owned()
+ fn schema_name() -> Cow<'static, str> {
+ "FontWeightContent".into()
}
- fn is_referenceable() -> bool {
- false
- }
-
- fn json_schema(_: &mut SchemaGenerator) -> Schema {
- SchemaObject {
- enum_values: Some(vec![
- 100.into(),
- 200.into(),
- 300.into(),
- 400.into(),
- 500.into(),
- 600.into(),
- 700.into(),
- 800.into(),
- 900.into(),
- ]),
- ..Default::default()
- }
- .into()
+ fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
+ json_schema!({
+ "type": "integer",
+ "enum": [100, 200, 300, 400, 500, 600, 700, 800, 900]
+ })
}
}
@@ -7,19 +7,15 @@ use anyhow::Result;
use derive_more::{Deref, DerefMut};
use gpui::{
App, Context, Font, FontFallbacks, FontFeatures, FontStyle, FontWeight, Global, Pixels,
- Subscription, Window, px,
+ SharedString, Subscription, Window, px,
};
use refineable::Refineable;
-use schemars::{
- JsonSchema,
- r#gen::SchemaGenerator,
- schema::{InstanceType, Schema, SchemaObject},
-};
+use schemars::{JsonSchema, json_schema};
use serde::{Deserialize, Serialize};
-use serde_json::Value;
-use settings::{Settings, SettingsJsonSchemaParams, SettingsSources, add_references_to_properties};
+use settings::{ParameterizedJsonSchema, Settings, SettingsSources};
use std::sync::Arc;
use util::ResultExt as _;
+use util::schemars::replace_subschema;
const MIN_FONT_SIZE: Pixels = px(6.0);
const MIN_LINE_HEIGHT: f32 = 1.0;
@@ -263,25 +259,19 @@ impl Global for AgentFontSize {}
#[serde(untagged)]
pub enum ThemeSelection {
/// A static theme selection, represented by a single theme name.
- Static(#[schemars(schema_with = "theme_name_ref")] String),
+ Static(ThemeName),
/// A dynamic theme selection, which can change based the [ThemeMode].
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
mode: ThemeMode,
/// The theme to use for light mode.
- #[schemars(schema_with = "theme_name_ref")]
- light: String,
+ light: ThemeName,
/// The theme to use for dark mode.
- #[schemars(schema_with = "theme_name_ref")]
- dark: String,
+ dark: ThemeName,
},
}
-fn theme_name_ref(_: &mut SchemaGenerator) -> Schema {
- Schema::new_ref("#/definitions/ThemeName".into())
-}
-
// TODO: Rename ThemeMode -> ThemeAppearanceMode
/// The mode use to select a theme.
///
@@ -306,13 +296,13 @@ impl ThemeSelection {
/// Returns the theme name for the selected [ThemeMode].
pub fn theme(&self, system_appearance: Appearance) -> &str {
match self {
- Self::Static(theme) => theme,
+ Self::Static(theme) => &theme.0,
Self::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light,
- ThemeMode::Dark => dark,
+ ThemeMode::Light => &light.0,
+ ThemeMode::Dark => &dark.0,
ThemeMode::System => match system_appearance {
- Appearance::Light => light,
- Appearance::Dark => dark,
+ Appearance::Light => &light.0,
+ Appearance::Dark => &dark.0,
},
},
}
@@ -327,27 +317,21 @@ impl ThemeSelection {
}
}
-fn icon_theme_name_ref(_: &mut SchemaGenerator) -> Schema {
- Schema::new_ref("#/definitions/IconThemeName".into())
-}
-
/// Represents the selection of an icon theme, which can be either static or dynamic.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(untagged)]
pub enum IconThemeSelection {
/// A static icon theme selection, represented by a single icon theme name.
- Static(#[schemars(schema_with = "icon_theme_name_ref")] String),
+ Static(IconThemeName),
/// A dynamic icon theme selection, which can change based on the [`ThemeMode`].
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
mode: ThemeMode,
/// The icon theme to use for light mode.
- #[schemars(schema_with = "icon_theme_name_ref")]
- light: String,
+ light: IconThemeName,
/// The icon theme to use for dark mode.
- #[schemars(schema_with = "icon_theme_name_ref")]
- dark: String,
+ dark: IconThemeName,
},
}
@@ -355,13 +339,13 @@ impl IconThemeSelection {
/// Returns the icon theme name based on the given [`Appearance`].
pub fn icon_theme(&self, system_appearance: Appearance) -> &str {
match self {
- Self::Static(theme) => theme,
+ Self::Static(theme) => &theme.0,
Self::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light,
- ThemeMode::Dark => dark,
+ ThemeMode::Light => &light.0,
+ ThemeMode::Dark => &dark.0,
ThemeMode::System => match system_appearance {
- Appearance::Light => light,
- Appearance::Dark => dark,
+ Appearance::Light => &light.0,
+ Appearance::Dark => &dark.0,
},
},
}
@@ -384,11 +368,12 @@ pub struct ThemeSettingsContent {
pub ui_font_size: Option<f32>,
/// The name of a font to use for rendering in the UI.
#[serde(default)]
- pub ui_font_family: Option<String>,
+ pub ui_font_family: Option<FontFamilyName>,
/// The font fallbacks to use for rendering in the UI.
#[serde(default)]
#[schemars(default = "default_font_fallbacks")]
- pub ui_font_fallbacks: Option<Vec<String>>,
+ #[schemars(extend("uniqueItems" = true))]
+ pub ui_font_fallbacks: Option<Vec<FontFamilyName>>,
/// The OpenType features to enable for text in the UI.
#[serde(default)]
#[schemars(default = "default_font_features")]
@@ -398,11 +383,11 @@ pub struct ThemeSettingsContent {
pub ui_font_weight: Option<f32>,
/// The name of a font to use for rendering in text buffers.
#[serde(default)]
- pub buffer_font_family: Option<String>,
+ pub buffer_font_family: Option<FontFamilyName>,
/// The font fallbacks to use for rendering in text buffers.
#[serde(default)]
- #[schemars(default = "default_font_fallbacks")]
- pub buffer_font_fallbacks: Option<Vec<String>>,
+ #[schemars(extend("uniqueItems" = true))]
+ pub buffer_font_fallbacks: Option<Vec<FontFamilyName>>,
/// The default font size for rendering in text buffers.
#[serde(default)]
pub buffer_font_size: Option<f32>,
@@ -467,9 +452,9 @@ impl ThemeSettingsContent {
},
};
- *theme_to_update = theme_name.to_string();
+ *theme_to_update = ThemeName(theme_name.into());
} else {
- self.theme = Some(ThemeSelection::Static(theme_name.to_string()));
+ self.theme = Some(ThemeSelection::Static(ThemeName(theme_name.into())));
}
}
@@ -488,9 +473,11 @@ impl ThemeSettingsContent {
},
};
- *icon_theme_to_update = icon_theme_name.to_string();
+ *icon_theme_to_update = IconThemeName(icon_theme_name.into());
} else {
- self.icon_theme = Some(IconThemeSelection::Static(icon_theme_name.to_string()));
+ self.icon_theme = Some(IconThemeSelection::Static(IconThemeName(
+ icon_theme_name.into(),
+ )));
}
}
@@ -516,8 +503,8 @@ impl ThemeSettingsContent {
} else {
self.theme = Some(ThemeSelection::Dynamic {
mode,
- light: ThemeSettings::DEFAULT_LIGHT_THEME.into(),
- dark: ThemeSettings::DEFAULT_DARK_THEME.into(),
+ light: ThemeName(ThemeSettings::DEFAULT_LIGHT_THEME.into()),
+ dark: ThemeName(ThemeSettings::DEFAULT_DARK_THEME.into()),
});
}
@@ -539,7 +526,9 @@ impl ThemeSettingsContent {
} => *mode_to_update = mode,
}
} else {
- self.icon_theme = Some(IconThemeSelection::Static(DEFAULT_ICON_THEME_NAME.into()));
+ self.icon_theme = Some(IconThemeSelection::Static(IconThemeName(
+ DEFAULT_ICON_THEME_NAME.into(),
+ )));
}
}
}
@@ -815,26 +804,39 @@ impl settings::Settings for ThemeSettings {
let themes = ThemeRegistry::default_global(cx);
let system_appearance = SystemAppearance::default_global(cx);
+ fn font_fallbacks_from_settings(
+ fallbacks: Option<Vec<FontFamilyName>>,
+ ) -> Option<FontFallbacks> {
+ fallbacks.map(|fallbacks| {
+ FontFallbacks::from_fonts(
+ fallbacks
+ .into_iter()
+ .map(|font_family| font_family.0.to_string())
+ .collect(),
+ )
+ })
+ }
+
let defaults = sources.default;
let mut this = Self {
ui_font_size: defaults.ui_font_size.unwrap().into(),
ui_font: Font {
- family: defaults.ui_font_family.as_ref().unwrap().clone().into(),
+ family: defaults.ui_font_family.as_ref().unwrap().0.clone().into(),
features: defaults.ui_font_features.clone().unwrap(),
- fallbacks: defaults
- .ui_font_fallbacks
- .as_ref()
- .map(|fallbacks| FontFallbacks::from_fonts(fallbacks.clone())),
+ fallbacks: font_fallbacks_from_settings(defaults.ui_font_fallbacks.clone()),
weight: defaults.ui_font_weight.map(FontWeight).unwrap(),
style: Default::default(),
},
buffer_font: Font {
- family: defaults.buffer_font_family.as_ref().unwrap().clone().into(),
- features: defaults.buffer_font_features.clone().unwrap(),
- fallbacks: defaults
- .buffer_font_fallbacks
+ family: defaults
+ .buffer_font_family
.as_ref()
- .map(|fallbacks| FontFallbacks::from_fonts(fallbacks.clone())),
+ .unwrap()
+ .0
+ .clone()
+ .into(),
+ features: defaults.buffer_font_features.clone().unwrap(),
+ fallbacks: font_fallbacks_from_settings(defaults.buffer_font_fallbacks.clone()),
weight: defaults.buffer_font_weight.map(FontWeight).unwrap(),
style: FontStyle::default(),
},
@@ -872,26 +874,26 @@ impl settings::Settings for ThemeSettings {
}
if let Some(value) = value.buffer_font_family.clone() {
- this.buffer_font.family = value.into();
+ this.buffer_font.family = value.0.into();
}
if let Some(value) = value.buffer_font_features.clone() {
this.buffer_font.features = value;
}
if let Some(value) = value.buffer_font_fallbacks.clone() {
- this.buffer_font.fallbacks = Some(FontFallbacks::from_fonts(value));
+ this.buffer_font.fallbacks = font_fallbacks_from_settings(Some(value));
}
if let Some(value) = value.buffer_font_weight {
this.buffer_font.weight = clamp_font_weight(value);
}
if let Some(value) = value.ui_font_family.clone() {
- this.ui_font.family = value.into();
+ this.ui_font.family = value.0.into();
}
if let Some(value) = value.ui_font_features.clone() {
this.ui_font.features = value;
}
if let Some(value) = value.ui_font_fallbacks.clone() {
- this.ui_font.fallbacks = Some(FontFallbacks::from_fonts(value));
+ this.ui_font.fallbacks = font_fallbacks_from_settings(Some(value));
}
if let Some(value) = value.ui_font_weight {
this.ui_font.weight = clamp_font_weight(value);
@@ -959,64 +961,72 @@ impl settings::Settings for ThemeSettings {
Ok(this)
}
- fn json_schema(
- generator: &mut SchemaGenerator,
- params: &SettingsJsonSchemaParams,
- cx: &App,
- ) -> schemars::schema::RootSchema {
- let mut root_schema = generator.root_schema_for::<ThemeSettingsContent>();
- let theme_names = ThemeRegistry::global(cx)
- .list_names()
- .into_iter()
- .map(|theme_name| Value::String(theme_name.to_string()))
- .collect();
-
- let theme_name_schema = SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- enum_values: Some(theme_names),
- ..Default::default()
- };
-
- let icon_theme_names = ThemeRegistry::global(cx)
- .list_icon_themes()
- .into_iter()
- .map(|icon_theme| Value::String(icon_theme.name.to_string()))
- .collect();
-
- let icon_theme_name_schema = SchemaObject {
- instance_type: Some(InstanceType::String.into()),
- enum_values: Some(icon_theme_names),
- ..Default::default()
- };
-
- root_schema.definitions.extend([
- ("ThemeName".into(), theme_name_schema.into()),
- ("IconThemeName".into(), icon_theme_name_schema.into()),
- ("FontFamilies".into(), params.font_family_schema()),
- ("FontFallbacks".into(), params.font_fallback_schema()),
- ]);
-
- add_references_to_properties(
- &mut root_schema,
- &[
- ("buffer_font_family", "#/definitions/FontFamilies"),
- ("buffer_font_fallbacks", "#/definitions/FontFallbacks"),
- ("ui_font_family", "#/definitions/FontFamilies"),
- ("ui_font_fallbacks", "#/definitions/FontFallbacks"),
- ],
- );
-
- root_schema
- }
-
fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
vscode.f32_setting("editor.fontWeight", &mut current.buffer_font_weight);
vscode.f32_setting("editor.fontSize", &mut current.buffer_font_size);
- vscode.string_setting("editor.font", &mut current.buffer_font_family);
+ if let Some(font) = vscode.read_string("editor.font") {
+ current.buffer_font_family = Some(FontFamilyName(font.into()));
+ }
// TODO: possibly map editor.fontLigatures to buffer_font_features?
}
}
+/// Newtype for a theme name. Its `ParameterizedJsonSchema` lists the theme names known at runtime.
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[serde(transparent)]
+pub struct ThemeName(pub Arc<str>);
+
+inventory::submit! {
+ ParameterizedJsonSchema {
+ add_and_get_ref: |generator, _params, cx| {
+ replace_subschema::<ThemeName>(generator, || json_schema!({
+ "type": "string",
+ "enum": ThemeRegistry::global(cx).list_names(),
+ }))
+ }
+ }
+}
+
+/// Newtype for an icon theme name. Its `ParameterizedJsonSchema` lists the icon theme names known at
+/// runtime.
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[serde(transparent)]
+pub struct IconThemeName(pub Arc<str>);
+
+inventory::submit! {
+ ParameterizedJsonSchema {
+ add_and_get_ref: |generator, _params, cx| {
+ replace_subschema::<IconThemeName>(generator, || json_schema!({
+ "type": "string",
+ "enum": ThemeRegistry::global(cx)
+ .list_icon_themes()
+ .into_iter()
+ .map(|icon_theme| icon_theme.name)
+ .collect::<Vec<SharedString>>(),
+ }))
+ }
+ }
+}
+
+/// Newtype for a font family name. Its `ParameterizedJsonSchema` lists the font families known at
+/// runtime.
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[serde(transparent)]
+pub struct FontFamilyName(pub Arc<str>);
+
+inventory::submit! {
+ ParameterizedJsonSchema {
+ add_and_get_ref: |generator, params, _cx| {
+ replace_subschema::<FontFamilyName>(generator, || {
+ json_schema!({
+ "type": "string",
+ "enum": params.font_names,
+ })
+ })
+ }
+ }
+}
+
fn merge<T: Copy>(target: &mut T, value: Option<T>) {
if let Some(value) = value {
*target = value;
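
Because all three newtypes are `#[serde(transparent)]`, settings files keep their plain-string form; only the generated JSON schema gains the runtime enum. A minimal test-style sketch of what that means for deserialization (assuming `serde_json` and this crate's `Appearance` are in scope):

    #[test]
    fn theme_selection_still_accepts_plain_strings() {
        // "light" and "dark" deserialize directly into `ThemeName(Arc<str>)`.
        let json = r#"{ "mode": "system", "light": "One Light", "dark": "One Dark" }"#;
        let selection: ThemeSelection = serde_json::from_str(json).unwrap();
        assert_eq!(selection.theme(Appearance::Dark), "One Dark");
    }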
@@ -7,7 +7,7 @@ use crate::{
};
/// A collection of colors that are used to color indent aware lines in the editor.
-#[derive(Clone, Deserialize, PartialEq)]
+#[derive(Clone, Debug, Deserialize, PartialEq)]
pub struct AccentColors(pub Vec<Hsla>);
impl Default for AccentColors {
@@ -535,7 +535,7 @@ pub fn all_theme_colors(cx: &mut App) -> Vec<(Hsla, SharedString)> {
.collect()
}
-#[derive(Refineable, Clone, PartialEq)]
+#[derive(Refineable, Clone, Debug, PartialEq)]
pub struct ThemeStyles {
/// The background appearance of the window.
pub window_background_appearance: WindowBackgroundAppearance,
@@ -20,7 +20,7 @@ pub struct PlayerColor {
///
/// The rest of the default colors crisscross back and forth on the
/// color wheel so that the colors are as distinct as possible.
-#[derive(Clone, Deserialize, PartialEq)]
+#[derive(Clone, Debug, Deserialize, PartialEq)]
pub struct PlayerColors(pub Vec<PlayerColor>);
impl Default for PlayerColors {
@@ -2,7 +2,7 @@
use gpui::{Hsla, hsla};
-#[derive(Clone, PartialEq)]
+#[derive(Clone, Debug, PartialEq)]
pub struct SystemColors {
pub transparent: Hsla,
pub mac_os_traffic_light_red: Hsla,
@@ -268,7 +268,7 @@ pub fn refine_theme_family(theme_family_content: ThemeFamilyContent) -> ThemeFam
}
/// A theme is the primary mechanism for defining the appearance of the UI.
-#[derive(Clone, PartialEq)]
+#[derive(Clone, Debug, PartialEq)]
pub struct Theme {
/// The unique identifier for the theme.
pub id: String,
@@ -17,7 +17,13 @@ use zed_actions::{ExtensionCategoryFilter, Extensions};
use crate::icon_theme_selector::{IconThemeSelector, IconThemeSelectorDelegate};
-actions!(theme_selector, [Reload]);
+actions!(
+ theme_selector,
+ [
+ /// Reloads all themes from disk.
+ Reload
+ ]
+);
pub fn init(cx: &mut App) {
cx.on_action(|action: &zed_actions::theme_selector::Toggle, cx| {
@@ -32,7 +32,7 @@ call.workspace = true
chrono.workspace = true
client.workspace = true
db.workspace = true
-gpui.workspace = true
+gpui = { workspace = true, features = ["screen-capture"] }
notifications.workspace = true
project.workspace = true
remote.workspace = true
@@ -12,7 +12,15 @@ use smallvec::SmallVec;
use ui::{ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*};
#[cfg(not(target_os = "macos"))]
-actions!(app_menu, [ActivateMenuRight, ActivateMenuLeft]);
+actions!(
+ app_menu,
+ [
+ /// Navigates to the menu item on the right.
+ ActivateMenuRight,
+ /// Navigates to the menu item on the left.
+ ActivateMenuLeft
+ ]
+);
#[cfg(not(target_os = "macos"))]
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Default, Action)]
@@ -11,7 +11,17 @@ use workspace::notifications::DetachAndPromptErr;
use crate::TitleBar;
-actions!(collab, [ToggleScreenSharing, ToggleMute, ToggleDeafen]);
+actions!(
+ collab,
+ [
+ /// Toggles screen sharing on or off.
+ ToggleScreenSharing,
+ /// Toggles microphone mute.
+ ToggleMute,
+ /// Toggles deafen mode (mute both microphone and speakers).
+ ToggleDeafen
+ ]
+);
fn toggle_screen_sharing(_: &ToggleScreenSharing, window: &mut Window, cx: &mut App) {
let call = ActiveCall::global(cx).read(cx);
@@ -47,7 +47,17 @@ const MAX_PROJECT_NAME_LENGTH: usize = 40;
const MAX_BRANCH_NAME_LENGTH: usize = 40;
const MAX_SHORT_SHA_LENGTH: usize = 8;
-actions!(collab, [ToggleUserMenu, ToggleProjectMenu, SwitchBranch]);
+actions!(
+ collab,
+ [
+ /// Toggles the user menu dropdown.
+ ToggleUserMenu,
+ /// Toggles the project menu dropdown.
+ ToggleProjectMenu,
+ /// Switches to a different git branch.
+ SwitchBranch
+ ]
+);
pub fn init(cx: &mut App) {
TitleBarSettings::register(cx);
@@ -15,7 +15,13 @@ use ui::{HighlightedLabel, ListItem, ListItemSpacing, prelude::*};
use util::ResultExt;
use workspace::{ModalView, Workspace};
-actions!(toolchain, [Select]);
+actions!(
+ toolchain,
+ [
+ /// Selects a toolchain for the current project.
+ Select
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(ToolchainSelector::register).detach();
@@ -32,9 +32,9 @@ mod scrollbar;
mod settings_container;
mod settings_group;
mod stack;
+mod sticky_items;
mod tab;
mod tab_bar;
-mod table;
mod toggle;
mod tooltip;
@@ -75,9 +75,9 @@ pub use scrollbar::*;
pub use settings_container::*;
pub use settings_group::*;
pub use stack::*;
+pub use sticky_items::*;
pub use tab::*;
pub use tab_bar::*;
-pub use table::*;
pub use toggle::*;
pub use tooltip::*;
@@ -1,4 +1,4 @@
-use gpui::AnyElement;
+use gpui::{AnyElement, Hsla};
use crate::prelude::*;
@@ -24,7 +24,9 @@ pub struct Callout {
description: Option<SharedString>,
primary_action: Option<AnyElement>,
secondary_action: Option<AnyElement>,
+ tertiary_action: Option<AnyElement>,
line_height: Option<Pixels>,
+ bg_color: Option<Hsla>,
}
impl Callout {
@@ -36,7 +38,9 @@ impl Callout {
description: None,
primary_action: None,
secondary_action: None,
+ tertiary_action: None,
line_height: None,
+ bg_color: None,
}
}
@@ -71,64 +75,81 @@ impl Callout {
self
}
+ /// Sets an optional tertiary call-to-action button.
+ pub fn tertiary_action(mut self, action: impl IntoElement) -> Self {
+ self.tertiary_action = Some(action.into_any_element());
+ self
+ }
+
/// Sets a custom line height for the callout content.
pub fn line_height(mut self, line_height: Pixels) -> Self {
self.line_height = Some(line_height);
self
}
+
+ /// Sets a custom background color for the callout content.
+ pub fn bg_color(mut self, color: Hsla) -> Self {
+ self.bg_color = Some(color);
+ self
+ }
}
impl RenderOnce for Callout {
fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
let line_height = self.line_height.unwrap_or(window.line_height());
+ let bg_color = self
+ .bg_color
+ .unwrap_or(cx.theme().colors().panel_background);
+ let has_actions = self.primary_action.is_some()
+ || self.secondary_action.is_some()
+ || self.tertiary_action.is_some();
h_flex()
- .w_full()
.p_2()
.gap_2()
.items_start()
- .bg(cx.theme().colors().panel_background)
+ .bg(bg_color)
.overflow_x_hidden()
.when_some(self.icon, |this, icon| {
this.child(h_flex().h(line_height).justify_center().child(icon))
})
.child(
v_flex()
+ .min_w_0()
.w_full()
.child(
h_flex()
.h(line_height)
.w_full()
.gap_1()
- .flex_wrap()
.justify_between()
.when_some(self.title, |this, title| {
this.child(h_flex().child(Label::new(title).size(LabelSize::Small)))
})
- .when(
- self.primary_action.is_some() || self.secondary_action.is_some(),
- |this| {
- this.child(
- h_flex()
- .gap_0p5()
- .when_some(self.secondary_action, |this, action| {
- this.child(action)
- })
- .when_some(self.primary_action, |this, action| {
- this.child(action)
- }),
- )
- },
- ),
+ .when(has_actions, |this| {
+ this.child(
+ h_flex()
+ .gap_0p5()
+ .when_some(self.tertiary_action, |this, action| {
+ this.child(action)
+ })
+ .when_some(self.secondary_action, |this, action| {
+ this.child(action)
+ })
+ .when_some(self.primary_action, |this, action| {
+ this.child(action)
+ }),
+ )
+ }),
)
.when_some(self.description, |this, description| {
this.child(
div()
.w_full()
.flex_1()
- .child(description)
.text_ui_sm(cx)
- .text_color(cx.theme().colors().text_muted),
+ .text_color(cx.theme().colors().text_muted)
+ .child(description),
)
}),
)
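
With the extra action slot and background override, a `Callout` call site can now look roughly like the sketch below; the `Button` usage and the builder names other than `tertiary_action` and `bg_color` are assumed from the surrounding component API rather than taken from this diff:

    Callout::new()
        .title("Agent ran out of tokens")
        .description("Start a new thread or trim the context to continue.")
        .primary_action(Button::new("new-thread", "New Thread"))
        .secondary_action(Button::new("trim-context", "Trim Context"))
        .tertiary_action(Button::new("learn-more", "Learn More"))
        .bg_color(cx.theme().colors().editor_background)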
@@ -503,8 +503,9 @@ impl ContextMenu {
self
}
- pub fn disabled_action(
+ pub fn action_disabled_when(
mut self,
+ disabled: bool,
label: impl Into<SharedString>,
action: Box<dyn Action>,
) -> Self {
@@ -522,7 +523,7 @@ impl ContextMenu {
icon_size: IconSize::Small,
icon_position: IconPosition::End,
icon_color: None,
- disabled: true,
+ disabled,
documentation_aside: None,
end_slot_icon: None,
end_slot_title: None,
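
The rename from `disabled_action` to `action_disabled_when` turns the always-disabled entry into a conditional one. A hedged sketch of a call site; the `ContextMenu::build` signature, the `Delete` action, and the `has_write_access` flag are assumptions for illustration:

    ContextMenu::build(window, cx, |menu, _window, _cx| {
        // Previously this required `disabled_action`, which hard-coded `disabled: true`.
        menu.action_disabled_when(!has_write_access, "Delete", Box::new(Delete))
    })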
@@ -8,11 +8,12 @@ use itertools::Itertools;
#[derive(Debug, IntoElement, Clone, RegisterComponent)]
pub struct KeyBinding {
- /// A keybinding consists of a key and a set of modifier keys.
- /// More then one keybinding produces a chord.
+ /// A keybinding consists of a set of keystrokes,
+ /// where each keystroke is a key and a set of modifier keys.
+ /// More than one keystroke produces a chord.
///
- /// This should always contain at least one element.
- key_binding: gpui::KeyBinding,
+ /// This should always contain at least one keystroke.
+ pub keystrokes: Vec<Keystroke>,
/// The [`PlatformStyle`] to use when displaying this keybinding.
platform_style: PlatformStyle,
@@ -36,7 +37,7 @@ impl KeyBinding {
return Self::for_action_in(action, &focused, window, cx);
}
let key_binding = window.highest_precedence_binding_for_action(action)?;
- Some(Self::new(key_binding, cx))
+ Some(Self::new_from_gpui(key_binding, cx))
}
/// Like `for_action`, but lets you specify the context from which keybindings are matched.
@@ -47,7 +48,7 @@ impl KeyBinding {
cx: &App,
) -> Option<Self> {
let key_binding = window.highest_precedence_binding_for_action_in(action, focus)?;
- Some(Self::new(key_binding, cx))
+ Some(Self::new_from_gpui(key_binding, cx))
}
pub fn set_vim_mode(cx: &mut App, enabled: bool) {
@@ -58,9 +59,9 @@ impl KeyBinding {
cx.try_global::<VimStyle>().is_some_and(|g| g.0)
}
- pub fn new(key_binding: gpui::KeyBinding, cx: &App) -> Self {
+ pub fn new(keystrokes: Vec<Keystroke>, cx: &App) -> Self {
Self {
- key_binding,
+ keystrokes,
platform_style: PlatformStyle::platform(),
size: None,
vim_mode: KeyBinding::is_vim_mode(cx),
@@ -68,6 +69,10 @@ impl KeyBinding {
}
}
+ pub fn new_from_gpui(key_binding: gpui::KeyBinding, cx: &App) -> Self {
+ Self::new(key_binding.keystrokes().to_vec(), cx)
+ }
+
/// Sets the [`PlatformStyle`] for this [`KeyBinding`].
pub fn platform_style(mut self, platform_style: PlatformStyle) -> Self {
self.platform_style = platform_style;
@@ -91,15 +96,20 @@ impl KeyBinding {
self.vim_mode = enabled;
self
}
+}
- fn render_key(&self, keystroke: &Keystroke, color: Option<Color>) -> AnyElement {
- let key_icon = icon_for_key(keystroke, self.platform_style);
- match key_icon {
- Some(icon) => KeyIcon::new(icon, color).size(self.size).into_any_element(),
- None => {
- let key = util::capitalize(&keystroke.key);
- Key::new(&key, color).size(self.size).into_any_element()
- }
+fn render_key(
+ keystroke: &Keystroke,
+ color: Option<Color>,
+ platform_style: PlatformStyle,
+ size: impl Into<Option<AbsoluteLength>>,
+) -> AnyElement {
+ let key_icon = icon_for_key(keystroke, platform_style);
+ match key_icon {
+ Some(icon) => KeyIcon::new(icon, color).size(size).into_any_element(),
+ None => {
+ let key = util::capitalize(&keystroke.key);
+ Key::new(&key, color).size(size).into_any_element()
}
}
}
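
`KeyBinding::new` now takes the keystrokes directly, while `new_from_gpui` covers the old construction from a `gpui::KeyBinding`. A small sketch, assuming `gpui::Keystroke::parse` and an `&App` in scope:

    // From raw keystrokes, with no gpui::KeyBinding involved:
    let chord = KeyBinding::new(
        vec![
            Keystroke::parse("ctrl-k").unwrap(),
            Keystroke::parse("ctrl-s").unwrap(),
        ],
        cx,
    );

    // From an existing gpui binding, as the previews below do:
    let from_binding =
        KeyBinding::new_from_gpui(gpui::KeyBinding::new("ctrl-k ctrl-s", gpui::NoAction, None), cx);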
@@ -107,17 +117,12 @@ impl KeyBinding {
impl RenderOnce for KeyBinding {
fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
let color = self.disabled.then_some(Color::Disabled);
- let use_text = self.vim_mode
- || matches!(
- self.platform_style,
- PlatformStyle::Linux | PlatformStyle::Windows
- );
+
h_flex()
.debug_selector(|| {
format!(
"KEY_BINDING-{}",
- self.key_binding
- .keystrokes()
+ self.keystrokes
.iter()
.map(|k| k.key.to_string())
.collect::<Vec<_>>()
@@ -126,35 +131,56 @@ impl RenderOnce for KeyBinding {
})
.gap(DynamicSpacing::Base04.rems(cx))
.flex_none()
- .children(self.key_binding.keystrokes().iter().map(|keystroke| {
+ .children(self.keystrokes.iter().map(|keystroke| {
h_flex()
.flex_none()
.py_0p5()
.rounded_xs()
.text_color(cx.theme().colors().text_muted)
- .when(use_text, |el| {
- el.child(
- Key::new(
- keystroke_text(&keystroke, self.platform_style, self.vim_mode),
- color,
- )
- .size(self.size),
- )
- })
- .when(!use_text, |el| {
- el.children(render_modifiers(
- &keystroke.modifiers,
- self.platform_style,
- color,
- self.size,
- true,
- ))
- .map(|el| el.child(self.render_key(&keystroke, color)))
- })
+ .children(render_keystroke(
+ keystroke,
+ color,
+ self.size,
+ self.platform_style,
+ self.vim_mode,
+ ))
}))
}
}
+pub fn render_keystroke(
+ keystroke: &Keystroke,
+ color: Option<Color>,
+ size: impl Into<Option<AbsoluteLength>>,
+ platform_style: PlatformStyle,
+ vim_mode: bool,
+) -> Vec<AnyElement> {
+ let use_text = vim_mode
+ || matches!(
+ platform_style,
+ PlatformStyle::Linux | PlatformStyle::Windows
+ );
+ let size = size.into();
+
+ if use_text {
+ let element = Key::new(keystroke_text(&keystroke, platform_style, vim_mode), color)
+ .size(size)
+ .into_any_element();
+ vec![element]
+ } else {
+ let mut elements = Vec::new();
+ elements.extend(render_modifiers(
+ &keystroke.modifiers,
+ platform_style,
+ color,
+ size,
+ true,
+ ));
+ elements.push(render_key(&keystroke, color, platform_style, size));
+ elements
+ }
+}
+
fn icon_for_key(keystroke: &Keystroke, platform_style: PlatformStyle) -> Option<IconName> {
match keystroke.key.as_str() {
"left" => Some(IconName::ArrowLeft),
@@ -465,7 +491,7 @@ impl Component for KeyBinding {
vec![
single_example(
"Default",
- KeyBinding::new(
+ KeyBinding::new_from_gpui(
gpui::KeyBinding::new("ctrl-s", gpui::NoAction, None),
cx,
)
@@ -473,7 +499,7 @@ impl Component for KeyBinding {
),
single_example(
"Mac Style",
- KeyBinding::new(
+ KeyBinding::new_from_gpui(
gpui::KeyBinding::new("cmd-s", gpui::NoAction, None),
cx,
)
@@ -482,7 +508,7 @@ impl Component for KeyBinding {
),
single_example(
"Windows Style",
- KeyBinding::new(
+ KeyBinding::new_from_gpui(
gpui::KeyBinding::new("ctrl-s", gpui::NoAction, None),
cx,
)
@@ -495,9 +521,12 @@ impl Component for KeyBinding {
"Vim Mode",
vec![single_example(
"Vim Mode Enabled",
- KeyBinding::new(gpui::KeyBinding::new("dd", gpui::NoAction, None), cx)
- .vim_mode(true)
- .into_any_element(),
+ KeyBinding::new_from_gpui(
+ gpui::KeyBinding::new("dd", gpui::NoAction, None),
+ cx,
+ )
+ .vim_mode(true)
+ .into_any_element(),
)],
),
example_group_with_title(
@@ -505,7 +534,7 @@ impl Component for KeyBinding {
vec![
single_example(
"Multiple Keys",
- KeyBinding::new(
+ KeyBinding::new_from_gpui(
gpui::KeyBinding::new("ctrl-k ctrl-b", gpui::NoAction, None),
cx,
)
@@ -513,7 +542,7 @@ impl Component for KeyBinding {
),
single_example(
"With Shift",
- KeyBinding::new(
+ KeyBinding::new_from_gpui(
gpui::KeyBinding::new("shift-cmd-p", gpui::NoAction, None),
cx,
)
@@ -216,7 +216,7 @@ impl Component for KeybindingHint {
fn preview(window: &mut Window, cx: &mut App) -> Option<AnyElement> {
let enter_fallback = gpui::KeyBinding::new("enter", menu::Confirm, None);
let enter = KeyBinding::for_action(&menu::Confirm, window, cx)
- .unwrap_or(KeyBinding::new(enter_fallback, cx));
+ .unwrap_or(KeyBinding::new_from_gpui(enter_fallback, cx));
let bg_color = cx.theme().colors().surface_background;
@@ -1,7 +1,9 @@
mod highlighted_label;
mod label;
mod label_like;
+mod loading_label;
pub use highlighted_label::*;
pub use label::*;
pub use label_like::*;
+pub use loading_label::*;
@@ -1,24 +1,24 @@
+use crate::prelude::*;
use gpui::{Animation, AnimationExt, FontWeight, pulsating_between};
use std::time::Duration;
-use ui::prelude::*;
#[derive(IntoElement)]
-pub struct AnimatedLabel {
+pub struct LoadingLabel {
base: Label,
text: SharedString,
}
-impl AnimatedLabel {
+impl LoadingLabel {
pub fn new(text: impl Into<SharedString>) -> Self {
let text = text.into();
- AnimatedLabel {
+ LoadingLabel {
base: Label::new(text.clone()),
text,
}
}
}
-impl LabelCommon for AnimatedLabel {
+impl LabelCommon for LoadingLabel {
fn size(mut self, size: LabelSize) -> Self {
self.base = self.base.size(size);
self
@@ -80,14 +80,14 @@ impl LabelCommon for AnimatedLabel {
}
}
-impl RenderOnce for AnimatedLabel {
+impl RenderOnce for LoadingLabel {
fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement {
let text = self.text.clone();
self.base
.color(Color::Muted)
.with_animations(
- "animated-label",
+ "loading_label",
vec![
Animation::new(Duration::from_secs(1)),
Animation::new(Duration::from_secs(1)).repeat(),
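
A minimal call site for the renamed component (the label text and sizing are illustrative):

    // Same pulsing treatment the old AnimatedLabel had, now re-exported through the ui prelude.
    LoadingLabel::new("Indexing…").size(LabelSize::Small)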
@@ -9,7 +9,7 @@ use gpui::{
pub struct RightClickMenu<M: ManagedView> {
id: ElementId,
- child_builder: Option<Box<dyn FnOnce(bool) -> AnyElement + 'static>>,
+ child_builder: Option<Box<dyn FnOnce(bool, &mut Window, &mut App) -> AnyElement + 'static>>,
menu_builder: Option<Rc<dyn Fn(&mut Window, &mut App) -> Entity<M> + 'static>>,
anchor: Option<Corner>,
attach: Option<Corner>,
@@ -23,11 +23,11 @@ impl<M: ManagedView> RightClickMenu<M> {
pub fn trigger<F, E>(mut self, e: F) -> Self
where
- F: FnOnce(bool) -> E + 'static,
+ F: FnOnce(bool, &mut Window, &mut App) -> E + 'static,
E: IntoElement + 'static,
{
- self.child_builder = Some(Box::new(move |is_menu_active| {
- e(is_menu_active).into_any_element()
+ self.child_builder = Some(Box::new(move |is_menu_active, window, cx| {
+ e(is_menu_active, window, cx).into_any_element()
}));
self
}
@@ -149,10 +149,9 @@ impl<M: ManagedView> Element for RightClickMenu<M> {
element
});
- let mut child_element = this
- .child_builder
- .take()
- .map(|child_builder| (child_builder)(element_state.menu.borrow().is_some()));
+ let mut child_element = this.child_builder.take().map(|child_builder| {
+ (child_builder)(element_state.menu.borrow().is_some(), window, cx)
+ });
let child_layout_id = child_element
.as_mut()
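
Since the trigger closure now also receives the `Window` and `App`, the trigger element can react to application state as well as to `is_menu_active`. A hedged sketch (the `build_menu` helper is assumed):

    right_click_menu("project-entry")
        .trigger(|is_menu_active, _window, _cx| {
            Label::new("src/main.rs").color(if is_menu_active {
                Color::Accent
            } else {
                Color::Default
            })
        })
        .menu(move |window, cx| build_menu(window, cx))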
@@ -0,0 +1,150 @@
+use std::ops::Range;
+
+use gpui::{
+ AnyElement, App, AvailableSpace, Bounds, Context, Entity, Pixels, Render, UniformListTopSlot,
+ Window, point, size,
+};
+use smallvec::SmallVec;
+
+pub trait StickyCandidate {
+ fn depth(&self) -> usize;
+}
+
+pub struct StickyItems<T> {
+ compute_fn: Box<dyn Fn(Range<usize>, &mut Window, &mut App) -> Vec<T>>,
+ render_fn: Box<dyn Fn(T, &mut Window, &mut App) -> SmallVec<[AnyElement; 8]>>,
+ last_item_is_drifting: bool,
+ anchor_index: Option<usize>,
+}
+
+pub fn sticky_items<V, T>(
+ entity: Entity<V>,
+ compute_fn: impl Fn(&mut V, Range<usize>, &mut Window, &mut Context<V>) -> Vec<T> + 'static,
+ render_fn: impl Fn(&mut V, T, &mut Window, &mut Context<V>) -> SmallVec<[AnyElement; 8]> + 'static,
+) -> StickyItems<T>
+where
+ V: Render,
+ T: StickyCandidate + Clone + 'static,
+{
+ let entity_compute = entity.clone();
+ let entity_render = entity.clone();
+
+ let compute_fn = Box::new(
+ move |range: Range<usize>, window: &mut Window, cx: &mut App| -> Vec<T> {
+ entity_compute.update(cx, |view, cx| compute_fn(view, range, window, cx))
+ },
+ );
+ let render_fn = Box::new(
+ move |entry: T, window: &mut Window, cx: &mut App| -> SmallVec<[AnyElement; 8]> {
+ entity_render.update(cx, |view, cx| render_fn(view, entry, window, cx))
+ },
+ );
+ StickyItems {
+ compute_fn,
+ render_fn,
+ last_item_is_drifting: false,
+ anchor_index: None,
+ }
+}
+
+impl<T> UniformListTopSlot for StickyItems<T>
+where
+ T: StickyCandidate + Clone + 'static,
+{
+ fn compute(
+ &mut self,
+ visible_range: Range<usize>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> SmallVec<[AnyElement; 8]> {
+ let entries = (self.compute_fn)(visible_range.clone(), window, cx);
+
+ let mut anchor_entry = None;
+
+ let mut iter = entries.iter().enumerate().peekable();
+ while let Some((ix, current_entry)) = iter.next() {
+ let current_depth = current_entry.depth();
+ let index_in_range = ix;
+
+ if current_depth < index_in_range {
+ anchor_entry = Some(current_entry.clone());
+ break;
+ }
+
+ if let Some(&(_next_ix, next_entry)) = iter.peek() {
+ let next_depth = next_entry.depth();
+
+ if next_depth < current_depth && next_depth < index_in_range {
+ self.last_item_is_drifting = true;
+ self.anchor_index = Some(visible_range.start + ix);
+ anchor_entry = Some(current_entry.clone());
+ break;
+ }
+ }
+ }
+
+ if let Some(anchor_entry) = anchor_entry {
+ (self.render_fn)(anchor_entry, window, cx)
+ } else {
+ SmallVec::new()
+ }
+ }
+
+ fn prepaint(
+ &self,
+ items: &mut SmallVec<[AnyElement; 8]>,
+ bounds: Bounds<Pixels>,
+ item_height: Pixels,
+ scroll_offset: gpui::Point<Pixels>,
+ padding: gpui::Edges<Pixels>,
+ can_scroll_horizontally: bool,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let items_count = items.len();
+
+ for (ix, item) in items.iter_mut().enumerate() {
+ let mut item_y_offset = None;
+ if ix == items_count - 1 && self.last_item_is_drifting {
+ if let Some(anchor_index) = self.anchor_index {
+ let scroll_top = -scroll_offset.y;
+ let anchor_top = item_height * anchor_index;
+ let sticky_area_height = item_height * items_count;
+ item_y_offset =
+ Some((anchor_top - scroll_top - sticky_area_height).min(Pixels::ZERO));
+ };
+ }
+
+ let sticky_origin = bounds.origin
+ + point(
+ if can_scroll_horizontally {
+ scroll_offset.x + padding.left
+ } else {
+ scroll_offset.x
+ },
+ item_height * ix + padding.top + item_y_offset.unwrap_or(Pixels::ZERO),
+ );
+
+ let available_width = if can_scroll_horizontally {
+ bounds.size.width + scroll_offset.x.abs()
+ } else {
+ bounds.size.width
+ };
+
+ let available_space = size(
+ AvailableSpace::Definite(available_width),
+ AvailableSpace::Definite(item_height),
+ );
+
+ item.layout_as_root(available_space, window, cx);
+ item.prepaint_at(sticky_origin, window, cx);
+ }
+ }
+
+ fn paint(&self, items: &mut SmallVec<[AnyElement; 8]>, window: &mut Window, cx: &mut App) {
+        // Reverse so that the last item is bottom-most among the sticky items.
+ for item in items.iter_mut().rev() {
+ item.paint(window, cx);
+ }
+ }
+}
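
A sketch of how the new API is intended to be wired up; `OutlineView`, its `entries` field, and `render_sticky_entry` are assumptions for illustration, not part of this change:

    // Hypothetical entry type for an outline-style uniform list.
    #[derive(Clone)]
    struct OutlineEntry {
        depth: usize,
        label: SharedString,
    }

    impl StickyCandidate for OutlineEntry {
        fn depth(&self) -> usize {
            self.depth
        }
    }

    // `outline_view` is an `Entity<OutlineView>` owning the same entries the
    // uniform list renders.
    let sticky = sticky_items(
        outline_view.clone(),
        |view, visible_range, _window, _cx| view.entries[visible_range].to_vec(),
        |view, entry, window, cx| view.render_sticky_entry(&entry, window, cx),
    );
    // The returned `StickyItems` implements `UniformListTopSlot` and is handed to
    // the uniform list that renders the underlying entries.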
@@ -47,12 +47,12 @@ impl Render for ContextMenuStory {
.justify_between()
.child(
right_click_menu("test2")
- .trigger(|_| Label::new("TOP LEFT"))
+ .trigger(|_, _, _| Label::new("TOP LEFT"))
.menu(move |window, cx| build_menu(window, cx, "top left")),
)
.child(
right_click_menu("test1")
- .trigger(|_| Label::new("BOTTOM LEFT"))
+ .trigger(|_, _, _| Label::new("BOTTOM LEFT"))
.anchor(Corner::BottomLeft)
.attach(Corner::TopLeft)
.menu(move |window, cx| build_menu(window, cx, "bottom left")),
@@ -65,13 +65,13 @@ impl Render for ContextMenuStory {
.justify_between()
.child(
right_click_menu("test3")
- .trigger(|_| Label::new("TOP RIGHT"))
+ .trigger(|_, _, _| Label::new("TOP RIGHT"))
.anchor(Corner::TopRight)
.menu(move |window, cx| build_menu(window, cx, "top right")),
)
.child(
right_click_menu("test4")
- .trigger(|_| Label::new("BOTTOM RIGHT"))
+ .trigger(|_, _, _| Label::new("BOTTOM RIGHT"))
.anchor(Corner::BottomRight)
.attach(Corner::TopRight)
.menu(move |window, cx| build_menu(window, cx, "bottom right")),
@@ -18,16 +18,16 @@ impl Render for KeybindingStory {
Story::container(cx)
.child(Story::title_for::<KeyBinding>(cx))
.child(Story::label("Single Key", cx))
- .child(KeyBinding::new(binding("Z"), cx))
+ .child(KeyBinding::new_from_gpui(binding("Z"), cx))
.child(Story::label("Single Key with Modifier", cx))
.child(
div()
.flex()
.gap_3()
- .child(KeyBinding::new(binding("ctrl-c"), cx))
- .child(KeyBinding::new(binding("alt-c"), cx))
- .child(KeyBinding::new(binding("cmd-c"), cx))
- .child(KeyBinding::new(binding("shift-c"), cx)),
+ .child(KeyBinding::new_from_gpui(binding("ctrl-c"), cx))
+ .child(KeyBinding::new_from_gpui(binding("alt-c"), cx))
+ .child(KeyBinding::new_from_gpui(binding("cmd-c"), cx))
+ .child(KeyBinding::new_from_gpui(binding("shift-c"), cx)),
)
.child(Story::label("Single Key with Modifier (Permuted)", cx))
.child(
@@ -41,42 +41,59 @@ impl Render for KeybindingStory {
.gap_4()
.py_3()
.children(chunk.map(|permutation| {
- KeyBinding::new(binding(&(permutation.join("-") + "-x")), cx)
+ KeyBinding::new_from_gpui(
+ binding(&(permutation.join("-") + "-x")),
+ cx,
+ )
}))
}),
),
)
.child(Story::label("Single Key with All Modifiers", cx))
- .child(KeyBinding::new(binding("ctrl-alt-cmd-shift-z"), cx))
+ .child(KeyBinding::new_from_gpui(
+ binding("ctrl-alt-cmd-shift-z"),
+ cx,
+ ))
.child(Story::label("Chord", cx))
- .child(KeyBinding::new(binding("a z"), cx))
+ .child(KeyBinding::new_from_gpui(binding("a z"), cx))
.child(Story::label("Chord with Modifier", cx))
- .child(KeyBinding::new(binding("ctrl-a shift-z"), cx))
- .child(KeyBinding::new(binding("fn-s"), cx))
+ .child(KeyBinding::new_from_gpui(binding("ctrl-a shift-z"), cx))
+ .child(KeyBinding::new_from_gpui(binding("fn-s"), cx))
.child(Story::label("Single Key with All Modifiers (Linux)", cx))
.child(
- KeyBinding::new(binding("ctrl-alt-cmd-shift-z"), cx)
+ KeyBinding::new_from_gpui(binding("ctrl-alt-cmd-shift-z"), cx)
.platform_style(PlatformStyle::Linux),
)
.child(Story::label("Chord (Linux)", cx))
- .child(KeyBinding::new(binding("a z"), cx).platform_style(PlatformStyle::Linux))
+ .child(
+ KeyBinding::new_from_gpui(binding("a z"), cx).platform_style(PlatformStyle::Linux),
+ )
.child(Story::label("Chord with Modifier (Linux)", cx))
.child(
- KeyBinding::new(binding("ctrl-a shift-z"), cx).platform_style(PlatformStyle::Linux),
+ KeyBinding::new_from_gpui(binding("ctrl-a shift-z"), cx)
+ .platform_style(PlatformStyle::Linux),
+ )
+ .child(
+ KeyBinding::new_from_gpui(binding("fn-s"), cx).platform_style(PlatformStyle::Linux),
)
- .child(KeyBinding::new(binding("fn-s"), cx).platform_style(PlatformStyle::Linux))
.child(Story::label("Single Key with All Modifiers (Windows)", cx))
.child(
- KeyBinding::new(binding("ctrl-alt-cmd-shift-z"), cx)
+ KeyBinding::new_from_gpui(binding("ctrl-alt-cmd-shift-z"), cx)
.platform_style(PlatformStyle::Windows),
)
.child(Story::label("Chord (Windows)", cx))
- .child(KeyBinding::new(binding("a z"), cx).platform_style(PlatformStyle::Windows))
+ .child(
+ KeyBinding::new_from_gpui(binding("a z"), cx)
+ .platform_style(PlatformStyle::Windows),
+ )
.child(Story::label("Chord with Modifier (Windows)", cx))
.child(
- KeyBinding::new(binding("ctrl-a shift-z"), cx)
+ KeyBinding::new_from_gpui(binding("ctrl-a shift-z"), cx)
+ .platform_style(PlatformStyle::Windows),
+ )
+ .child(
+ KeyBinding::new_from_gpui(binding("fn-s"), cx)
.platform_style(PlatformStyle::Windows),
)
- .child(KeyBinding::new(binding("fn-s"), cx).platform_style(PlatformStyle::Windows))
}
}
@@ -1,271 +0,0 @@
-use crate::{Indicator, prelude::*};
-use gpui::{AnyElement, FontWeight, IntoElement, Length, div};
-
-/// A table component
-#[derive(IntoElement, RegisterComponent)]
-pub struct Table {
- column_headers: Vec<SharedString>,
- rows: Vec<Vec<TableCell>>,
- column_count: usize,
- striped: bool,
- width: Length,
-}
-
-impl Table {
- /// Create a new table with a column count equal to the
- /// number of headers provided.
- pub fn new(headers: Vec<impl Into<SharedString>>) -> Self {
- let column_count = headers.len();
-
- Table {
- column_headers: headers.into_iter().map(Into::into).collect(),
- column_count,
- rows: Vec::new(),
- striped: false,
- width: Length::Auto,
- }
- }
-
- /// Adds a row to the table.
- ///
- /// The row must have the same number of columns as the table.
- pub fn row(mut self, items: Vec<impl Into<TableCell>>) -> Self {
- if items.len() == self.column_count {
- self.rows.push(items.into_iter().map(Into::into).collect());
- } else {
- // TODO: Log error: Row length mismatch
- }
- self
- }
-
- /// Adds multiple rows to the table.
- ///
- /// Each row must have the same number of columns as the table.
- /// Rows that don't match the column count are ignored.
- pub fn rows(mut self, rows: Vec<Vec<impl Into<TableCell>>>) -> Self {
- for row in rows {
- self = self.row(row);
- }
- self
- }
-
- fn base_cell_style(cx: &mut App) -> Div {
- div()
- .px_1p5()
- .flex_1()
- .justify_start()
- .text_ui(cx)
- .whitespace_nowrap()
- .text_ellipsis()
- .overflow_hidden()
- }
-
- /// Enables row striping.
- pub fn striped(mut self) -> Self {
- self.striped = true;
- self
- }
-
- /// Sets the width of the table.
- pub fn width(mut self, width: impl Into<Length>) -> Self {
- self.width = width.into();
- self
- }
-}
-
-impl RenderOnce for Table {
- fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement {
- let header = div()
- .flex()
- .flex_row()
- .items_center()
- .justify_between()
- .w_full()
- .p_2()
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .children(self.column_headers.into_iter().map(|h| {
- Self::base_cell_style(cx)
- .font_weight(FontWeight::SEMIBOLD)
- .child(h)
- }));
-
- let row_count = self.rows.len();
- let rows = self.rows.into_iter().enumerate().map(|(ix, row)| {
- let is_last = ix == row_count - 1;
- let bg = if ix % 2 == 1 && self.striped {
- Some(cx.theme().colors().text.opacity(0.05))
- } else {
- None
- };
- div()
- .w_full()
- .flex()
- .flex_row()
- .items_center()
- .justify_between()
- .px_1p5()
- .py_1()
- .when_some(bg, |row, bg| row.bg(bg))
- .when(!is_last, |row| {
- row.border_b_1().border_color(cx.theme().colors().border)
- })
- .children(row.into_iter().map(|cell| match cell {
- TableCell::String(s) => Self::base_cell_style(cx).child(s),
- TableCell::Element(e) => Self::base_cell_style(cx).child(e),
- }))
- });
-
- div()
- .w(self.width)
- .overflow_hidden()
- .child(header)
- .children(rows)
- }
-}
-
-/// Represents a cell in a table.
-pub enum TableCell {
- /// A cell containing a string value.
- String(SharedString),
- /// A cell containing a UI element.
- Element(AnyElement),
-}
-
-/// Creates a `TableCell` containing a string value.
-pub fn string_cell(s: impl Into<SharedString>) -> TableCell {
- TableCell::String(s.into())
-}
-
-/// Creates a `TableCell` containing an element.
-pub fn element_cell(e: impl Into<AnyElement>) -> TableCell {
- TableCell::Element(e.into())
-}
-
-impl<E> From<E> for TableCell
-where
- E: Into<SharedString>,
-{
- fn from(e: E) -> Self {
- TableCell::String(e.into())
- }
-}
-
-impl Component for Table {
- fn scope() -> ComponentScope {
- ComponentScope::Layout
- }
-
- fn description() -> Option<&'static str> {
- Some("A table component for displaying data in rows and columns with optional styling.")
- }
-
- fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
- Some(
- v_flex()
- .gap_6()
- .children(vec![
- example_group_with_title(
- "Basic Tables",
- vec![
- single_example(
- "Simple Table",
- Table::new(vec!["Name", "Age", "City"])
- .width(px(400.))
- .row(vec!["Alice", "28", "New York"])
- .row(vec!["Bob", "32", "San Francisco"])
- .row(vec!["Charlie", "25", "London"])
- .into_any_element(),
- ),
- single_example(
- "Two Column Table",
- Table::new(vec!["Category", "Value"])
- .width(px(300.))
- .row(vec!["Revenue", "$100,000"])
- .row(vec!["Expenses", "$75,000"])
- .row(vec!["Profit", "$25,000"])
- .into_any_element(),
- ),
- ],
- ),
- example_group_with_title(
- "Styled Tables",
- vec![
- single_example(
- "Default",
- Table::new(vec!["Product", "Price", "Stock"])
- .width(px(400.))
- .row(vec!["Laptop", "$999", "In Stock"])
- .row(vec!["Phone", "$599", "Low Stock"])
- .row(vec!["Tablet", "$399", "Out of Stock"])
- .into_any_element(),
- ),
- single_example(
- "Striped",
- Table::new(vec!["Product", "Price", "Stock"])
- .width(px(400.))
- .striped()
- .row(vec!["Laptop", "$999", "In Stock"])
- .row(vec!["Phone", "$599", "Low Stock"])
- .row(vec!["Tablet", "$399", "Out of Stock"])
- .row(vec!["Headphones", "$199", "In Stock"])
- .into_any_element(),
- ),
- ],
- ),
- example_group_with_title(
- "Mixed Content Table",
- vec![single_example(
- "Table with Elements",
- Table::new(vec!["Status", "Name", "Priority", "Deadline", "Action"])
- .width(px(840.))
- .row(vec![
- element_cell(
- Indicator::dot().color(Color::Success).into_any_element(),
- ),
- string_cell("Project A"),
- string_cell("High"),
- string_cell("2023-12-31"),
- element_cell(
- Button::new("view_a", "View")
- .style(ButtonStyle::Filled)
- .full_width()
- .into_any_element(),
- ),
- ])
- .row(vec![
- element_cell(
- Indicator::dot().color(Color::Warning).into_any_element(),
- ),
- string_cell("Project B"),
- string_cell("Medium"),
- string_cell("2024-03-15"),
- element_cell(
- Button::new("view_b", "View")
- .style(ButtonStyle::Filled)
- .full_width()
- .into_any_element(),
- ),
- ])
- .row(vec![
- element_cell(
- Indicator::dot().color(Color::Error).into_any_element(),
- ),
- string_cell("Project C"),
- string_cell("Low"),
- string_cell("2024-06-30"),
- element_cell(
- Button::new("view_c", "View")
- .style(ButtonStyle::Filled)
- .full_width()
- .into_any_element(),
- ),
- ])
- .into_any_element(),
- )],
- ),
- ])
- .into_any_element(),
- )
- }
-}
@@ -25,7 +25,7 @@ pub use crate::{Button, ButtonSize, ButtonStyle, IconButton, SelectableButton};
pub use crate::{ButtonCommon, Color};
pub use crate::{Headline, HeadlineSize};
pub use crate::{Icon, IconName, IconPosition, IconSize};
-pub use crate::{Label, LabelCommon, LabelSize, LineHeightStyle};
+pub use crate::{Label, LabelCommon, LabelSize, LineHeightStyle, LoadingLabel};
pub use crate::{h_container, h_flex, v_container, v_flex};
pub use crate::{
h_group, h_group_lg, h_group_sm, h_group_xl, v_group, v_group_lg, v_group_sm, v_group_xl,
@@ -29,7 +29,7 @@ pub struct SingleLineInput {
label: Option<SharedString>,
/// The placeholder text for the text field.
placeholder: SharedString,
- /// Exposes the underlying [`Model<Editor>`] to allow for customizing the editor beyond the provided API.
+ /// Exposes the underlying [`Entity<Editor>`] to allow for customizing the editor beyond the provided API.
///
/// This likely will only be public in the short term, ideally the API will be expanded to cover necessary use cases.
pub editor: Entity<Editor>,
@@ -30,6 +30,7 @@ log.workspace = true
rand = { workspace = true, optional = true }
regex.workspace = true
rust-embed.workspace = true
+schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
@@ -170,6 +170,12 @@ impl<T: AsRef<Path>> From<T> for SanitizedPath {
pub const FILE_ROW_COLUMN_DELIMITER: char = ':';
const ROW_COL_CAPTURE_REGEX: &str = r"(?xs)
+ ([^\(]+)\:(?:
+ \((\d+)[,:](\d+)\) # filename:(row,column), filename:(row:column)
+ |
+ \((\d+)\)() # filename:(row)
+ )
+ |
([^\(]+)(?:
\((\d+)[,:](\d+)\) # filename(row,column), filename(row:column)
|
@@ -674,6 +680,15 @@ mod tests {
column: None
}
);
+
+ assert_eq!(
+ PathWithPosition::parse_str("Types.hs:(617,9)-(670,28):"),
+ PathWithPosition {
+ path: PathBuf::from("Types.hs"),
+ row: Some(617),
+ column: Some(9),
+ }
+ );
}
#[test]
@@ -0,0 +1,58 @@
+use schemars::{JsonSchema, transform::transform_subschemas};
+
+const DEFS_PATH: &str = "#/$defs/";
+
+/// Replaces the JSON schema definition for some type if it is in use (in the definitions list), and
+/// returns a reference to it.
+///
+/// This asserts that `JsonSchema::schema_name()` + "2" is not present, since its presence would
+/// indicate that multiple types use this name, and unfortunately the schemars APIs do not support
+/// resolving this ambiguity - see https://github.com/GREsau/schemars/issues/449
+///
+/// This takes a closure for `schema` because some settings types are not available on the remote
+/// server, and eagerly building their schemas would crash when attempting to access e.g. GlobalThemeRegistry.
+pub fn replace_subschema<T: JsonSchema>(
+ generator: &mut schemars::SchemaGenerator,
+ schema: impl Fn() -> schemars::Schema,
+) -> schemars::Schema {
+    // Fall back on just using the schema name, which could collide.
+ let schema_name = T::schema_name();
+ let definitions = generator.definitions_mut();
+ assert!(!definitions.contains_key(&format!("{schema_name}2")));
+ if definitions.contains_key(schema_name.as_ref()) {
+ definitions.insert(schema_name.to_string(), schema().to_value());
+ }
+ schemars::Schema::new_ref(format!("{DEFS_PATH}{schema_name}"))
+}
+
+/// Adds a new JSON schema definition and returns a reference to it. **Panics** if the name is
+/// already in use.
+pub fn add_new_subschema(
+ generator: &mut schemars::SchemaGenerator,
+ name: &str,
+ schema: serde_json::Value,
+) -> schemars::Schema {
+ let old_definition = generator.definitions_mut().insert(name.to_string(), schema);
+ assert_eq!(old_definition, None);
+ schemars::Schema::new_ref(format!("{DEFS_PATH}{name}"))
+}
+
+/// Defaults `additionalProperties` to `false`, as if `#[schemars(deny_unknown_fields)]` were on every
+/// struct. Skips structs that already have `additionalProperties` set (such as when #[serde(flatten)] is used
+/// on a map).
+#[derive(Clone)]
+pub struct DefaultDenyUnknownFields;
+
+impl schemars::transform::Transform for DefaultDenyUnknownFields {
+ fn transform(&mut self, schema: &mut schemars::Schema) {
+ if let Some(object) = schema.as_object_mut() {
+ if object.contains_key("properties")
+ && !object.contains_key("additionalProperties")
+ && !object.contains_key("unevaluatedProperties")
+ {
+ object.insert("additionalProperties".to_string(), false.into());
+ }
+ }
+ transform_subschemas(self, schema);
+ }
+}
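
A sketch of how these helpers compose on the generator side, assuming schemars 1.x's `SchemaSettings` API; the `LanguageName` definition is illustrative:

    use schemars::generate::SchemaSettings;

    // Deny unknown fields everywhere by default...
    let mut generator = SchemaSettings::draft07()
        .with_transform(DefaultDenyUnknownFields)
        .into_generator();

    // ...and register an ad-hoc named definition that other schemas can reference.
    let language_name_ref = add_new_subschema(
        &mut generator,
        "LanguageName",
        serde_json::json!({ "type": "string" }),
    );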
@@ -5,6 +5,7 @@ pub mod fs;
pub mod markdown;
pub mod paths;
pub mod redact;
+pub mod schemars;
pub mod serde;
pub mod shell_env;
pub mod size;
@@ -3,7 +3,15 @@ use gpui::{Context, Window, actions};
use crate::{Vim, state::Mode};
-actions!(vim, [ChangeListOlder, ChangeListNewer]);
+actions!(
+ vim,
+ [
+ /// Navigates to an older position in the change list.
+ ChangeListOlder,
+ /// Navigates to a newer position in the change list.
+ ChangeListNewer
+ ]
+);
pub(crate) fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, |vim, _: &ChangeListOlder, window, cx| {
@@ -44,18 +44,21 @@ use crate::{
visual::VisualDeleteLine,
};
+/// Goes to the specified line number in the editor.
#[derive(Clone, Debug, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
pub struct GoToLine {
range: CommandRange,
}
+/// Yanks (copies) text based on the specified range.
#[derive(Clone, Debug, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
pub struct YankCommand {
range: CommandRange,
}
+/// Executes a command with the specified range.
#[derive(Clone, Debug, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
pub struct WithRange {
@@ -64,6 +67,7 @@ pub struct WithRange {
action: WrappedAction,
}
+/// Executes a command with the specified count.
#[derive(Clone, Debug, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
pub struct WithCount {
@@ -155,12 +159,14 @@ impl VimOption {
}
}
+/// Sets vim options and configuration values.
#[derive(Clone, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
pub struct VimSet {
options: Vec<VimOption>,
}
+/// Saves the current file with optional save intent.
#[derive(Clone, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
struct VimSave {
@@ -168,6 +174,7 @@ struct VimSave {
pub filename: String,
}
+/// Deletes the specified marks from the editor.
#[derive(Clone, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
enum DeleteMarks {
@@ -177,8 +184,18 @@ enum DeleteMarks {
actions!(
vim,
- [VisualCommand, CountCommand, ShellCommand, ArgumentRequired]
+ [
+ /// Executes a command in visual mode.
+ VisualCommand,
+ /// Executes a command with a count prefix.
+ CountCommand,
+ /// Executes a shell command.
+ ShellCommand,
+ /// Indicates that an argument is required for the command.
+ ArgumentRequired
+ ]
);
+/// Opens the specified file for editing.
#[derive(Clone, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
struct VimEdit {
@@ -1282,6 +1299,7 @@ fn generate_positions(string: &str, query: &str) -> Vec<usize> {
positions
}
+/// Applies a command to all lines matching a pattern.
#[derive(Debug, PartialEq, Clone, Action)]
#[action(namespace = vim, no_json, no_register)]
pub(crate) struct OnMatchingLines {
@@ -1480,6 +1498,7 @@ impl OnMatchingLines {
}
}
+/// Executes a shell command and returns the output.
#[derive(Clone, Debug, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
pub struct ShellExec {
@@ -1669,7 +1688,7 @@ impl ShellExec {
id: TaskId("vim".to_string()),
full_label: command.clone(),
label: command.clone(),
- command: command.clone(),
+ command: Some(command.clone()),
args: Vec::new(),
command_label: command.clone(),
cwd,
@@ -6,7 +6,13 @@ use text::SelectionGoal;
use crate::{Vim, motion::Motion, state::Mode};
-actions!(vim, [HelixNormalAfter]);
+actions!(
+ vim,
+ [
+ /// Switches to normal mode after the cursor (Helix-style).
+ HelixNormalAfter
+ ]
+);
pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, Vim::helix_normal_after);
@@ -302,14 +308,14 @@ mod test {
use crate::{state::Mode, test::VimTestContext};
#[gpui::test]
- async fn test_next_word_start(cx: &mut gpui::TestAppContext) {
+ async fn test_word_motions(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
// «
// ˇ
// »
cx.set_state(
indoc! {"
- The quˇick brown
+ Th«e quiˇ»ck brown
fox jumps over
the lazy dog."},
Mode::HelixNormal,
@@ -334,6 +340,32 @@ mod test {
the lazy dog."},
Mode::HelixNormal,
);
+
+ cx.simulate_keystrokes("2 b");
+
+ cx.assert_state(
+ indoc! {"
+ The «ˇquick »brown
+ fox jumps over
+ the lazy dog."},
+ Mode::HelixNormal,
+ );
+
+ cx.simulate_keystrokes("down e up");
+
+ cx.assert_state(
+ indoc! {"
+ The quicˇk brown
+ fox jumps over
+ the lazy dog."},
+ Mode::HelixNormal,
+ );
+
+ cx.set_state("aa\n «ˇbb»", Mode::HelixNormal);
+
+ cx.simulate_keystroke("b");
+
+ cx.assert_state("aa\n«ˇ »bb", Mode::HelixNormal);
}
// #[gpui::test]
@@ -448,4 +480,21 @@ mod test {
Mode::HelixNormal,
);
}
+
+ #[gpui::test]
+ async fn test_newline_char(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.set_state("aa«\nˇ»bb cc", Mode::HelixNormal);
+
+ cx.simulate_keystroke("w");
+
+ cx.assert_state("aa\n«bb ˇ»cc", Mode::HelixNormal);
+
+ cx.set_state("aa«\nˇ»", Mode::HelixNormal);
+
+ cx.simulate_keystroke("b");
+
+ cx.assert_state("«ˇaa»\n", Mode::HelixNormal);
+ }
}
@@ -13,7 +13,17 @@ pub(crate) enum IndentDirection {
Auto,
}
-actions!(vim, [Indent, Outdent, AutoIndent]);
+actions!(
+ vim,
+ [
+ /// Increases indentation of selected lines.
+ Indent,
+ /// Decreases indentation of selected lines.
+ Outdent,
+ /// Automatically adjusts indentation based on syntax.
+ AutoIndent
+ ]
+);
pub(crate) fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, |vim, _: &Indent, window, cx| {
@@ -5,7 +5,15 @@ use language::SelectionGoal;
use settings::Settings;
use vim_mode_setting::HelixModeSetting;
-actions!(vim, [NormalBefore, TemporaryNormal]);
+actions!(
+ vim,
+ [
+ /// Switches to normal mode with cursor positioned before the current character.
+ NormalBefore,
+ /// Temporarily switches to normal mode for one command.
+ TemporaryNormal
+ ]
+);
pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, Vim::normal_before);
@@ -176,6 +176,7 @@ enum IndentType {
Same,
}
+/// Moves to the start of the next word.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -184,6 +185,7 @@ struct NextWordStart {
ignore_punctuation: bool,
}
+/// Moves to the end of the next word.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -192,6 +194,7 @@ struct NextWordEnd {
ignore_punctuation: bool,
}
+/// Moves to the start of the previous word.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -200,6 +203,7 @@ struct PreviousWordStart {
ignore_punctuation: bool,
}
+/// Moves to the end of the previous word.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -208,6 +212,7 @@ struct PreviousWordEnd {
ignore_punctuation: bool,
}
+/// Moves to the start of the next subword.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -216,6 +221,7 @@ pub(crate) struct NextSubwordStart {
pub(crate) ignore_punctuation: bool,
}
+/// Moves to the end of the next subword.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -224,6 +230,7 @@ pub(crate) struct NextSubwordEnd {
pub(crate) ignore_punctuation: bool,
}
+/// Moves to the start of the previous subword.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -232,6 +239,7 @@ pub(crate) struct PreviousSubwordStart {
pub(crate) ignore_punctuation: bool,
}
+/// Moves to the end of the previous subword.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -240,6 +248,7 @@ pub(crate) struct PreviousSubwordEnd {
pub(crate) ignore_punctuation: bool,
}
+/// Moves cursor up by the specified number of lines.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -248,6 +257,7 @@ pub(crate) struct Up {
pub(crate) display_lines: bool,
}
+/// Moves cursor down by the specified number of lines.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -256,6 +266,7 @@ pub(crate) struct Down {
pub(crate) display_lines: bool,
}
+/// Moves to the first non-whitespace character on the current line.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -264,6 +275,7 @@ struct FirstNonWhitespace {
display_lines: bool,
}
+/// Moves to the end of the current line.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -272,6 +284,7 @@ struct EndOfLine {
display_lines: bool,
}
+/// Moves to the start of the current line.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -280,6 +293,7 @@ pub struct StartOfLine {
pub(crate) display_lines: bool,
}
+/// Moves to the middle of the current line.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -288,6 +302,7 @@ struct MiddleOfLine {
display_lines: bool,
}
+/// Finds the next unmatched bracket or delimiter.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -296,6 +311,7 @@ struct UnmatchedForward {
char: char,
}
+/// Finds the previous unmatched bracket or delimiter.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -307,46 +323,85 @@ struct UnmatchedBackward {
actions!(
vim,
[
+ /// Moves cursor left one character.
Left,
+ /// Moves cursor left one character, wrapping to previous line.
#[action(deprecated_aliases = ["vim::Backspace"])]
WrappingLeft,
+ /// Moves cursor right one character.
Right,
+ /// Moves cursor right one character, wrapping to next line.
#[action(deprecated_aliases = ["vim::Space"])]
WrappingRight,
+ /// Selects the current line.
CurrentLine,
+ /// Moves to the start of the next sentence.
SentenceForward,
+ /// Moves to the start of the previous sentence.
SentenceBackward,
+ /// Moves to the start of the paragraph.
StartOfParagraph,
+ /// Moves to the end of the paragraph.
EndOfParagraph,
+ /// Moves to the start of the document.
StartOfDocument,
+ /// Moves to the end of the document.
EndOfDocument,
+ /// Moves to the matching bracket or delimiter.
Matching,
+ /// Goes to a percentage position in the file.
GoToPercentage,
+ /// Moves to the start of the next line.
NextLineStart,
+ /// Moves to the start of the previous line.
PreviousLineStart,
+ /// Moves to the start of a line downward.
StartOfLineDownward,
+ /// Moves to the end of a line downward.
EndOfLineDownward,
+ /// Goes to a specific column number.
GoToColumn,
+ /// Repeats the last character find.
RepeatFind,
+ /// Repeats the last character find in reverse.
RepeatFindReversed,
+ /// Moves to the top of the window.
WindowTop,
+ /// Moves to the middle of the window.
WindowMiddle,
+ /// Moves to the bottom of the window.
WindowBottom,
+ /// Moves to the start of the next section.
NextSectionStart,
+ /// Moves to the end of the next section.
NextSectionEnd,
+ /// Moves to the start of the previous section.
PreviousSectionStart,
+ /// Moves to the end of the previous section.
PreviousSectionEnd,
+ /// Moves to the start of the next method.
NextMethodStart,
+ /// Moves to the end of the next method.
NextMethodEnd,
+ /// Moves to the start of the previous method.
PreviousMethodStart,
+ /// Moves to the end of the previous method.
PreviousMethodEnd,
+ /// Moves to the next comment.
NextComment,
+ /// Moves to the previous comment.
PreviousComment,
+ /// Moves to the previous line with lesser indentation.
PreviousLesserIndent,
+ /// Moves to the previous line with greater indentation.
PreviousGreaterIndent,
+ /// Moves to the previous line with the same indentation.
PreviousSameIndent,
+ /// Moves to the next line with lesser indentation.
NextLesserIndent,
+ /// Moves to the next line with greater indentation.
NextGreaterIndent,
+ /// Moves to the next line with the same indentation.
NextSameIndent,
]
);
@@ -36,32 +36,59 @@ use multi_buffer::MultiBufferRow;
actions!(
vim,
[
+ /// Inserts text after the cursor.
InsertAfter,
+ /// Inserts text before the cursor.
InsertBefore,
+ /// Inserts at the first non-whitespace character.
InsertFirstNonWhitespace,
+ /// Inserts at the end of the line.
InsertEndOfLine,
+ /// Inserts a new line above the current line.
InsertLineAbove,
+ /// Inserts a new line below the current line.
InsertLineBelow,
+ /// Inserts an empty line above without entering insert mode.
InsertEmptyLineAbove,
+ /// Inserts an empty line below without entering insert mode.
InsertEmptyLineBelow,
+ /// Inserts at the previous insert position.
InsertAtPrevious,
+ /// Joins the current line with the next line.
JoinLines,
+ /// Joins lines without adding whitespace.
JoinLinesNoWhitespace,
+ /// Deletes character to the left.
DeleteLeft,
+ /// Deletes character to the right.
DeleteRight,
+ /// Deletes using Helix-style behavior.
HelixDelete,
+ /// Changes from cursor to end of line.
ChangeToEndOfLine,
+ /// Deletes from cursor to end of line.
DeleteToEndOfLine,
+ /// Yanks (copies) the selected text.
Yank,
+ /// Yanks the entire line.
YankLine,
+ /// Toggles the case of selected text.
ChangeCase,
+ /// Converts selected text to uppercase.
ConvertToUpperCase,
+ /// Converts selected text to lowercase.
ConvertToLowerCase,
+ /// Applies ROT13 cipher to selected text.
ConvertToRot13,
+ /// Applies ROT47 cipher to selected text.
ConvertToRot47,
+ /// Toggles comments for selected lines.
ToggleComments,
+ /// Shows the current location in the file.
ShowLocation,
+ /// Undoes the last change.
Undo,
+ /// Redoes the last undone change.
Redo,
]
);
@@ -9,6 +9,7 @@ use crate::{Vim, state::Mode};
const BOOLEAN_PAIRS: &[(&str, &str)] = &[("true", "false"), ("yes", "no"), ("on", "off")];
+/// Increments the number under the cursor or toggles boolean values.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -17,6 +18,7 @@ struct Increment {
step: bool,
}
+/// Decrements the number under the cursor or toggles boolean values.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -14,6 +14,7 @@ use crate::{
state::{Mode, Register},
};
+/// Pastes text from the specified register at the cursor position.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -711,7 +712,7 @@ mod test {
);
cx.update_global(|store: &mut SettingsStore, cx| {
store.update_user_settings::<AllLanguageSettings>(cx, |settings| {
- settings.languages.insert(
+ settings.languages.0.insert(
LanguageName::new("Rust"),
LanguageSettingsContent {
auto_indent_on_paste: Some(false),
@@ -11,7 +11,19 @@ use editor::Editor;
use gpui::{Action, App, Context, Window, actions};
use workspace::Workspace;
-actions!(vim, [Repeat, EndRepeat, ToggleRecord, ReplayLastRecording]);
+actions!(
+ vim,
+ [
+ /// Repeats the last change.
+ Repeat,
+ /// Ends the repeat recording.
+ EndRepeat,
+ /// Toggles macro recording.
+ ToggleRecord,
+ /// Replays the last recorded macro.
+ ReplayLastRecording
+ ]
+);
fn should_replay(action: &dyn Action) -> bool {
// skip so that we don't leave the character palette open
@@ -245,61 +257,63 @@ impl Vim {
}) else {
return;
};
- if let Some(mode) = mode {
- self.switch_mode(mode, false, window, cx)
- }
+ if mode != Some(self.mode) {
+ if let Some(mode) = mode {
+ self.switch_mode(mode, false, window, cx)
+ }
- match selection {
- RecordedSelection::SingleLine { cols } => {
- if cols > 1 {
- self.visual_motion(Motion::Right, Some(cols as usize - 1), window, cx)
+ match selection {
+ RecordedSelection::SingleLine { cols } => {
+ if cols > 1 {
+ self.visual_motion(Motion::Right, Some(cols as usize - 1), window, cx)
+ }
}
- }
- RecordedSelection::Visual { rows, cols } => {
- self.visual_motion(
- Motion::Down {
- display_lines: false,
- },
- Some(rows as usize),
- window,
- cx,
- );
- self.visual_motion(
- Motion::StartOfLine {
- display_lines: false,
- },
- None,
- window,
- cx,
- );
- if cols > 1 {
- self.visual_motion(Motion::Right, Some(cols as usize - 1), window, cx)
+ RecordedSelection::Visual { rows, cols } => {
+ self.visual_motion(
+ Motion::Down {
+ display_lines: false,
+ },
+ Some(rows as usize),
+ window,
+ cx,
+ );
+ self.visual_motion(
+ Motion::StartOfLine {
+ display_lines: false,
+ },
+ None,
+ window,
+ cx,
+ );
+ if cols > 1 {
+ self.visual_motion(Motion::Right, Some(cols as usize - 1), window, cx)
+ }
}
- }
- RecordedSelection::VisualBlock { rows, cols } => {
- self.visual_motion(
- Motion::Down {
- display_lines: false,
- },
- Some(rows as usize),
- window,
- cx,
- );
- if cols > 1 {
- self.visual_motion(Motion::Right, Some(cols as usize - 1), window, cx);
+ RecordedSelection::VisualBlock { rows, cols } => {
+ self.visual_motion(
+ Motion::Down {
+ display_lines: false,
+ },
+ Some(rows as usize),
+ window,
+ cx,
+ );
+ if cols > 1 {
+ self.visual_motion(Motion::Right, Some(cols as usize - 1), window, cx);
+ }
}
+ RecordedSelection::VisualLine { rows } => {
+ self.visual_motion(
+ Motion::Down {
+ display_lines: false,
+ },
+ Some(rows as usize),
+ window,
+ cx,
+ );
+ }
+ RecordedSelection::None => {}
}
- RecordedSelection::VisualLine { rows } => {
- self.visual_motion(
- Motion::Down {
- display_lines: false,
- },
- Some(rows as usize),
- window,
- cx,
- );
- }
- RecordedSelection::None => {}
}
// insert internally uses repeat to handle counts
@@ -11,13 +11,21 @@ use settings::Settings;
actions!(
vim,
[
+ /// Scrolls up by one line.
LineUp,
+ /// Scrolls down by one line.
LineDown,
+ /// Scrolls right by one column.
ColumnRight,
+ /// Scrolls left by one column.
ColumnLeft,
+ /// Scrolls up by half a page.
ScrollUp,
+ /// Scrolls down by half a page.
ScrollDown,
+ /// Scrolls up by one page.
PageUp,
+ /// Scrolls down by one page.
PageDown
]
);
@@ -16,6 +16,7 @@ use crate::{
state::{Mode, SearchState},
};
+/// Moves to the next search match.
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -28,6 +29,7 @@ pub(crate) struct MoveToNext {
regex: bool,
}
+/// Moves to the previous search match.
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -40,6 +42,7 @@ pub(crate) struct MoveToPrevious {
regex: bool,
}
+/// Initiates a search operation with the specified parameters.
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -50,6 +53,7 @@ pub(crate) struct Search {
regex: bool,
}
+/// Executes a find command to search for patterns in the buffer.
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -58,6 +62,7 @@ pub struct FindCommand {
pub backwards: bool,
}
+/// Executes a search and replace command within the specified range.
#[derive(Clone, Debug, PartialEq, Action)]
#[action(namespace = vim, no_json, no_register)]
pub struct ReplaceCommand {
@@ -73,7 +78,17 @@ pub(crate) struct Replacement {
is_case_sensitive: bool,
}
-actions!(vim, [SearchSubmit, MoveToNextMatch, MoveToPreviousMatch]);
+actions!(
+ vim,
+ [
+ /// Submits the current search query.
+ SearchSubmit,
+ /// Moves to the next search match.
+ MoveToNextMatch,
+ /// Moves to the previous search match.
+ MoveToPreviousMatch
+ ]
+);
pub(crate) fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, Vim::move_to_next);
@@ -7,7 +7,15 @@ use crate::{
motion::{Motion, MotionKind},
};
-actions!(vim, [Substitute, SubstituteLine]);
+actions!(
+ vim,
+ [
+ /// Substitutes characters in the current selection.
+ Substitute,
+ /// Substitutes the entire line.
+ SubstituteLine
+ ]
+);
pub(crate) fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, |vim, _: &Substitute, window, cx| {
@@ -196,7 +196,7 @@ impl Vim {
}
clipboard_selections.push(ClipboardSelection {
len: text.len() - initial_len,
- is_entire_line: kind.linewise(),
+ is_entire_line: false,
first_line_indent: buffer.indent_size_for_line(MultiBufferRow(start.row)).len,
});
}
@@ -46,6 +46,7 @@ pub enum Object {
EntireFile,
}
+/// Selects a word text object.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -54,6 +55,7 @@ struct Word {
ignore_punctuation: bool,
}
+/// Selects a subword text object.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -61,6 +63,7 @@ struct Subword {
#[serde(default)]
ignore_punctuation: bool,
}
+/// Selects text at the same indentation level.
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
#[action(namespace = vim)]
#[serde(deny_unknown_fields)]
@@ -258,25 +261,45 @@ fn find_mini_brackets(
actions!(
vim,
[
+ /// Selects a sentence text object.
Sentence,
+ /// Selects a paragraph text object.
Paragraph,
+ /// Selects text within single quotes.
Quotes,
+ /// Selects text within backticks.
BackQuotes,
+ /// Selects text within the nearest quotes (single or double).
MiniQuotes,
+ /// Selects text within any type of quotes.
AnyQuotes,
+ /// Selects text within double quotes.
DoubleQuotes,
+ /// Selects text within vertical bars (pipes).
VerticalBars,
+ /// Selects text within parentheses.
Parentheses,
+ /// Selects text within the nearest brackets.
MiniBrackets,
+ /// Selects text within any type of brackets.
AnyBrackets,
+ /// Selects text within square brackets.
SquareBrackets,
+ /// Selects text within curly brackets.
CurlyBrackets,
+ /// Selects text within angle brackets.
AngleBrackets,
+ /// Selects a function argument.
Argument,
+ /// Selects an HTML/XML tag.
Tag,
+ /// Selects a method or function.
Method,
+ /// Selects a class definition.
Class,
+ /// Selects a comment block.
Comment,
+ /// Selects the entire file.
EntireFile
]
);
@@ -13,7 +13,15 @@ use language::{Point, SelectionGoal};
use std::ops::Range;
use std::sync::Arc;
-actions!(vim, [ToggleReplace, UndoReplace]);
+actions!(
+ vim,
+ [
+ /// Toggles replace mode.
+ ToggleReplace,
+ /// Undoes the last replacement.
+ UndoReplace
+ ]
+);
pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, |vim, _: &ToggleReplace, window, cx| {
@@ -4,7 +4,13 @@ use editor::{Bias, Editor, RewrapOptions, SelectionEffects, display_map::ToDispl
use gpui::{Context, Window, actions};
use language::SelectionGoal;
-actions!(vim, [Rewrap]);
+actions!(
+ vim,
+ [
+ /// Rewraps the selected text to fit within the line width.
+ Rewrap
+ ]
+);
pub(crate) fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
Vim::action(editor, cx, |vim, _: &Rewrap, window, cx| {
@@ -2071,3 +2071,42 @@ async fn test_paragraph_multi_delete(cx: &mut gpui::TestAppContext) {
cx.simulate_shared_keystrokes("4 d a p").await;
cx.shared_state().await.assert_eq(indoc! {"ˇ"});
}
+
+#[gpui::test]
+async fn test_multi_cursor_replay(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.set_state(
+ indoc! {
+ "
+ oˇne one one
+
+ two two two
+ "
+ },
+ Mode::Normal,
+ );
+
+ cx.simulate_keystrokes("3 g l s wow escape escape");
+ cx.assert_state(
+ indoc! {
+ "
+ woˇw wow wow
+
+ two two two
+ "
+ },
+ Mode::Normal,
+ );
+
+ cx.simulate_keystrokes("2 j 3 g l .");
+ cx.assert_state(
+ indoc! {
+ "
+ wow wow wow
+
+ woˇw woˇw woˇw
+ "
+ },
+ Mode::Normal,
+ );
+}
@@ -134,55 +134,105 @@ struct PushLiteral {
actions!(
vim,
[
+ /// Switches to normal mode.
SwitchToNormalMode,
+ /// Switches to insert mode.
SwitchToInsertMode,
+ /// Switches to replace mode.
SwitchToReplaceMode,
+ /// Switches to visual mode.
SwitchToVisualMode,
+ /// Switches to visual line mode.
SwitchToVisualLineMode,
+ /// Switches to visual block mode.
SwitchToVisualBlockMode,
+ /// Switches to Helix-style normal mode.
SwitchToHelixNormalMode,
+ /// Clears any pending operators.
ClearOperators,
+ /// Clears the exchange register.
ClearExchange,
+ /// Inserts a tab character.
Tab,
+ /// Inserts a newline.
Enter,
+ /// Selects inner text object.
InnerObject,
+ /// Maximizes the current pane.
MaximizePane,
+ /// Opens the default keymap file.
OpenDefaultKeymap,
+ /// Resets all pane sizes to default.
ResetPaneSizes,
+ /// Resizes the pane to the right.
ResizePaneRight,
+ /// Resizes the pane to the left.
ResizePaneLeft,
+ /// Resizes the pane upward.
ResizePaneUp,
+ /// Resizes the pane downward.
ResizePaneDown,
+ /// Starts a change operation.
PushChange,
+ /// Starts a delete operation.
PushDelete,
+ /// Exchanges text regions.
Exchange,
+ /// Starts a yank operation.
PushYank,
+ /// Starts a replace operation.
PushReplace,
+ /// Deletes surrounding characters.
PushDeleteSurrounds,
+ /// Sets a mark at the current position.
PushMark,
+ /// Toggles the marks view.
ToggleMarksView,
+ /// Starts a forced motion.
PushForcedMotion,
+ /// Starts an indent operation.
PushIndent,
+ /// Starts an outdent operation.
PushOutdent,
+ /// Starts an auto-indent operation.
PushAutoIndent,
+ /// Starts a rewrap operation.
PushRewrap,
+ /// Starts a shell command operation.
PushShellCommand,
+ /// Converts to lowercase.
PushLowercase,
+ /// Converts to uppercase.
PushUppercase,
+ /// Toggles case.
PushOppositeCase,
+ /// Applies ROT13 encoding.
PushRot13,
+ /// Applies ROT47 encoding.
PushRot47,
+ /// Toggles the registers view.
ToggleRegistersView,
+ /// Selects a register.
PushRegister,
+ /// Starts recording to a register.
PushRecordRegister,
+ /// Replays a register.
PushReplayRegister,
+ /// Replaces with register contents.
PushReplaceWithRegister,
+ /// Toggles comments.
PushToggleComments,
]
);
// in the workspace namespace so it's not filtered out when vim is disabled.
-actions!(workspace, [ToggleVimMode,]);
+actions!(
+ workspace,
+ [
+ /// Toggles Vim mode on or off.
+ ToggleVimMode,
+ ]
+);
/// Initializes the `vim` crate.
pub fn init(cx: &mut App) {
@@ -23,23 +23,41 @@ use crate::{
actions!(
vim,
[
+ /// Toggles visual mode.
ToggleVisual,
+ /// Toggles visual line mode.
ToggleVisualLine,
+ /// Toggles visual block mode.
ToggleVisualBlock,
+ /// Deletes the visual selection.
VisualDelete,
+ /// Deletes entire lines in visual selection.
VisualDeleteLine,
+ /// Yanks (copies) the visual selection.
VisualYank,
+ /// Yanks entire lines in visual selection.
VisualYankLine,
+ /// Moves cursor to the other end of the selection.
OtherEnd,
+ /// Moves cursor to the other end of the selection (row-aware).
OtherEndRowAware,
+ /// Selects the next occurrence of the current selection.
SelectNext,
+ /// Selects the previous occurrence of the current selection.
SelectPrevious,
+ /// Selects the next match of the current selection.
SelectNextMatch,
+ /// Selects the previous match of the current selection.
SelectPreviousMatch,
+ /// Selects the next smaller syntax node.
SelectSmallerSyntaxNode,
+ /// Selects the next larger syntax node.
SelectLargerSyntaxNode,
+ /// Restores the previous visual selection.
RestoreVisualSelection,
+ /// Inserts at the end of each line in visual selection.
VisualInsertEndOfLine,
+ /// Inserts at the first non-whitespace character of each line.
VisualInsertFirstNonWhiteSpace,
]
);
@@ -7,10 +7,7 @@ use gpui::{App, AppContext, Context, Entity, Subscription, Task};
use http_client::{HttpClient, Method};
use language_model::{LlmApiToken, RefreshLlmTokenListener};
use web_search::{WebSearchProvider, WebSearchProviderId};
-use zed_llm_client::{
- CLIENT_SUPPORTS_EXA_WEB_SEARCH_PROVIDER_HEADER_NAME, EXPIRED_LLM_TOKEN_HEADER_NAME,
- WebSearchBody, WebSearchResponse,
-};
+use zed_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, WebSearchBody, WebSearchResponse};
pub struct CloudWebSearchProvider {
state: Entity<State>,
@@ -92,7 +89,6 @@ async fn perform_web_search(
.uri(http_client.build_zed_llm_url("/web_search", &[])?.as_ref())
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {token}"))
- .header(CLIENT_SUPPORTS_EXA_WEB_SEARCH_PROVIDER_HEADER_NAME, "true")
.body(serde_json::to_string(&body)?.into())?;
let mut response = http_client
.send(request)
@@ -12,7 +12,13 @@ use ui::{ListItem, ListItemSpacing, prelude::*};
use util::ResultExt;
use workspace::{ModalView, Workspace, ui::HighlightedLabel};
-actions!(welcome, [ToggleBaseKeymapSelector]);
+actions!(
+ welcome,
+ [
+ /// Toggles the base keymap selector modal.
+ ToggleBaseKeymapSelector
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
@@ -24,7 +24,13 @@ mod base_keymap_picker;
mod base_keymap_setting;
mod multibuffer_hint;
-actions!(welcome, [ResetHints]);
+actions!(
+ welcome,
+ [
+ /// Resets the welcome screen hints to their initial state.
+ ResetHints
+ ]
+);
pub const FIRST_OPEN: &str = "first_open";
pub const DOCS_URL: &str = "https://zed.dev/docs/";
@@ -902,7 +902,7 @@ impl Render for PanelButtons {
})
.anchor(menu_anchor)
.attach(menu_attach)
- .trigger(move |is_active| {
+ .trigger(move |is_active, _window, _cx| {
IconButton::new(name, icon)
.icon_size(IconSize::Small)
.toggle_state(is_active_button)
@@ -95,37 +95,45 @@ pub enum SaveIntent {
Skip,
}
+/// Activates a specific item in the pane by its index.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
pub struct ActivateItem(pub usize);
+/// Closes the currently active item in the pane.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
pub struct CloseActiveItem {
+ #[serde(default)]
pub save_intent: Option<SaveIntent>,
#[serde(default)]
pub close_pinned: bool,
}
+/// Closes all inactive items in the pane.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
pub struct CloseInactiveItems {
+ #[serde(default)]
pub save_intent: Option<SaveIntent>,
#[serde(default)]
pub close_pinned: bool,
}
+/// Closes all items in the pane.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
pub struct CloseAllItems {
+ #[serde(default)]
pub save_intent: Option<SaveIntent>,
#[serde(default)]
pub close_pinned: bool,
}
+/// Closes all items that have no unsaved changes.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
@@ -134,6 +142,7 @@ pub struct CloseCleanItems {
pub close_pinned: bool,
}
+/// Closes all items to the right of the current item.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
@@ -142,6 +151,7 @@ pub struct CloseItemsToTheRight {
pub close_pinned: bool,
}
+/// Closes all items to the left of the current item.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
@@ -150,6 +160,7 @@ pub struct CloseItemsToTheLeft {
pub close_pinned: bool,
}
+/// Reveals the current item in the project panel.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
@@ -158,6 +169,7 @@ pub struct RevealInProjectPanel {
pub entry_id: Option<u64>,
}
+/// Opens the search interface with the specified configuration.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Default, Action)]
#[action(namespace = pane)]
#[serde(deny_unknown_fields)]
@@ -173,25 +185,45 @@ pub struct DeploySearch {
actions!(
pane,
[
+ /// Activates the previous item in the pane.
ActivatePreviousItem,
+ /// Activates the next item in the pane.
ActivateNextItem,
+ /// Activates the last item in the pane.
ActivateLastItem,
+ /// Switches to the alternate file.
AlternateFile,
+ /// Navigates back in history.
GoBack,
+ /// Navigates forward in history.
GoForward,
+ /// Joins this pane into the next pane.
JoinIntoNext,
+ /// Joins all panes into one.
JoinAll,
+ /// Reopens the most recently closed item.
ReopenClosedItem,
+ /// Splits the pane to the left.
SplitLeft,
+ /// Splits the pane upward.
SplitUp,
+ /// Splits the pane to the right.
SplitRight,
+ /// Splits the pane downward.
SplitDown,
+ /// Splits the pane horizontally.
SplitHorizontal,
+ /// Splits the pane vertically.
SplitVertical,
+ /// Swaps the current item with the one to the left.
SwapItemLeft,
+ /// Swaps the current item with the one to the right.
SwapItemRight,
+ /// Toggles preview mode for the current tab.
TogglePreviewTab,
+ /// Toggles pin status for the current tab.
TogglePinTab,
+ /// Unpins all tabs in the pane.
UnpinAllTabs,
]
);
@@ -2521,7 +2553,7 @@ impl Pane {
let pane = cx.entity().downgrade();
let menu_context = item.item_focus_handle(cx);
right_click_menu(ix)
- .trigger(|_| tab)
+ .trigger(|_, _, _| tab)
.menu(move |window, cx| {
let pane = pane.clone();
let menu_context = menu_context.clone();
@@ -2703,9 +2735,7 @@ impl Pane {
.when(visible_in_project_panel, |menu| {
menu.entry(
"Reveal In Project Panel",
- Some(Box::new(RevealInProjectPanel {
- entry_id: Some(entry_id),
- })),
+ Some(Box::new(RevealInProjectPanel::default())),
window.handler_for(&pane, move |pane, _, cx| {
pane.project
.update(cx, |_, cx| {
@@ -3,7 +3,7 @@ use std::process::ExitStatus;
use anyhow::Result;
use gpui::{AppContext, Context, Entity, Task};
use language::Buffer;
-use project::TaskSourceKind;
+use project::{TaskSourceKind, WorktreeId};
use remote::ConnectionState;
use task::{DebugScenario, ResolvedTask, SpawnInTerminal, TaskContext, TaskTemplate};
use ui::Window;
@@ -95,11 +95,19 @@ impl Workspace {
scenario: DebugScenario,
task_context: TaskContext,
active_buffer: Option<Entity<Buffer>>,
+ worktree_id: Option<WorktreeId>,
window: &mut Window,
cx: &mut Context<Self>,
) {
if let Some(provider) = self.debugger_provider.as_mut() {
- provider.start_session(scenario, task_context, active_buffer, window, cx)
+ provider.start_session(
+ scenario,
+ task_context,
+ active_buffer,
+ worktree_id,
+ window,
+ cx,
+ )
}
}
@@ -5,13 +5,19 @@ use theme::all_theme_colors;
use ui::{
AudioStatus, Avatar, AvatarAudioStatusIndicator, AvatarAvailabilityIndicator, ButtonLike,
Checkbox, CheckboxWithLabel, CollaboratorAvailability, ContentGroup, DecoratedIcon,
- ElevationIndex, Facepile, IconDecoration, Indicator, KeybindingHint, Switch, Table, TintColor,
- Tooltip, element_cell, prelude::*, string_cell, utils::calculate_contrast_ratio,
+ ElevationIndex, Facepile, IconDecoration, Indicator, KeybindingHint, Switch, TintColor,
+ Tooltip, prelude::*, utils::calculate_contrast_ratio,
};
use crate::{Item, Workspace};
-actions!(dev, [OpenThemePreview]);
+actions!(
+ dev,
+ [
+ /// Opens the theme preview window.
+ OpenThemePreview
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _, _| {
@@ -146,6 +146,7 @@ pub trait DebuggerProvider {
definition: DebugScenario,
task_context: TaskContext,
active_buffer: Option<Entity<Buffer>>,
+ worktree_id: Option<WorktreeId>,
window: &mut Window,
cx: &mut App,
);
@@ -168,42 +169,83 @@ pub trait DebuggerProvider {
actions!(
workspace,
[
+ /// Activates the next pane in the workspace.
ActivateNextPane,
+ /// Activates the previous pane in the workspace.
ActivatePreviousPane,
+ /// Switches to the next window.
ActivateNextWindow,
+ /// Switches to the previous window.
ActivatePreviousWindow,
+ /// Adds a folder to the current project.
AddFolderToProject,
+ /// Clears all notifications.
ClearAllNotifications,
+ /// Closes the active dock.
CloseActiveDock,
+ /// Closes all docks.
CloseAllDocks,
+ /// Closes the current window.
CloseWindow,
+ /// Opens the feedback dialog.
Feedback,
+ /// Follows the next collaborator in the session.
FollowNextCollaborator,
+ /// Moves the focused panel to the next position.
MoveFocusedPanelToNextPosition,
+ /// Opens a new terminal in the center.
NewCenterTerminal,
+ /// Creates a new file.
NewFile,
+ /// Creates a new file in a vertical split.
NewFileSplitVertical,
+ /// Creates a new file in a horizontal split.
NewFileSplitHorizontal,
+ /// Opens a new search.
NewSearch,
+ /// Opens a new terminal.
NewTerminal,
+ /// Opens a new window.
NewWindow,
+ /// Opens a file or directory.
Open,
+ /// Opens multiple files.
OpenFiles,
+ /// Opens the current location in terminal.
OpenInTerminal,
+ /// Opens the component preview.
OpenComponentPreview,
+ /// Reloads the active item.
ReloadActiveItem,
+ /// Resets the active dock to its default size.
+ ResetActiveDockSize,
+ /// Resets all open docks to their default sizes.
+ ResetOpenDocksSize,
+ /// Saves the current file with a new name.
SaveAs,
+ /// Saves without formatting.
SaveWithoutFormat,
+ /// Shuts down all debug adapters.
ShutdownDebugAdapters,
+ /// Suppresses the current notification.
SuppressNotification,
+ /// Toggles the bottom dock.
ToggleBottomDock,
+ /// Toggles centered layout mode.
ToggleCenteredLayout,
+ /// Toggles the left dock.
ToggleLeftDock,
+ /// Toggles the right dock.
ToggleRightDock,
+ /// Toggles zoom on the active pane.
ToggleZoom,
+ /// Stops following a collaborator.
Unfollow,
+ /// Shows the welcome screen.
Welcome,
+ /// Restores the banner.
RestoreBanner,
+ /// Toggles expansion of the selected item.
ToggleExpandItem,
]
);
@@ -213,14 +255,17 @@ pub struct OpenPaths {
pub paths: Vec<PathBuf>,
}
+/// Activates a specific pane by its index.
#[derive(Clone, Deserialize, PartialEq, JsonSchema, Action)]
#[action(namespace = workspace)]
pub struct ActivatePane(pub usize);
+/// Moves an item to a specific pane by index.
#[derive(Clone, Deserialize, PartialEq, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
pub struct MoveItemToPane {
+ #[serde(default = "default_1")]
pub destination: usize,
#[serde(default = "default_true")]
pub focus: bool,
@@ -228,10 +273,16 @@ pub struct MoveItemToPane {
pub clone: bool,
}
+fn default_1() -> usize {
+ 1
+}
+
+/// Moves an item to a pane in the specified direction.
#[derive(Clone, Deserialize, PartialEq, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
pub struct MoveItemToPaneInDirection {
+ #[serde(default = "default_right")]
pub direction: SplitDirection,
#[serde(default = "default_true")]
pub focus: bool,
@@ -239,38 +290,52 @@ pub struct MoveItemToPaneInDirection {
pub clone: bool,
}
+fn default_right() -> SplitDirection {
+ SplitDirection::Right
+}
+
+/// Saves all open files in the workspace.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
pub struct SaveAll {
+ #[serde(default)]
pub save_intent: Option<SaveIntent>,
}
+/// Saves the current file with the specified options.
#[derive(Clone, PartialEq, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
pub struct Save {
+ #[serde(default)]
pub save_intent: Option<SaveIntent>,
}
+/// Closes all items and panes in the workspace.
#[derive(Clone, PartialEq, Debug, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
pub struct CloseAllItemsAndPanes {
+ #[serde(default)]
pub save_intent: Option<SaveIntent>,
}
+/// Closes all inactive tabs and panes in the workspace.
#[derive(Clone, PartialEq, Debug, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
pub struct CloseInactiveTabsAndPanes {
+ #[serde(default)]
pub save_intent: Option<SaveIntent>,
}
+/// Sends a sequence of keystrokes to the active element.
#[derive(Clone, Deserialize, PartialEq, JsonSchema, Action)]
#[action(namespace = workspace)]
pub struct SendKeystrokes(pub String);
+/// Reloads the active item or workspace.
#[derive(Clone, Deserialize, PartialEq, Default, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
@@ -281,28 +346,79 @@ pub struct Reload {
actions!(
project_symbols,
[
+ /// Toggles the project symbols search.
#[action(name = "Toggle")]
ToggleProjectSymbols
]
);
+/// Toggles the file finder interface.
#[derive(Default, PartialEq, Eq, Clone, Deserialize, JsonSchema, Action)]
#[action(namespace = file_finder, name = "Toggle")]
+#[serde(deny_unknown_fields)]
pub struct ToggleFileFinder {
#[serde(default)]
pub separate_history: bool,
}
+/// Increases size of a currently focused dock by a given amount of pixels.
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = workspace)]
+#[serde(deny_unknown_fields)]
+pub struct IncreaseActiveDockSize {
+ /// For 0px parameter, uses UI font size value.
+ #[serde(default)]
+ pub px: u32,
+}
+
+/// Decreases size of a currently focused dock by a given amount of pixels.
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = workspace)]
+#[serde(deny_unknown_fields)]
+pub struct DecreaseActiveDockSize {
+ /// For 0px parameter, uses UI font size value.
+ #[serde(default)]
+ pub px: u32,
+}
+
+/// Increases size of all currently visible docks uniformly, by a given amount of pixels.
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = workspace)]
+#[serde(deny_unknown_fields)]
+pub struct IncreaseOpenDocksSize {
+ /// For 0px parameter, uses UI font size value.
+ #[serde(default)]
+ pub px: u32,
+}
+
+/// Decreases size of all currently visible docks uniformly, by a given amount of pixels.
+#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
+#[action(namespace = workspace)]
+#[serde(deny_unknown_fields)]
+pub struct DecreaseOpenDocksSize {
+ /// For 0px parameter, uses UI font size value.
+ #[serde(default)]
+ pub px: u32,
+}
+
actions!(
workspace,
[
+ /// Activates the pane to the left.
ActivatePaneLeft,
+ /// Activates the pane to the right.
ActivatePaneRight,
+ /// Activates the pane above.
ActivatePaneUp,
+ /// Activates the pane below.
ActivatePaneDown,
+ /// Swaps the current pane with the one to the left.
SwapPaneLeft,
+ /// Swaps the current pane with the one to the right.
SwapPaneRight,
+ /// Swaps the current pane with the one above.
SwapPaneUp,
+ /// Swaps the current pane with the one below.
SwapPaneDown,
]
);
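
The dock-sizing actions introduced above (`IncreaseActiveDockSize`, `DecreaseActiveDockSize`, `IncreaseOpenDocksSize`, `DecreaseOpenDocksSize`, plus the zero-argument `ResetActiveDockSize` and `ResetOpenDocksSize`) take a `px` argument, with `0` falling back to the UI font size. A minimal keymap sketch — the key combinations and pixel values are illustrative, not defaults shipped with this change — might look like:

```json
[
  {
    "context": "Workspace",
    "bindings": {
      "ctrl-alt-]": ["workspace::IncreaseActiveDockSize", { "px": 20 }],
      "ctrl-alt-[": ["workspace::DecreaseActiveDockSize", { "px": 20 }],
      "ctrl-alt-0": "workspace::ResetActiveDockSize"
    }
  }
]
```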
@@ -358,6 +474,7 @@ impl PartialEq for Toast {
}
}
+/// Opens a new terminal with the specified working directory.
#[derive(Debug, Default, Clone, Deserialize, PartialEq, JsonSchema, Action)]
#[action(namespace = workspace)]
#[serde(deny_unknown_fields)]
@@ -2762,12 +2879,14 @@ impl Workspace {
})
}
- fn close_active_dock(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ fn close_active_dock(&mut self, window: &mut Window, cx: &mut Context<Self>) -> bool {
if let Some(dock) = self.active_dock(window, cx) {
dock.update(cx, |dock, cx| {
dock.set_open(false, window, cx);
});
+ return true;
}
+ false
}
pub fn close_all_docks(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -3615,9 +3734,9 @@ impl Workspace {
return;
};
match dock.read(cx).position() {
- DockPosition::Left => resize_left_dock(panel_size + amount, self, window, cx),
- DockPosition::Bottom => resize_bottom_dock(panel_size + amount, self, window, cx),
- DockPosition::Right => resize_right_dock(panel_size + amount, self, window, cx),
+ DockPosition::Left => self.resize_left_dock(panel_size + amount, window, cx),
+ DockPosition::Bottom => self.resize_bottom_dock(panel_size + amount, window, cx),
+ DockPosition::Right => self.resize_right_dock(panel_size + amount, window, cx),
}
} else {
self.center
@@ -5406,7 +5525,9 @@ impl Workspace {
))
.on_action(cx.listener(
|workspace: &mut Workspace, _: &CloseActiveDock, window, cx| {
- workspace.close_active_dock(window, cx);
+ if !workspace.close_active_dock(window, cx) {
+ cx.propagate();
+ }
},
))
.on_action(
@@ -5431,6 +5552,72 @@ impl Workspace {
workspace.reopen_closed_item(window, cx).detach();
},
))
+ .on_action(cx.listener(
+ |workspace: &mut Workspace, _: &ResetActiveDockSize, window, cx| {
+ for dock in workspace.all_docks() {
+ if dock.focus_handle(cx).contains_focused(window, cx) {
+ let Some(panel) = dock.read(cx).active_panel() else {
+ return;
+ };
+
+ // Set to `None`, then the size will fall back to the default.
+ panel.clone().set_size(None, window, cx);
+
+ return;
+ }
+ }
+ },
+ ))
+ .on_action(cx.listener(
+ |workspace: &mut Workspace, _: &ResetOpenDocksSize, window, cx| {
+ for dock in workspace.all_docks() {
+ if let Some(panel) = dock.read(cx).visible_panel() {
+ // Set to `None`, then the size will fall back to the default.
+ panel.clone().set_size(None, window, cx);
+ }
+ }
+ },
+ ))
+ .on_action(cx.listener(
+ |workspace: &mut Workspace, act: &IncreaseActiveDockSize, window, cx| {
+ adjust_active_dock_size_by_px(
+ px_with_ui_font_fallback(act.px, cx),
+ workspace,
+ window,
+ cx,
+ );
+ },
+ ))
+ .on_action(cx.listener(
+ |workspace: &mut Workspace, act: &DecreaseActiveDockSize, window, cx| {
+ adjust_active_dock_size_by_px(
+ px_with_ui_font_fallback(act.px, cx) * -1.,
+ workspace,
+ window,
+ cx,
+ );
+ },
+ ))
+ .on_action(cx.listener(
+ |workspace: &mut Workspace, act: &IncreaseOpenDocksSize, window, cx| {
+ adjust_open_docks_size_by_px(
+ px_with_ui_font_fallback(act.px, cx),
+ workspace,
+ window,
+ cx,
+ );
+ },
+ ))
+ .on_action(cx.listener(
+ |workspace: &mut Workspace, act: &DecreaseOpenDocksSize, window, cx| {
+ adjust_open_docks_size_by_px(
+ px_with_ui_font_fallback(act.px, cx) * -1.,
+ workspace,
+ window,
+ cx,
+ );
+ },
+ ))
.on_action(cx.listener(Workspace::toggle_centered_layout))
.on_action(cx.listener(Workspace::cancel))
}
@@ -5620,6 +5807,72 @@ impl Workspace {
cx.propagate();
}
}
+
+ fn adjust_dock_size_by_px(
+ &mut self,
+ panel_size: Pixels,
+ dock_pos: DockPosition,
+ px: Pixels,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ match dock_pos {
+ DockPosition::Left => self.resize_left_dock(panel_size + px, window, cx),
+ DockPosition::Right => self.resize_right_dock(panel_size + px, window, cx),
+ DockPosition::Bottom => self.resize_bottom_dock(panel_size + px, window, cx),
+ }
+ }
+
+ fn resize_left_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) {
+ let size = new_size.min(self.bounds.right() - RESIZE_HANDLE_SIZE);
+
+ self.left_dock.update(cx, |left_dock, cx| {
+ if WorkspaceSettings::get_global(cx)
+ .resize_all_panels_in_dock
+ .contains(&DockPosition::Left)
+ {
+ left_dock.resize_all_panels(Some(size), window, cx);
+ } else {
+ left_dock.resize_active_panel(Some(size), window, cx);
+ }
+ });
+ }
+
+ fn resize_right_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) {
+ let mut size = new_size.max(self.bounds.left() - RESIZE_HANDLE_SIZE);
+ self.left_dock.read_with(cx, |left_dock, cx| {
+ let left_dock_size = left_dock
+ .active_panel_size(window, cx)
+ .unwrap_or(Pixels(0.0));
+ if left_dock_size + size > self.bounds.right() {
+ size = self.bounds.right() - left_dock_size
+ }
+ });
+ self.right_dock.update(cx, |right_dock, cx| {
+ if WorkspaceSettings::get_global(cx)
+ .resize_all_panels_in_dock
+ .contains(&DockPosition::Right)
+ {
+ right_dock.resize_all_panels(Some(size), window, cx);
+ } else {
+ right_dock.resize_active_panel(Some(size), window, cx);
+ }
+ });
+ }
+
+ fn resize_bottom_dock(&mut self, new_size: Pixels, window: &mut Window, cx: &mut App) {
+ let size = new_size.min(self.bounds.bottom() - RESIZE_HANDLE_SIZE - self.bounds.top());
+ self.bottom_dock.update(cx, |bottom_dock, cx| {
+ if WorkspaceSettings::get_global(cx)
+ .resize_all_panels_in_dock
+ .contains(&DockPosition::Bottom)
+ {
+ bottom_dock.resize_all_panels(Some(size), window, cx);
+ } else {
+ bottom_dock.resize_active_panel(Some(size), window, cx);
+ }
+ });
+ }
}
fn leader_border_for_pane(
@@ -5804,6 +6057,63 @@ fn notify_if_database_failed(workspace: WindowHandle<Workspace>, cx: &mut AsyncA
.log_err();
}
+fn px_with_ui_font_fallback(val: u32, cx: &Context<Workspace>) -> Pixels {
+ if val == 0 {
+ ThemeSettings::get_global(cx).ui_font_size(cx)
+ } else {
+ px(val as f32)
+ }
+}
+
+fn adjust_active_dock_size_by_px(
+ px: Pixels,
+ workspace: &mut Workspace,
+ window: &mut Window,
+ cx: &mut Context<Workspace>,
+) {
+ let Some(active_dock) = workspace
+ .all_docks()
+ .into_iter()
+ .find(|dock| dock.focus_handle(cx).contains_focused(window, cx))
+ else {
+ return;
+ };
+ let dock = active_dock.read(cx);
+ let Some(panel_size) = dock.active_panel_size(window, cx) else {
+ return;
+ };
+ let dock_pos = dock.position();
+ workspace.adjust_dock_size_by_px(panel_size, dock_pos, px, window, cx);
+}
+
+fn adjust_open_docks_size_by_px(
+ px: Pixels,
+ workspace: &mut Workspace,
+ window: &mut Window,
+ cx: &mut Context<Workspace>,
+) {
+ let docks = workspace
+ .all_docks()
+ .into_iter()
+ .filter_map(|dock| {
+ if dock.read(cx).is_open() {
+ let dock = dock.read(cx);
+ let panel_size = dock.active_panel_size(window, cx)?;
+ let dock_pos = dock.position();
+ Some((panel_size, dock_pos, px))
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+
+ docks
+ .into_iter()
+ .for_each(|(panel_size, dock_pos, offset)| {
+ workspace.adjust_dock_size_by_px(panel_size, dock_pos, offset, window, cx);
+ });
+}
+
impl Focusable for Workspace {
fn focus_handle(&self, cx: &App) -> FocusHandle {
self.active_pane.focus_handle(cx)
@@ -5961,28 +6271,25 @@ impl Render for Workspace {
Some(e.event.position);
match e.drag(cx).0 {
DockPosition::Left => {
- resize_left_dock(
+ workspace.resize_left_dock(
e.event.position.x
- workspace.bounds.left(),
- workspace,
window,
cx,
);
}
DockPosition::Right => {
- resize_right_dock(
+ workspace.resize_right_dock(
workspace.bounds.right()
- e.event.position.x,
- workspace,
window,
cx,
);
}
DockPosition::Bottom => {
- resize_bottom_dock(
+ workspace.resize_bottom_dock(
workspace.bounds.bottom()
- e.event.position.y,
- workspace,
window,
cx,
);
@@ -6268,73 +6575,6 @@ impl Render for Workspace {
}
}
-fn resize_bottom_dock(
- new_size: Pixels,
- workspace: &mut Workspace,
- window: &mut Window,
- cx: &mut App,
-) {
- let size =
- new_size.min(workspace.bounds.bottom() - RESIZE_HANDLE_SIZE - workspace.bounds.top());
- workspace.bottom_dock.update(cx, |bottom_dock, cx| {
- if WorkspaceSettings::get_global(cx)
- .resize_all_panels_in_dock
- .contains(&DockPosition::Bottom)
- {
- bottom_dock.resize_all_panels(Some(size), window, cx);
- } else {
- bottom_dock.resize_active_panel(Some(size), window, cx);
- }
- });
-}
-
-fn resize_right_dock(
- new_size: Pixels,
- workspace: &mut Workspace,
- window: &mut Window,
- cx: &mut App,
-) {
- let mut size = new_size.max(workspace.bounds.left() - RESIZE_HANDLE_SIZE);
- workspace.left_dock.read_with(cx, |left_dock, cx| {
- let left_dock_size = left_dock
- .active_panel_size(window, cx)
- .unwrap_or(Pixels(0.0));
- if left_dock_size + size > workspace.bounds.right() {
- size = workspace.bounds.right() - left_dock_size
- }
- });
- workspace.right_dock.update(cx, |right_dock, cx| {
- if WorkspaceSettings::get_global(cx)
- .resize_all_panels_in_dock
- .contains(&DockPosition::Right)
- {
- right_dock.resize_all_panels(Some(size), window, cx);
- } else {
- right_dock.resize_active_panel(Some(size), window, cx);
- }
- });
-}
-
-fn resize_left_dock(
- new_size: Pixels,
- workspace: &mut Workspace,
- window: &mut Window,
- cx: &mut App,
-) {
- let size = new_size.min(workspace.bounds.right() - RESIZE_HANDLE_SIZE);
-
- workspace.left_dock.update(cx, |left_dock, cx| {
- if WorkspaceSettings::get_global(cx)
- .resize_all_panels_in_dock
- .contains(&DockPosition::Left)
- {
- left_dock.resize_all_panels(Some(size), window, cx);
- } else {
- left_dock.resize_active_panel(Some(size), window, cx);
- }
- });
-}
-
impl WorkspaceStore {
pub fn new(client: Arc<Client>, cx: &mut Context<Self>) -> Self {
Self {
@@ -6496,14 +6736,25 @@ actions!(
/// can be copied via "Copy link to section" in the context menu of the channel notes
/// buffer. These URLs look like `https://zed.dev/channel/channel-name-CHANNEL_ID/notes`.
OpenChannelNotes,
+ /// Mutes your microphone.
Mute,
+ /// Deafens yourself (mute both microphone and speakers).
Deafen,
+ /// Leaves the current call.
LeaveCall,
+ /// Shares the current project with collaborators.
ShareProject,
+ /// Shares your screen with collaborators.
ScreenShare
]
);
-actions!(zed, [OpenLog]);
+actions!(
+ zed,
+ [
+ /// Opens the Zed log file.
+ OpenLog
+ ]
+);
async fn join_channel_internal(
channel_id: ChannelId,
@@ -7310,6 +7561,7 @@ fn parse_pixel_size_env_var(value: &str) -> Option<Size<Pixels>> {
Some(size(px(width as f32), px(height as f32)))
}
+/// Add client-side decorations (rounded corners, shadows, resize handling) when appropriate.
pub fn client_side_decorations(
element: impl IntoElement,
window: &mut Window,
@@ -7318,8 +7570,9 @@ pub fn client_side_decorations(
const BORDER_SIZE: Pixels = px(1.0);
let decorations = window.window_decorations();
- if matches!(decorations, Decorations::Client { .. }) {
- window.set_client_inset(theme::CLIENT_SIDE_DECORATION_SHADOW);
+ match decorations {
+ Decorations::Client { .. } => window.set_client_inset(theme::CLIENT_SIDE_DECORATION_SHADOW),
+ Decorations::Server { .. } => window.set_client_inset(px(0.0)),
}
struct GlobalResizeEdge(ResizeEdge);
@@ -2,7 +2,7 @@
description = "The fast, collaborative code editor."
edition.workspace = true
name = "zed"
-version = "0.194.0"
+version = "0.195.0"
publish.workspace = true
license = "GPL-3.0-or-later"
authors = ["Zed Team <hi@zed.dev>"]
@@ -63,7 +63,12 @@ git.workspace = true
git_hosting_providers.workspace = true
git_ui.workspace = true
go_to_line.workspace = true
-gpui = { workspace = true, features = ["wayland", "x11", "font-kit"] }
+gpui = { workspace = true, features = [
+ "wayland",
+ "x11",
+ "font-kit",
+ "windows-manifest",
+] }
gpui_tokio.workspace = true
http_client.workspace = true
image_viewer.workspace = true
@@ -44,7 +44,10 @@ use theme::{
use util::{ConnectionResult, ResultExt, TryFutureExt, maybe};
use uuid::Uuid;
use welcome::{BaseKeymap, FIRST_OPEN, show_welcome_view};
-use workspace::{AppState, SerializedWorkspaceLocation, WorkspaceSettings, WorkspaceStore};
+use workspace::{
+ AppState, SerializedWorkspaceLocation, Toast, Workspace, WorkspaceSettings, WorkspaceStore,
+ notifications::NotificationId,
+};
use zed::{
OpenListener, OpenRequest, RawOpenRequest, app_menus, build_window_options,
derive_paths_with_position, handle_cli_connection, handle_keymap_file_changes,
@@ -888,40 +891,107 @@ async fn installation_id() -> Result<IdType> {
async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp) -> Result<()> {
if let Some(locations) = restorable_workspace_locations(cx, &app_state).await {
+ let mut tasks = Vec::new();
+
for location in locations {
match location {
SerializedWorkspaceLocation::Local(location, _) => {
- let task = cx.update(|cx| {
- workspace::open_paths(
- location.paths().as_ref(),
- app_state.clone(),
- workspace::OpenOptions::default(),
- cx,
- )
- })?;
- task.await?;
+ let app_state = app_state.clone();
+ let paths = location.paths().to_vec();
+ let task = cx.spawn(async move |cx| {
+ let open_task = cx.update(|cx| {
+ workspace::open_paths(
+ &paths,
+ app_state,
+ workspace::OpenOptions::default(),
+ cx,
+ )
+ })?;
+ open_task.await.map(|_| ())
+ });
+ tasks.push(task);
}
SerializedWorkspaceLocation::Ssh(ssh) => {
- let connection_options = cx.update(|cx| {
- SshSettings::get_global(cx)
- .connection_options_for(ssh.host, ssh.port, ssh.user)
- })?;
let app_state = app_state.clone();
- cx.spawn(async move |cx| {
- recent_projects::open_ssh_project(
- connection_options,
- ssh.paths.into_iter().map(PathBuf::from).collect(),
- app_state,
- workspace::OpenOptions::default(),
- cx,
- )
- .await
- .log_err();
- })
- .detach();
+ let ssh_host = ssh.host.clone();
+ let task = cx.spawn(async move |cx| {
+ let connection_options = cx.update(|cx| {
+ SshSettings::get_global(cx)
+ .connection_options_for(ssh.host, ssh.port, ssh.user)
+ });
+
+ match connection_options {
+ Ok(connection_options) => recent_projects::open_ssh_project(
+ connection_options,
+ ssh.paths.into_iter().map(PathBuf::from).collect(),
+ app_state,
+ workspace::OpenOptions::default(),
+ cx,
+ )
+ .await
+ .map_err(|e| anyhow::anyhow!(e)),
+ Err(e) => Err(anyhow::anyhow!(
+ "Failed to get SSH connection options for {}: {}",
+ ssh_host,
+ e
+ )),
+ }
+ });
+ tasks.push(task);
}
}
}
+
+ // Wait for all workspaces to open concurrently
+ let results = future::join_all(tasks).await;
+
+ // Show notifications for any errors that occurred
+ let mut error_count = 0;
+ for result in results {
+ if let Err(e) = result {
+ log::error!("Failed to restore workspace: {}", e);
+ error_count += 1;
+ }
+ }
+
+ if error_count > 0 {
+ let message = if error_count == 1 {
+ "Failed to restore 1 workspace. Check logs for details.".to_string()
+ } else {
+ format!(
+ "Failed to restore {} workspaces. Check logs for details.",
+ error_count
+ )
+ };
+
+ // Try to find an active workspace to show the toast
+ let toast_shown = cx
+ .update(|cx| {
+ if let Some(window) = cx.active_window() {
+ if let Some(workspace) = window.downcast::<Workspace>() {
+ workspace
+ .update(cx, |workspace, _, cx| {
+ workspace.show_toast(
+ Toast::new(NotificationId::unique::<()>(), message),
+ cx,
+ )
+ })
+ .ok();
+ return true;
+ }
+ }
+ false
+ })
+ .unwrap_or(false);
+
+ // If we couldn't show a toast (no windows opened successfully),
+ // we've already logged the errors above, so the user can check logs
+ if !toast_shown {
+ log::error!(
+ "Failed to show notification for window restoration errors, because no workspace windows were available."
+ );
+ }
+ }
} else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) {
cx.update(|cx| show_welcome_view(app_state, cx))?.await?;
} else {
@@ -1299,6 +1369,7 @@ fn dump_all_gpui_actions() {
name: &'static str,
human_name: String,
aliases: &'static [&'static str],
+ documentation: Option<&'static str>,
}
let mut actions = gpui::generate_list_of_all_registered_actions()
.into_iter()
@@ -1306,6 +1377,7 @@ fn dump_all_gpui_actions() {
name: action.name,
human_name: command_palette::humanize_action_name(action.name),
aliases: action.deprecated_aliases,
+ documentation: action.documentation,
})
.collect::<Vec<ActionDef>>();
@@ -78,19 +78,33 @@ use zed_actions::{
actions!(
zed,
[
+ /// Opens the element inspector for debugging UI.
DebugElements,
+ /// Hides the application window.
Hide,
+ /// Hides all other application windows.
HideOthers,
+ /// Minimizes the current window.
Minimize,
+ /// Opens the default settings file.
OpenDefaultSettings,
+ /// Opens project-specific settings.
OpenProjectSettings,
+ /// Opens the project tasks configuration.
OpenProjectTasks,
+ /// Opens the tasks panel.
OpenTasks,
+ /// Opens debug tasks configuration.
OpenDebugTasks,
+ /// Resets the application database.
ResetDatabase,
+ /// Shows all hidden windows.
ShowAll,
+ /// Toggles fullscreen mode.
ToggleFullScreen,
+ /// Zooms the window.
Zoom,
+ /// Triggers a test panic for debugging.
TestPanic,
]
);
@@ -1429,6 +1443,8 @@ fn reload_keymaps(cx: &mut App, mut user_key_bindings: Vec<KeyBinding>) {
"New Window",
workspace::NewWindow,
)]);
+ // todo: nicer api here?
+ settings_ui::keybindings::KeymapEventChannel::trigger_keymap_changed(cx);
}
pub fn load_default_keymap(cx: &mut App) {
@@ -4309,6 +4325,7 @@ mod tests {
"icon_theme_selector",
"jj",
"journal",
+ "keymap_editor",
"language_selector",
"lsp_tool",
"markdown",
@@ -258,18 +258,12 @@ impl Render for QuickActionBar {
.action("Next Problem", Box::new(GoToDiagnostic))
.action("Previous Problem", Box::new(GoToPreviousDiagnostic))
.separator()
- .map(|menu| {
- if has_diff_hunks {
- menu.action("Next Hunk", Box::new(GoToHunk))
- .action("Previous Hunk", Box::new(GoToPreviousHunk))
- } else {
- menu.disabled_action("Next Hunk", Box::new(GoToHunk))
- .disabled_action(
- "Previous Hunk",
- Box::new(GoToPreviousHunk),
- )
- }
- })
+ .action_disabled_when(!has_diff_hunks, "Next Hunk", Box::new(GoToHunk))
+ .action_disabled_when(
+ !has_diff_hunks,
+ "Previous Hunk",
+ Box::new(GoToPreviousHunk),
+ )
.separator()
.action("Move Line Up", Box::new(MoveLineUp))
.action("Move Line Down", Box::new(MoveLineDown))
@@ -7,7 +7,7 @@ use svg_preview::{
OpenPreview as SvgOpenPreview, OpenPreviewToTheSide as SvgOpenPreviewToTheSide,
svg_preview_view::SvgPreviewView,
};
-use ui::{IconButtonShape, Tooltip, prelude::*, text_for_keystroke};
+use ui::{Tooltip, prelude::*, text_for_keystroke};
use workspace::Workspace;
use super::QuickActionBar;
@@ -66,7 +66,6 @@ impl QuickActionBar {
};
let button = IconButton::new(button_id, IconName::Eye)
- .shape(IconButtonShape::Square)
.icon_size(IconSize::Small)
.style(ButtonStyle::Subtle)
.tooltip(move |window, cx| {
@@ -11,6 +11,7 @@ use serde::{Deserialize, Serialize};
// https://github.com/mmastrac/rust-ctor/issues/280
pub fn init() {}
+/// Opens a URL in the system's default web browser.
#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
#[serde(deny_unknown_fields)]
@@ -18,6 +19,7 @@ pub struct OpenBrowser {
pub url: String,
}
+/// Opens a zed:// URL within the application.
#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
#[serde(deny_unknown_fields)]
@@ -28,15 +30,25 @@ pub struct OpenZedUrl {
actions!(
zed,
[
+ /// Opens the settings editor.
OpenSettings,
+ /// Opens the default keymap file.
OpenDefaultKeymap,
+ /// Opens account settings.
OpenAccountSettings,
+ /// Opens server settings.
OpenServerSettings,
+ /// Quits the application.
Quit,
+ /// Opens the user keymap file.
OpenKeymap,
+ /// Shows information about Zed.
About,
+ /// Opens the documentation website.
OpenDocs,
+ /// Views open source licenses.
OpenLicenses,
+ /// Opens the telemetry log.
OpenTelemetryLog,
]
);
@@ -56,51 +68,65 @@ pub enum ExtensionCategoryFilter {
DebugAdapters,
}
+/// Opens the extensions management interface.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct Extensions {
/// Filters the extensions page down to extensions that are in the specified category.
#[serde(default)]
pub category_filter: Option<ExtensionCategoryFilter>,
}
+/// Decreases the font size in the editor buffer.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct DecreaseBufferFontSize {
#[serde(default)]
pub persist: bool,
}
+/// Increases the font size in the editor buffer.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct IncreaseBufferFontSize {
#[serde(default)]
pub persist: bool,
}
+/// Resets the buffer font size to the default value.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct ResetBufferFontSize {
#[serde(default)]
pub persist: bool,
}
+/// Decreases the font size of the user interface.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct DecreaseUiFontSize {
#[serde(default)]
pub persist: bool,
}
+/// Increases the font size of the user interface.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct IncreaseUiFontSize {
#[serde(default)]
pub persist: bool,
}
+/// Resets the UI font size to the default value.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = zed)]
+#[serde(deny_unknown_fields)]
pub struct ResetUiFontSize {
#[serde(default)]
pub persist: bool,
@@ -109,7 +135,13 @@ pub struct ResetUiFontSize {
pub mod dev {
use gpui::actions;
- actions!(dev, [ToggleInspector]);
+ actions!(
+ dev,
+ [
+ /// Toggles the developer inspector for debugging UI elements.
+ ToggleInspector
+ ]
+ );
}
pub mod workspace {
@@ -132,9 +164,13 @@ pub mod git {
actions!(
git,
[
+ /// Checks out a different git branch.
CheckoutBranch,
+ /// Switches to a different git branch.
Switch,
+ /// Selects a different repository.
SelectRepo,
+ /// Opens the git branch selector.
#[action(deprecated_aliases = ["branches::OpenRecent"])]
Branch
]
@@ -144,25 +180,51 @@ pub mod git {
pub mod jj {
use gpui::actions;
- actions!(jj, [BookmarkList]);
+ actions!(
+ jj,
+ [
+ /// Opens the Jujutsu bookmark list.
+ BookmarkList
+ ]
+ );
}
pub mod toast {
use gpui::actions;
- actions!(toast, [RunAction]);
+ actions!(
+ toast,
+ [
+ /// Runs the action associated with a toast notification.
+ RunAction
+ ]
+ );
}
pub mod command_palette {
use gpui::actions;
- actions!(command_palette, [Toggle]);
+ actions!(
+ command_palette,
+ [
+ /// Toggles the command palette.
+ Toggle
+ ]
+ );
}
pub mod feedback {
use gpui::actions;
- actions!(feedback, [FileBugReport, GiveFeedback]);
+ actions!(
+ feedback,
+ [
+ /// Opens the bug report form.
+ FileBugReport,
+ /// Opens the feedback form.
+ GiveFeedback
+ ]
+ );
}
pub mod theme_selector {
@@ -170,6 +232,7 @@ pub mod theme_selector {
use schemars::JsonSchema;
use serde::Deserialize;
+ /// Toggles the theme selector interface.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = theme_selector)]
#[serde(deny_unknown_fields)]
@@ -184,6 +247,7 @@ pub mod icon_theme_selector {
use schemars::JsonSchema;
use serde::Deserialize;
+ /// Toggles the icon theme selector interface.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = icon_theme_selector)]
#[serde(deny_unknown_fields)]
@@ -198,7 +262,14 @@ pub mod agent {
actions!(
agent,
- [OpenConfiguration, OpenOnboardingModal, ResetOnboarding]
+ [
+ /// Opens the agent configuration panel.
+ OpenConfiguration,
+ /// Opens the agent onboarding modal.
+ OpenOnboardingModal,
+ /// Resets the agent onboarding state.
+ ResetOnboarding
+ ]
);
}
@@ -216,8 +287,15 @@ pub mod assistant {
]
);
- actions!(assistant, [ShowConfiguration]);
+ actions!(
+ assistant,
+ [
+ /// Shows the assistant configuration panel.
+ ShowConfiguration
+ ]
+ );
+ /// Opens the rules library for managing agent rules and prompts.
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
#[action(namespace = agent, deprecated_aliases = ["assistant::OpenRulesLibrary", "assistant::DeployPromptLibrary"])]
#[serde(deny_unknown_fields)]
@@ -226,6 +304,7 @@ pub mod assistant {
pub prompt_to_select: Option<Uuid>,
}
+ /// Deploys the assistant interface with the specified configuration.
#[derive(Clone, Default, Deserialize, PartialEq, JsonSchema, Action)]
#[action(namespace = assistant)]
#[serde(deny_unknown_fields)]
@@ -237,9 +316,18 @@ pub mod assistant {
pub mod debugger {
use gpui::actions;
- actions!(debugger, [OpenOnboardingModal, ResetOnboarding]);
+ actions!(
+ debugger,
+ [
+ /// Opens the debugger onboarding modal.
+ OpenOnboardingModal,
+ /// Resets the debugger onboarding state.
+ ResetOnboarding
+ ]
+ );
}
+/// Opens the recent projects interface.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = projects)]
#[serde(deny_unknown_fields)]
@@ -248,6 +336,7 @@ pub struct OpenRecent {
pub create_new_window: bool,
}
+/// Creates a project from a selected template.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = projects)]
#[serde(deny_unknown_fields)]
@@ -269,7 +358,7 @@ pub enum RevealTarget {
Dock,
}
-/// Spawn a task with name or open tasks modal.
+/// Spawns a task with name or opens tasks modal.
#[derive(Debug, PartialEq, Clone, Deserialize, JsonSchema, Action)]
#[action(namespace = task)]
#[serde(untagged)]
@@ -302,7 +391,7 @@ impl Spawn {
}
}
-/// Rerun the last task.
+/// Reruns the last task.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = task)]
#[serde(deny_unknown_fields)]
@@ -343,15 +432,36 @@ pub mod outline {
pub static TOGGLE_OUTLINE: OnceLock<fn(AnyView, &mut Window, &mut App)> = OnceLock::new();
}
-actions!(zed_predict_onboarding, [OpenZedPredictOnboarding]);
-actions!(git_onboarding, [OpenGitIntegrationOnboarding]);
+actions!(
+ zed_predict_onboarding,
+ [
+ /// Opens the Zed Predict onboarding modal.
+ OpenZedPredictOnboarding
+ ]
+);
+actions!(
+ git_onboarding,
+ [
+ /// Opens the git integration onboarding modal.
+ OpenGitIntegrationOnboarding
+ ]
+);
-actions!(debug_panel, [ToggleFocus]);
+actions!(
+ debug_panel,
+ [
+ /// Toggles focus on the debug panel.
+ ToggleFocus
+ ]
+);
actions!(
debugger,
[
+ /// Toggles the enabled state of a breakpoint.
ToggleEnableBreakpoint,
+ /// Removes a breakpoint.
UnsetBreakpoint,
+ /// Opens the project debug tasks configuration.
OpenProjectDebugTasks,
]
);
@@ -10,7 +10,15 @@ use workspace::Workspace;
use crate::{RateCompletionModal, onboarding_modal::ZedPredictModal};
-actions!(edit_prediction, [ResetOnboarding, RateCompletions]);
+actions!(
+ edit_prediction,
+ [
+ /// Resets the edit prediction onboarding state.
+ ResetOnboarding,
+ /// Opens the rate completions modal.
+ RateCompletions
+ ]
+);
pub fn init(cx: &mut App) {
cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
@@ -9,11 +9,17 @@ use workspace::{ModalView, Workspace};
actions!(
zeta,
[
+ /// Rates the active completion with a thumbs up.
ThumbsUpActiveCompletion,
+ /// Rates the active completion with a thumbs down.
ThumbsDownActiveCompletion,
+ /// Navigates to the next edit in the completion history.
NextEdit,
+ /// Navigates to the previous edit in the completion history.
PreviousEdit,
+ /// Focuses on the completions list.
FocusCompletions,
+ /// Previews the selected completion.
PreviewCompletion,
]
);
@@ -72,7 +72,13 @@ const MAX_EVENT_TOKENS: usize = 500;
/// Maximum number of events to track.
const MAX_EVENT_COUNT: usize = 16;
-actions!(edit_prediction, [ClearHistory]);
+actions!(
+ edit_prediction,
+ [
+ /// Clears the edit prediction history.
+ ClearHistory
+ ]
+);
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
pub struct InlineCompletionId(Uuid);
@@ -40,13 +40,11 @@ You can connect them by adding their commands directly to your `settings.json`,
```json
{
"context_servers": {
- "some-context-server": {
+ "your-mcp-server": {
"source": "custom",
- "command": {
- "path": "some-command",
- "args": ["arg-1", "arg-2"],
- "env": {}
- }
+ "command": "some-command",
+ "args": ["arg-1", "arg-2"],
+ "env": {}
}
}
}
@@ -1944,17 +1944,17 @@ Example:
1. Maps to `Alt` on Linux and Windows and to `Option` on MacOS:
-```jsonc
+```json
{
- "multi_cursor_modifier": "alt",
+ "multi_cursor_modifier": "alt"
}
```
2. Maps `Control` on Linux and Windows and to `Command` on MacOS:
-```jsonc
+```json
{
- "multi_cursor_modifier": "cmd_or_ctrl", // alias: "cmd", "ctrl"
+ "multi_cursor_modifier": "cmd_or_ctrl" // alias: "cmd", "ctrl"
}
```
@@ -2212,7 +2212,7 @@ The following URI schemes are supported:
`http` will be used when no scheme is specified.
-By default no proxy will be used, or Zed will attempt to retrieve proxy settings from environment variables, such as `http_proxy`, `HTTP_PROXY`, `https_proxy`, `HTTPS_PROXY`, `all_proxy`, `ALL_PROXY`.
+If no proxy is explicitly configured, Zed will attempt to retrieve proxy settings from environment variables such as `http_proxy`, `HTTP_PROXY`, `https_proxy`, `HTTPS_PROXY`, `all_proxy`, `ALL_PROXY`, `no_proxy`, and `NO_PROXY`; if none are set, no proxy will be used.
For example, to set an `http` proxy, add the following to your settings:
@@ -2230,6 +2230,8 @@ Or to set a `socks5` proxy:
}
```
+If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` environment variable. It accepts a comma-separated list of hostnames, host suffixes, IPv4/IPv6 addresses, or CIDR blocks that should not use the proxy. For example, if your environment included `NO_PROXY="google.com, 192.168.1.0/24"`, all hosts in `192.168.1.*`, `google.com`, and `*.google.com` would bypass the proxy. See the [reqwest NoProxy docs](https://docs.rs/reqwest/latest/reqwest/struct.NoProxy.html#method.from_string) for more.
+
## Preview tabs
- Description:
@@ -2562,6 +2564,7 @@ List of `integer` column numbers
"alternate_scroll": "off",
"blinking": "terminal_controlled",
"copy_on_select": false,
+ "keep_selection_on_copy": false,
"dock": "bottom",
"default_width": 640,
"default_height": 320,
@@ -2686,6 +2689,26 @@ List of `integer` column numbers
}
```
+### Terminal: Keep Selection On Copy
+
+- Description: Whether or not to keep the selection in the terminal after copying text.
+- Setting: `keep_selection_on_copy`
+- Default: `false`
+
+**Options**
+
+`boolean` values
+
+**Example**
+
+```json
+{
+ "terminal": {
+ "keep_selection_on_copy": true
+ }
+}
+```
+
### Terminal: Env
- Description: Any key-value pairs added to this object will be added to the terminal's environment. Keys must be unique; use `:` to separate multiple values in a single variable.
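
For instance, a minimal sketch (the variable name and values below are illustrative):

```json
{
  "terminal": {
    "env": {
      "MY_VAR": "value1:value2"
    }
  }
}
```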
@@ -3,6 +3,7 @@
Zed uses the [Debug Adapter Protocol (DAP)](https://microsoft.github.io/debug-adapter-protocol/) to provide debugging functionality across multiple programming languages.
DAP is a standardized protocol that defines how debuggers, editors, and IDEs communicate with each other.
It allows Zed to support various debuggers without needing to implement language-specific debugging logic.
+Zed implements the client side of the protocol, and various _debug adapters_ implement the server side.
This protocol enables features like setting breakpoints, stepping through code, inspecting variables,
and more, in a consistent manner across different programming languages and runtime environments.
@@ -10,36 +11,53 @@ and more, in a consistent manner across different programming languages and runt
> We currently offer onboarding support for users. We are eager to hear from you if you encounter any issues or have suggestions for improvement for our debugging experience.
> You can schedule a call via [Cal.com](https://cal.com/team/zed-research/debugger)
-## Supported Debug Adapters
+## Supported Languages
-Zed supports a variety of debug adapters for different programming languages out of the box:
+To debug code written in a specific language, Zed needs to find a debug adapter for that language. Some debug adapters are provided by Zed without additional setup, and some are provided by [language extensions](./extensions/debugger-extensions.md). The following languages currently have debug adapters available:
-- JavaScript ([vscode-js-debug](https://github.com/microsoft/vscode-js-debug.git)): Enables debugging of Node.js applications, including setting breakpoints, stepping through code, and inspecting variables in JavaScript.
+<!-- keep this sorted -->
-- Python ([debugpy](https://github.com/microsoft/debugpy.git)): Provides debugging capabilities for Python applications, supporting features like remote debugging, multi-threaded debugging, and Django/Flask application debugging.
+- [C](./languages/c.md#debugging) (built-in)
+- [C++](./languages/cpp.md#debugging) (built-in)
+- [Go](./languages/go.md#debugging) (built-in)
+- [JavaScript](./languages/javascript.md#debugging) (built-in)
+- [PHP](./languages/php.md#debugging) (built-in)
+- [Python](./languages/python.md#debugging) (built-in)
+- [Ruby](./languages/ruby.md#debugging) (provided by extension)
+- [Rust](./languages/rust.md#debugging) (built-in)
+- [Swift](./languages/swift.md#debugging) (provided by extension)
+- [TypeScript](./languages/typescript.md#debugging) (built-in)
-- LLDB ([CodeLLDB](https://github.com/vadimcn/codelldb.git)): A powerful debugger for Rust, C, C++, and some other compiled languages, offering low-level debugging features and support for Apple platforms.
+> If your language isn't listed, you can contribute by adding a debug adapter for it. Check out our [debugger extensions](./extensions/debugger-extensions.md) documentation for more information.
-- GDB ([GDB](https://sourceware.org/gdb/)): The GNU Debugger, which supports debugging for multiple programming languages including C, C++, Go, and Rust, across various platforms.
+Follow those links for language- and adapter-specific information and examples, or read on for more about Zed's general debugging features that apply to all adapters.
-- Go ([Delve](https://github.com/go-delve/delve)): Delve, a debugger for the Go programming language, offering both local and remote debugging capabilities with full support for Go's runtime and standard library.
-
-- PHP ([Xdebug](https://xdebug.org/)): Provides debugging and profiling capabilities for PHP applications, including remote debugging and code coverage analysis.
+## Getting Started
-- Ruby ([rdbg](https://github.com/ruby/debug)): Provides debugging for Ruby.
+For most languages, the fastest way to get started is to run {#action debugger::Start} ({#kb debugger::Start}). This opens the _new process modal_, which shows a contextual list of preconfigured debug tasks for the current project. Debug tasks are created from tests, from entry points (like a `main` function), and from other sources; consult the documentation for your language for full information about what's supported.
-These adapters enable Zed to provide a consistent debugging experience across multiple languages while leveraging the specific features and capabilities of each debugger.
+You can open the same modal by clicking the "plus" button at the top right of the debug panel.
-> Is your desired debugger not listed? You can install a [Debug Adapter extension](https://zed.dev/extensions?filter=debug-adapters) to add support for your favorite debugger.
-> If that's not enough, you can contribute by creating an extension yourself. Check out our [debugger extensions](extensions/debugger-extensions.md) documentation for more information.
+For languages that don't provide preconfigured debug tasks (this includes C, C++, and some extension-supported languages), you can define debug configurations in the `.zed/debug.json` file in your project root. This file should be an array of configuration objects:
-## Getting Started
-
-For basic debugging, you can set up a new configuration by opening the `New Session Modal` either via the `debugger: start` (default: f4) or by clicking the plus icon at the top right of the debug panel.
+```json
+[
+ {
+ "adapter": "CodeLLDB",
+ "label": "First configuration"
+ // ...
+ },
+ {
+ "adapter": "Debugpy",
+ "label": "Second configuration"
+ // ...
+ }
+]
+```
-For more advanced use cases, you can create debug configurations by directly editing the `.zed/debug.json` file in your project root directory.
+Check the documentation for your language for example configurations covering typical use-cases. Once you've added configurations to `.zed/debug.json`, they'll appear in the list in the new process modal.
-You can then use the `New Session Modal` to select a configuration and start debugging.
+Zed will also load debug configurations from `.vscode/launch.json`, and show them in the new process modal if no configurations are found in `.zed/debug.json`.
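+
+For reference, a minimal sketch of such a `.vscode/launch.json` (the name, type, and program values below are illustrative):
+
+```json
+{
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Debug index.js",
+ "type": "node",
+ "request": "launch",
+ "program": "${workspaceFolder}/index.js"
+ }
+ ]
+}
+```
+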
### Launching & Attaching
@@ -58,7 +76,7 @@ While configuration fields are debug adapter-dependent, most adapters support th
```json
[
{
- // The label for the debug configuration and used to identify the debug session inside the debug panel & new session modal
+ // The label for the debug configuration, used to identify the debug session inside the debug panel & new process modal
"label": "Example Start debugger config",
// The debug adapter that Zed should use to debug the program
"adapter": "Example adapter name",
@@ -113,291 +131,7 @@ Build tasks can also refer to the existing tasks by unsubstituted label:
### Automatic scenario creation
Given a Zed task, Zed can automatically create a debug scenario for you. This is also what powers scenario creation from the gutter.
-Automatic scenario creation is currently supported for Rust, Go, and Python. JavaScript/TypeScript support is being worked on.
-
-### Example Configurations
-
-#### JavaScript
-
-##### Debug Active File
-
-```json
-[
- {
- "label": "Debug with node",
- "adapter": "JavaScript",
- "program": "$ZED_FILE",
- "request": "launch",
- "console": "integratedTerminal",
- "type": "pwa-node"
- }
-]
-```
-
-##### Attach debugger to a server running in web browser (`npx serve`)
-
-Given an externally-ran web server (e.g., with `npx serve` or `npx live-server`) one can attach to it and open it with a browser.
-
-```json
-[
- {
- "label": "Inspect ",
- "adapter": "JavaScript",
- "type": "pwa-chrome",
- "request": "launch",
- "url": "http://localhost:5500", // Fill your URL here.
- "program": "$ZED_FILE",
- "webRoot": "${ZED_WORKTREE_ROOT}"
- }
-]
-```
-
-#### Python
-
-##### Debug Active File
-
-```json
-[
- {
- "label": "Python Active File",
- "adapter": "Debugpy",
- "program": "$ZED_FILE",
- "request": "launch"
- }
-]
-```
-
-##### Flask App
-
-For a common Flask Application with a file structure similar to the following:
-
-```
-.venv/
-app/
- init.py
- main.py
- routes.py
-templates/
- index.html
-static/
- style.css
-requirements.txt
-```
-
-…the following configuration can be used:
-
-```json
-[
- {
- "label": "Python: Flask",
- "adapter": "Debugpy",
- "request": "launch",
- "module": "app",
- "cwd": "$ZED_WORKTREE_ROOT",
- "env": {
- "FLASK_APP": "app",
- "FLASK_DEBUG": "1"
- },
- "args": [
- "run",
- "--reload", // Enables Flask reloader that watches for file changes
- "--debugger" // Enables Flask debugger
- ],
- "autoReload": {
- "enable": true
- },
- "jinja": true,
- "justMyCode": true
- }
-]
-```
-
-#### Rust/C++/C
-
-##### Using pre-built binary
-
-```json
-[
- {
- "label": "Debug native binary",
- "program": "$ZED_WORKTREE_ROOT/build/binary",
- "request": "launch",
- "adapter": "CodeLLDB" // GDB is available on non arm macs as well as linux
- }
-]
-```
-
-##### Build binary then debug
-
-```json
-[
- {
- "label": "Build & Debug native binary",
- "build": {
- "command": "cargo",
- "args": ["build"]
- },
- "program": "$ZED_WORKTREE_ROOT/target/debug/binary",
- "request": "launch",
- "adapter": "CodeLLDB" // GDB is available on non arm macs as well as linux
- }
-]
-```
-
-#### TypeScript
-
-##### Attach debugger to a server running in web browser (`npx serve`)
-
-Given an externally-ran web server (e.g., with `npx serve` or `npx live-server`) one can attach to it and open it with a browser.
-
-```json
-[
- {
- "label": "Launch Chrome (TypeScript)",
- "adapter": "JavaScript",
- "type": "pwa-chrome",
- "request": "launch",
- "url": "http://localhost:5500",
- "program": "$ZED_FILE",
- "webRoot": "${ZED_WORKTREE_ROOT}",
- "sourceMaps": true,
- "build": {
- "command": "npx",
- "args": ["tsc"]
- }
- }
-]
-```
-
-#### Go
-
-Zed uses [delve](https://github.com/go-delve/delve?tab=readme-ov-file) to debug Go applications.
-Zed will automatically create debug scenarios for `func main` in your main packages, and also
-for any tests, so you can use the Play button in the gutter to debug these without configuration.
-
-##### Debug Go Packages
-
-To debug a specific package, you can do so by setting the Delve mode to "debug". In this case "program" should be set to the package name.
-
-```json
-[
- {
- "label": "Go (Delve)",
- "adapter": "Delve",
- "program": "$ZED_FILE",
- "request": "launch",
- "mode": "debug"
- }
-]
-```
-
-```json
-[
- {
- "label": "Run server",
- "adapter": "Delve",
- "request": "launch",
- "mode": "debug",
- // For Delve, the program can be a package name
- "program": "./cmd/server"
- // "args": [],
- // "buildFlags": [],
- }
-]
-```
-
-##### Debug Go Tests
-
-To debug the tests for a package, set the Delve mode to "test".
-The "program" is still the package name, and you can use the "buildFlags" to do things like set tags, and the "args" to set args on the test binary. (See `go help testflags` for more information on doing that).
-
-```json
-[
- {
- "label": "Run integration tests",
- "adapter": "Delve",
- "request": "launch",
- "mode": "test",
- "program": ".",
- "buildFlags": ["-tags", "integration"]
- // To filter down to just the test your cursor is in:
- // "args": ["-test.run", "$ZED_SYMBOL"]
- }
-]
-```
-
-##### Build and debug separately
-
-If you need to build your application with a specific command, you can use the "exec" mode of Delve. In this case "program" should point to an executable,
-and the "build" command should build that.
-
-```json
-{
- "label": "Debug Prebuilt Unit Tests",
- "adapter": "Delve",
- "request": "launch",
- "mode": "exec",
- "program": "${ZED_WORKTREE_ROOT}/__debug_unit",
- "args": ["-test.v", "-test.run=${ZED_SYMBOL}"],
- "build": {
- "command": "go",
- "args": [
- "test",
- "-c",
- "-tags",
- "unit",
- "-gcflags\"all=-N -l\"",
- "-o",
- "__debug_unit",
- "./pkg/..."
- ]
- }
-}
-```
-
-##### Attaching to an existing instance of Delve
-
-You might find yourself needing to connect to an existing instance of Delve that's not necessarily running on your machine; in such case, you can use `tcp_arguments` to instrument Zed's connection to Delve.
-
-```
-{
- "adapter": "Delve",
- "label": "Connect to a running Delve instance",
- "program": "/Users/zed/Projects/language_repositories/golang/hello/hello",
- "cwd": "/Users/zed/Projects/language_repositories/golang/hello",
- "args": [],
- "env": {},
- "request": "launch",
- "mode": "exec",
- "stopOnEntry": false,
- "tcp_connection": { "host": "123.456.789.012", "port": 53412 }
-}
-```
-
-In such case Zed won't spawn a new instance of Delve, as it opts to use an existing one. The consequence of this is that _there will be no terminal_ in Zed; you have to interact with the Delve instance directly, as it handles stdin/stdout of the debuggee.
-
-#### Ruby
-
-To run a ruby task in the debugger, you will need to configure it in the `.zed/debug.json` file in your project. We don't yet have automatic detection of ruby tasks, nor do we support connecting to an existing process.
-
-The configuration should look like this:
-
-```json
-[
- {
- "adapter": "Ruby",
- "label": "Run CLI",
- "script": "cli.rb"
- // If you want to customize how the script is run (for example using bundle exec)
- // use "command" instead.
- // "command": "bundle exec cli.rb"
- //
- // "args": []
- // "env": {}
- // "cwd": ""
- }
-]
-```
+Automatic scenario creation is currently supported for Rust, Go, Python, JavaScript, and TypeScript.
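+
+For instance, a minimal sketch of a Zed task in `.zed/tasks.json` that automatic scenario creation can pick up (the label and command below are illustrative, assuming a Rust project):
+
+```json
+[
+ {
+ "label": "cargo run",
+ "command": "cargo",
+ "args": ["run"]
+ }
+]
+```
+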
## Breakpoints
@@ -605,5 +339,5 @@ If you're running into problems with the debugger, please [open a GitHub issue](
There are also some features you can use to gather more information about the problem:
-- When you have a session running in the debug panel, you can run the `dev: copy debug adapter arguments` action to copy a JSON blob to the clipboard that describes how Zed initialized the session. This is especially useful when the session failed to start, and is great context to add if you open a GitHub issue.
-- You can also use the `dev: open debug adapter logs` action to see a trace of all of Zed's communications with debug adapters during the most recent debug sessions.
+- When you have a session running in the debug panel, you can run the {#action dev::CopyDebugAdapterArguments} action to copy a JSON blob to the clipboard that describes how Zed initialized the session. This is especially useful when the session failed to start, and is great context to add if you open a GitHub issue.
+- You can also use the {#action dev::OpenDebugAdapterLogs} action to see a trace of all of Zed's communications with debug adapters during the most recent debug sessions.
@@ -16,15 +16,36 @@ Clone the [Zed repository](https://github.com/zed-industries/zed).
If preferred, you can inspect [`script/freebsd`](https://github.com/zed-industries/zed/blob/main/script/freebsd) and perform the steps manually.
----
+## Building from source
-### ⚠️ WebRTC Notice
+Once the dependencies are installed, you can build Zed using [Cargo](https://doc.rust-lang.org/cargo/).
-Currently, building `webrtc-sys` on FreeBSD fails due to missing upstream support and unavailable prebuilt binaries.
-This is actively being worked on.
+For a debug build of the editor:
-More progress and discussion can be found in [Zed’s GitHub Discussions](https://github.com/zed-industries/zed/discussions/29550).
+```sh
+cargo run
+```
-_Environment:
-FreeBSD 14.2-RELEASE
-Architecture: amd64 (x86_64)_
+And to run the tests:
+
+```sh
+cargo test --workspace
+```
+
+In release mode, the primary user interface is the `cli` crate. You can run it in development with:
+
+```sh
+cargo run -p cli
+```
+
+### WebRTC Notice
+
+Currently, building `webrtc-sys` on FreeBSD fails due to missing upstream support and unavailable prebuilt binaries. As a result, some collaboration features (audio calls and screensharing) that depend on WebRTC are temporarily disabled.
+
+See [Issue #15309: FreeBSD Support] and [Discussion #29550: Unofficial FreeBSD port for Zed] for more.
+
+## Troubleshooting
+
+### Cargo errors claiming that a dependency is using unstable features
+
+Try `cargo clean` and `cargo build`.
@@ -56,7 +56,7 @@ impl zed::Extension for MyExtension {
}
```
-`dap_config_to_scenario` is used when the user spawns a session via new session modal UI. At a high level, it takes a generic debug configuration (that isn't specific to any
+`dap_config_to_scenario` is used when the user spawns a session via the new process modal UI. At a high level, it takes a generic debug configuration (that isn't specific to any
debug adapter) and tries to turn it into a concrete debug scenario for your adapter.
Put another way, it is supposed to answer the question: "Given a program, a list of arguments, a current working directory, and environment variables, what would the configuration for spawning this debug adapter look like?"
@@ -4,6 +4,7 @@ C support is available natively in Zed.
- Tree-sitter: [tree-sitter/tree-sitter-c](https://github.com/tree-sitter/tree-sitter-c)
- Language Server: [clangd/clangd](https://github.com/clangd/clangd)
+- Debug Adapter: [CodeLLDB](https://github.com/vadimcn/codelldb) (primary), [GDB](https://sourceware.org/gdb/) (secondary, not available on Apple silicon)
## Clangd: Force detect as C
@@ -61,3 +62,25 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
```
After building your project, CMake will generate the `compile_commands.json` file in the build directory and clangd will automatically pick it up.
+
+## Debugging
+
+You can use CodeLLDB or GDB to debug native binaries. (Make sure that your build process passes `-g` to the C compiler, so that debug information is included in the resulting binary.) See below for examples of debug configurations that you can add to `.zed/debug.json`.
+
+### Build and Debug Binary
+
+```json
+[
+ {
+ "label": "Debug native binary",
+ "build": {
+ "command": "make",
+ "args": ["-j8"],
+ "cwd": "$ZED_WORKTREE_ROOT"
+ },
+ "program": "$ZED_WORKTREE_ROOT/build/prog",
+ "request": "launch",
+ "adapter": "CodeLLDB"
+ }
+]
+```
@@ -112,3 +112,25 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
```
After building your project, CMake will generate the `compile_commands.json` file in the build directory and clangd will automatically pick it up.
+
+## Debugging
+
+You can use CodeLLDB or GDB to debug native binaries. (Make sure that your build process passes `-g` to the C++ compiler, so that debug information is included in the resulting binary.) See below for examples of debug configurations that you can add to `.zed/debug.json`.
+
+### Build and Debug Binary
+
+```json
+[
+ {
+ "label": "Debug native binary",
+ "build": {
+ "command": "make",
+ "args": ["-j8"],
+ "cwd": "$ZED_WORKTREE_ROOT"
+ },
+ "program": "$ZED_WORKTREE_ROOT/build/prog",
+ "request": "launch",
+ "adapter": "CodeLLDB"
+ }
+]
+```
@@ -4,6 +4,7 @@ Go support is available natively in Zed.
- Tree-sitter: [tree-sitter/tree-sitter-go](https://github.com/tree-sitter/tree-sitter-go)
- Language Server: [golang/tools/tree/master/gopls](https://github.com/golang/tools/tree/master/gopls)
+- Debug Adapter: [delve](https://github.com/go-delve/delve)
## Setup
@@ -72,6 +73,112 @@ to override these settings.
See [gopls inlayHints documentation](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md) for more information.
+## Debugging
+
+Zed supports zero-configuration debugging of Go tests and entry points (`func main`). Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list of these preconfigured debug tasks.
+
+For more control, you can add debug configurations to `.zed/debug.json`. See below for examples.
+
+### Debug Go Packages
+
+To debug a specific package, set the Delve mode to "debug". In this case, "program" should be set to the package name.
+
+```json
+[
+ {
+ "label": "Go (Delve)",
+ "adapter": "Delve",
+ "program": "$ZED_FILE",
+ "request": "launch",
+ "mode": "debug"
+ },
+ {
+ "label": "Run server",
+ "adapter": "Delve",
+ "request": "launch",
+ "mode": "debug",
+ // For Delve, the program can be a package name
+ "program": "./cmd/server"
+ // "args": [],
+ // "buildFlags": [],
+ }
+]
+```
+
+### Debug Go Tests
+
+To debug the tests for a package, set the Delve mode to "test".
+The "program" is still the package name; use "buildFlags" to do things like set build tags, and "args" to pass arguments to the test binary. (See `go help testflag` for more information.)
+
+```json
+[
+ {
+ "label": "Run integration tests",
+ "adapter": "Delve",
+ "request": "launch",
+ "mode": "test",
+ "program": ".",
+ "buildFlags": ["-tags", "integration"]
+ // To filter down to just the test your cursor is in:
+ // "args": ["-test.run", "$ZED_SYMBOL"]
+ }
+]
+```
+
+### Build and debug separately
+
+If you need to build your application with a specific command, you can use the "exec" mode of Delve. In this case, "program" should point to an executable,
+and the "build" command should produce that executable.
+
+```json
+[
+ {
+ "label": "Debug Prebuilt Unit Tests",
+ "adapter": "Delve",
+ "request": "launch",
+ "mode": "exec",
+ "program": "${ZED_WORKTREE_ROOT}/__debug_unit",
+ "args": ["-test.v", "-test.run=${ZED_SYMBOL}"],
+ "build": {
+ "command": "go",
+ "args": [
+ "test",
+ "-c",
+ "-tags",
+ "unit",
+ "-gcflags=all=-N -l",
+ "-o",
+ "__debug_unit",
+ "./pkg/..."
+ ]
+ }
+ }
+]
+```
+
+### Attaching to an existing instance of Delve
+
+You might find yourself needing to connect to an existing instance of Delve that isn't necessarily running on your machine; in that case, you can use `tcp_connection` to configure Zed's connection to Delve.
+
+```json
+[
+ {
+ "adapter": "Delve",
+ "label": "Connect to a running Delve instance",
+ "program": "/Users/zed/Projects/language_repositories/golang/hello/hello",
+ "cwd": "/Users/zed/Projects/language_repositories/golang/hello",
+ "args": [],
+ "env": {},
+ "request": "launch",
+ "mode": "exec",
+ "stopOnEntry": false,
+ "tcp_connection": { "host": "192.168.1.100", "port": 53412 }
+ }
+]
+```
+
+In this case, Zed won't spawn a new instance of Delve, as it opts to use an existing one. As a consequence, _there will be no terminal_ in Zed; you have to interact with the Delve instance directly, as it handles the stdin/stdout of the debuggee.
+
## Go Mod
- Tree-sitter: [camdencheek/tree-sitter-go-mod](https://github.com/camdencheek/tree-sitter-go-mod)
@@ -4,6 +4,7 @@ JavaScript support is available natively in Zed.
- Tree-sitter: [tree-sitter/tree-sitter-javascript](https://github.com/tree-sitter/tree-sitter-javascript)
- Language Server: [typescript-language-server/typescript-language-server](https://github.com/typescript-language-server/typescript-language-server)
+- Debug Adapter: [vscode-js-debug](https://github.com/microsoft/vscode-js-debug)
## Code formatting
@@ -174,6 +175,54 @@ You can configure ESLint's `workingDirectory` setting:
}
```
+## Debugging
+
+Zed supports debugging JavaScript code out of the box.
+The following can be debugged without writing additional configuration:
+
+- Tasks from `package.json`
+- Tests written using several popular frameworks (Jest, Mocha, Vitest, Jasmine)
+
+Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list of these predefined debug tasks.
+
+As with all languages, configurations from `.vscode/launch.json` are also available for debugging in Zed.
+
+If your use-case isn't covered by any of these, you can take full control by adding debug configurations to `.zed/debug.json`. See below for example configurations.
+
+### Debug the current file
+
+```json
+[
+ {
+ "adapter": "JavaScript",
+ "label": "Debug JS file",
+ "type": "node",
+ "request": "launch",
+ "program": "$ZED_FILE",
+ "skipFiles": ["<node_internals>/**"]
+ }
+]
+```
+
+This implicitly runs the current file using `node`.
+
+### Launch a web app in Chrome
+
+```json
+[
+ {
+ "adapter": "JavaScript",
+ "label": "Debug app in Chrome",
+ "type": "chrome",
+ "request": "launch",
+ "file": "$ZED_WORKTREE_ROOT/index.html",
+ "webRoot": "$ZED_WORKTREE_ROOT",
+ "console": "integratedTerminal",
+ "skipFiles": ["<node_internals>/**"]
+ }
+]
+```
+
## See also
- [Yarn documentation](./yarn.md) for a walkthrough of configuring your project to use Yarn.
@@ -6,6 +6,7 @@ Python support is available natively in Zed.
- Language Servers:
- [microsoft/pyright](https://github.com/microsoft/pyright)
- [python-lsp/python-lsp-server](https://github.com/python-lsp/python-lsp-server) (PyLSP)
+- Debug Adapter: [debugpy](https://github.com/microsoft/debugpy)
## Language Servers
@@ -125,3 +126,67 @@ A common tool for formatting Python code is [Ruff](https://docs.astral.sh/ruff/)
TBD: Expand Python Ruff docs.
TBD: Ruff pyproject.toml, ruff.toml docs. `ruff.configuration`.
-->
+
+## Debugging
+
+Zed supports zero-configuration debugging of Python module entry points and pytest tests.
+Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list for the current project.
+For greater control, you can add debug configurations to `.zed/debug.json`. See the examples below.
+
+### Debug Active File
+
+```json
+[
+ {
+ "label": "Python Active File",
+ "adapter": "Debugpy",
+ "program": "$ZED_FILE",
+ "request": "launch"
+ }
+]
+```
+
+### Flask App
+
+For a common Flask Application with a file structure similar to the following:
+
+```
+.venv/
+app/
+ init.py
+ main.py
+ routes.py
+templates/
+ index.html
+static/
+ style.css
+requirements.txt
+```
+
+…the following configuration can be used:
+
+```json
+[
+ {
+ "label": "Python: Flask",
+ "adapter": "Debugpy",
+ "request": "launch",
+ "module": "app",
+ "cwd": "$ZED_WORKTREE_ROOT",
+ "env": {
+ "FLASK_APP": "app",
+ "FLASK_DEBUG": "1"
+ },
+ "args": [
+ "run",
+ "--reload", // Enables Flask reloader that watches for file changes
+ "--debugger" // Enables Flask debugger
+ ],
+ "autoReload": {
+ "enable": true
+ },
+ "jinja": true,
+ "justMyCode": true
+ }
+]
+```
@@ -63,6 +63,9 @@ See [Using lintr](https://lintr.r-lib.org/articles/lintr.html) for a complete li
`REditorSupport/languageserver` bundles support for [r-lib/styler](https://github.com/r-lib/styler) as a formatter. See [Customizing Styler](https://cran.r-project.org/web/packages/styler/vignettes/customizing_styler.html) for more information on how to customize its behavior.
+<!--
+TBD: Get this working
+
### REditorSupport/languageserver Configuration
You can configure the [R languageserver settings](https://github.com/REditorSupport/languageserver#settings) via Zed Project Settings `.zed/settings.json` or Zed User Settings `~/.config/zed/settings.json`:
@@ -86,6 +89,8 @@ For example to disable Lintr linting and suppress code snippet suggestions (both
}
```
+-->
+
<!--
TBD: R REPL Docs
@@ -9,6 +9,7 @@ Ruby support is available through the [Ruby extension](https://github.com/zed-ex
- [ruby-lsp](https://github.com/Shopify/ruby-lsp)
- [solargraph](https://github.com/castwide/solargraph)
- [rubocop](https://github.com/rubocop/rubocop)
+- Debug Adapter: [`rdbg`](https://github.com/ruby/debug)
The Ruby extension also provides support for ERB files.
@@ -43,15 +44,15 @@ For all supported Ruby language servers (`solargraph`, `ruby-lsp`, `rubocop`, `s
You can skip step 1 and force using the system executable by setting `use_bundler` to `false` in your settings:
-```jsonc
+```json
{
"lsp": {
"<SERVER_NAME>": {
"settings": {
- "use_bundler": false,
- },
- },
- },
+ "use_bundler": false
+ }
+ }
+ }
}
```
@@ -255,7 +256,7 @@ In order to do that, you need to configure the language server so that it knows
"tailwindcss-language-server": {
"settings": {
"includeLanguages": {
- "erb": "html",
+ "html/erb": "html",
"ruby": "html"
},
"experimental": {
@@ -340,3 +341,60 @@ Plain minitest does not support running tests by line number, only by name, so w
```
Similar task syntax can be used for other test frameworks such as `quickdraw` or `tldr`.
+
+## Debugging
+
+The Ruby extension provides a debug adapter for debugging Ruby code. Zed's name for the adapter (in the UI and `debug.json`) is `rdbg`, and under the hood, it uses the [`debug`](https://github.com/ruby/debug) gem. The extension uses the [same activation logic](#language-server-activation) as the language servers.
+
+### Examples
+
+#### Debug a Ruby script
+
+```json
+[
+ {
+ "label": "Debug current file",
+ "adapter": "rdbg",
+ "request": "launch",
+ "script": "$ZED_FILE",
+ "cwd": "$ZED_WORKTREE_ROOT"
+ }
+]
+```
+
+#### Debug Rails server
+
+```json
+[
+ {
+ "label": "Debug Rails server",
+ "adapter": "rdbg",
+ "request": "launch",
+ "command": "$ZED_WORKTREE_ROOT/bin/rails",
+ "args": ["server"],
+ "cwd": "$ZED_WORKTREE_ROOT",
+ "env": {
+ "RUBY_DEBUG_OPEN": "true"
+ }
+ }
+]
+```
+
+## Formatters
+
+### `erb-formatter`
+
+To format ERB templates, you can use the `erb-formatter` formatter, which is backed by the [`erb-formatter`](https://rubygems.org/gems/erb-formatter) gem.
+
+```jsonc
+{
+ "HTML/ERB": {
+ "formatter": {
+ "external": {
+ "command": "erb-formatter",
+ "arguments": ["--stdin-filename", "{buffer_path}"],
+ },
+ },
+ },
+}
+```
@@ -4,6 +4,7 @@ Rust support is available natively in Zed.
- Tree-sitter: [tree-sitter/tree-sitter-rust](https://github.com/tree-sitter/tree-sitter-rust)
- Language Server: [rust-lang/rust-analyzer](https://github.com/rust-lang/rust-analyzer)
+- Debug Adapter: [CodeLLDB](https://github.com/vadimcn/codelldb) (primary), [GDB](https://sourceware.org/gdb/) (secondary, not available on Apple silicon)
<!--
TBD: Polish Rust Docs. Zed is a good rust editor, good Rust docs make it look like we care about Rust (we do!)
@@ -291,3 +292,47 @@ There's a way get custom completion items from rust-analyzer, that will transfor
}
}
```
+
+## Debugging
+
+Zed supports debugging Rust binaries and tests out of the box. Run {#action debugger::Start} ({#kb debugger::Start}) to launch one of these preconfigured debug tasks.
+
+For more control, you can add debug configurations to `.zed/debug.json`. See the examples below.
+
+### Build binary then debug
+
+```json
+[
+ {
+ "label": "Build & Debug native binary",
+ "build": {
+ "command": "cargo",
+ "args": ["build"]
+ },
+ "program": "$ZED_WORKTREE_ROOT/target/debug/binary",
+ // sourceLanguages is required for CodeLLDB (not GDB) when using Rust
+ "sourceLanguages": ["rust"],
+ "request": "launch",
+ "adapter": "CodeLLDB"
+ }
+]
+```
+
+### Automatically locate a debug target based on build command
+
+When you use `cargo build` or `cargo test` as the build command, Zed can infer the path to the output binary.
+
+```json
+[
+ {
+ "label": "Build & Debug native binary",
+ "adapter": "CodeLLDB",
+ "build": {
+ "command": "cargo",
+ "args": ["build"]
+ },
+ // sourceLanguages is required for CodeLLDB (not GDB) when using Rust
+ "sourceLanguages": ["rust"]
+ }
+]
+```
@@ -5,6 +5,7 @@ TypeScript and TSX support are available natively in Zed.
- Tree-sitter: [tree-sitter/tree-sitter-typescript](https://github.com/tree-sitter/tree-sitter-typescript)
- Language Server: [yioneko/vtsls](https://github.com/yioneko/vtsls)
- Alternate Language Server: [typescript-language-server/typescript-language-server](https://github.com/typescript-language-server/typescript-language-server)
+- Debug Adapter: [vscode-js-debug](https://github.com/microsoft/vscode-js-debug)
<!--
TBD: Document the difference between Language servers
@@ -155,6 +156,43 @@ When using `vtsls`:
}
```
+## Debugging
+
+Zed supports debugging TypeScript code out of the box.
+The following can be debugged without writing additional configuration:
+
+- Tasks from `package.json`
+- Tests written using several popular frameworks (Jest, Mocha, Vitest, Jasmine)
+
+Run {#action debugger::Start} ({#kb debugger::Start}) to see a contextual list of these predefined debug tasks.
+
+As with all languages, configurations from `.vscode/launch.json` are also available for debugging in Zed.
+
+If your use-case isn't covered by any of these, you can take full control by adding debug configurations to `.zed/debug.json`. See below for example configurations.
+
+### Attach debugger to a server running in web browser (`npx serve`)
+
+Given an externally run web server (e.g., started with `npx serve` or `npx live-server`), you can attach to it and open it in a browser.
+
+```json
+[
+ {
+ "label": "Launch Chrome (TypeScript)",
+ "adapter": "JavaScript",
+ "type": "chrome",
+ "request": "launch",
+ "url": "http://localhost:5500",
+ "program": "$ZED_FILE",
+ "webRoot": "${ZED_WORKTREE_ROOT}",
+ "build": {
+ "command": "npx",
+ "args": ["tsc"]
+ },
+ "skipFiles": ["<node_internals>/**"]
+ }
+]
+```
+
## See also
- [Zed Yarn documentation](./yarn.md) for a walkthrough of configuring your project to use Yarn.
@@ -148,7 +148,7 @@ On some systems the file `/etc/prime-discrete` can be used to enforce the use of
On others, you may be able to set the environment variable `DRI_PRIME=1` when running Zed to force the use of the discrete GPU.
-If you're using an AMD GPU and Zed crashes when selecting long lines, try setting the `ZED_PATH_SAMPLE_COUNT=0` environment variable. (See [#26143](https://github.com/zed-industries/zed/issues/26143))
+If you're using an AMD GPU and Zed crashes when selecting long lines, try setting the `ZED_SAMPLE_COUNT=0` environment variable. (See [#26143](https://github.com/zed-industries/zed/issues/26143))
If you're using an AMD GPU, you might get a 'Broken Pipe' error. Try using the RADV or Mesa drivers. (See [#13880](https://github.com/zed-industries/zed/issues/13880))
@@ -6,8 +6,6 @@ a build of Zed on Windows, and you can compile it yourself with these instructio
- [Building for Windows](./development/windows.md)
-We are currently hiring a [Windows Lead](https://zed.dev/jobs/windows-lead).
-
For now, we welcome contributions from the community to improve Windows support.
- [GitHub Issues with 'Windows' label](https://github.com/zed-industries/zed/issues?q=is%3Aissue+is%3Aopen+label%3Awindows)
@@ -1,7 +1,7 @@
id = "emmet"
name = "Emmet"
description = "Emmet support"
-version = "0.0.3"
+version = "0.0.4"
schema_version = 1
authors = ["Piotr Osiewicz <piotr@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
@@ -9,12 +9,15 @@ repository = "https://github.com/zed-industries/zed"
[language_servers.emmet-language-server]
name = "Emmet Language Server"
language = "HTML"
-languages = ["HTML", "PHP", "ERB", "JavaScript", "TSX", "CSS"]
+languages = ["HTML", "PHP", "ERB", "HTML/ERB", "JavaScript", "TSX", "CSS", "HEEX", "Elixir"]
[language_servers.emmet-language-server.language_ids]
"HTML" = "html"
"PHP" = "php"
"ERB" = "eruby"
+"HTML/ERB" = "eruby"
"JavaScript" = "javascriptreact"
"TSX" = "typescriptreact"
"CSS" = "css"
+"HEEX" = "heex"
+"Elixir" = "heex"
@@ -94,3 +94,24 @@ for (const promptPath of modifiedPrompts) {
);
}
}
+
+const FIXTURE_CHANGE_ATTESTATION = "Changes to test fixtures are intentional and necessary.";
+
+const FIXTURES_PATHS = ["crates/assistant_tools/src/edit_agent/evals/fixtures"];
+
+const modifiedFixtures = danger.git.modified_files.filter((file) =>
+ FIXTURES_PATHS.some((fixturePath) => file.includes(fixturePath)),
+);
+
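+// Require an explicit attestation in the PR description whenever test fixtures change.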
+if (modifiedFixtures.length > 0) {
+ if (!body.includes(FIXTURE_CHANGE_ATTESTATION)) {
+ const modifiedFixturesStr = modifiedFixtures.map((path) => "`" + path + "`").join(", ");
+ fail(
+ [
+ `This PR modifies eval or test fixtures (${modifiedFixturesStr}), which are typically expected to remain unchanged.`,
+ "If these changes are intentional and required, please add the following attestation to your PR description: ",
+ `"${FIXTURE_CHANGE_ATTESTATION}"`,
+ ].join("\n\n"),
+ );
+ }
+}
@@ -11,24 +11,29 @@ fi
input_file=$1;
if [[ "$input_file" == *.json ]]; then
- version=$(cat $input_file | jq -r .app_version)
- channel=$(cat $input_file | jq -r .release_channel)
- target_triple=$(cat $input_file | jq -r .target)
+ version=$(cat $input_file | jq -r .panic.app_version)
+ channel=$(cat $input_file | jq -r .panic.release_channel)
+ target_triple=$(cat $input_file | jq -r .panic.target)
- which llvm-symbolizer rustfilt >dev/null || echo Need to install llvm-symbolizer and rustfilt
+ which llvm-symbolizer rustfilt >/dev/null || { echo "Need to install llvm-symbolizer and rustfilt"; exit 1; }
echo $channel;
mkdir -p target/dsyms/$channel
- dsym="$channel/zed-$version-$target_triple.dbg"
+ if [[ "$version" == "remote-server-"* ]]; then
+ version="${version#remote-server-}"
+ dsym="$channel/remote_server-$version-$target_triple.dbg"
+ else
+ dsym="$channel/zed-$version-$target_triple.dbg"
+ fi
if [[ ! -f target/dsyms/$dsym ]]; then
echo "Downloading $dsym..."
curl -o target/dsyms/$dsym.gz "https://zed-debug-symbols.nyc3.digitaloceanspaces.com/$dsym.gz"
gunzip target/dsyms/$dsym.gz
fi
- cat $input_file | jq -r .backtrace[] | sed s'/.*+//' | llvm-symbolizer --no-demangle --obj=target/dsyms/$dsym | rustfilt
+ cat $input_file | jq -r .panic.backtrace[] | sed s'/.*+//' | llvm-symbolizer --no-demangle --obj=target/dsyms/$dsym | rustfilt
else # ips file
@@ -111,7 +111,7 @@ sea-orm = { version = "1", features = ["runtime-tokio-rustls", "sqlx-postgres",
sea-query-binder = { version = "0.7", default-features = false, features = ["postgres-array", "sqlx-postgres", "sqlx-sqlite", "with-bigdecimal", "with-chrono", "with-json", "with-rust_decimal", "with-time", "with-uuid"] }
semver = { version = "1", features = ["serde"] }
serde = { version = "1", features = ["alloc", "derive", "rc"] }
-serde_json = { version = "1", features = ["preserve_order", "raw_value", "unbounded_depth"] }
+serde_json = { version = "1", features = ["alloc", "preserve_order", "raw_value", "unbounded_depth"] }
sha1 = { version = "0.10", features = ["compress"] }
simd-adler32 = { version = "0.3" }
smallvec = { version = "1", default-features = false, features = ["const_new", "serde", "union", "write"] }
@@ -244,7 +244,7 @@ sea-query-binder = { version = "0.7", default-features = false, features = ["pos
semver = { version = "1", features = ["serde"] }
serde = { version = "1", features = ["alloc", "derive", "rc"] }
serde_derive = { version = "1", features = ["deserialize_in_place"] }
-serde_json = { version = "1", features = ["preserve_order", "raw_value", "unbounded_depth"] }
+serde_json = { version = "1", features = ["alloc", "preserve_order", "raw_value", "unbounded_depth"] }
sha1 = { version = "0.10", features = ["compress"] }
simd-adler32 = { version = "0.3" }
smallvec = { version = "1", default-features = false, features = ["const_new", "serde", "union", "write"] }