From c6f08dea89efaf3a2acd01f9f8c0371c87b9af51 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 23 Oct 2024 13:12:28 -0400 Subject: [PATCH 01/76] v0.160.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8f5842d9e4d9943d24ebcde90533c90f13cc3a79..845cdb227f9540e578578bf459ca80269b0a3231 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14601,7 +14601,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.159.0" +version = "0.160.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 2feb110be4bcb40173154fd467dabca0d038d941..dda06ff7d2419bc118663764a865cba0d1723596 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.159.0" +version = "0.160.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From 8bfd27b00b8feab2eb145d5f756ea986470fcd02 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 23 Oct 2024 17:23:01 +0000 Subject: [PATCH 02/76] docs: Improve Markdown trailing whitespace section (#19630) --- docs/src/languages/markdown.md | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/docs/src/languages/markdown.md b/docs/src/languages/markdown.md index 14786ad4f2d89d95ee549a80c859def87b75ea5d..d9fdd29b5ede5ebaab4543dbb0467d8f092c8d55 100644 --- a/docs/src/languages/markdown.md +++ b/docs/src/languages/markdown.md @@ -21,13 +21,26 @@ def fib(n): ## Configuration -If you wish change the default language settings for Markdown files, perhaps to disable auto format on save or if your markdown relies upon trailing whitespace ` ` being converted to `
` you can add change these values in your `settings.json`: +### Format + +Zed supports using Prettier to automatically re-format Markdown documents. You can trigger this manually via the {#action editor::Format} action or via the {#kb editor::Format} keyboard shortcut. Alternately, you can automattically format by enabling [`format_on_save`](./configuring-zed.md#format-on-save) in your settings.json: ```json "languages": { "Markdown": { - "remove_trailing_whitespace_on_save": true, "format_on_save": "on" } }, ``` + +### Trailing Whitespace + +By default Zed will remove trailing whitespace on save. If you rely on invisible trailing whitespace being converted to `
` in Markdown files you can disable this behavior with: + +```json + "languages": { + "Markdown": { + "remove_trailing_whitespace_on_save": false + } + }, +``` From 9c0dba4ce1d8770339799bbd6ef0b470c28a7691 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 23 Oct 2024 14:32:43 -0400 Subject: [PATCH 03/76] Add a `SlashCommandResult` type alias (#19633) This PR adds a new `SlashCommandResult` type alias. We're going to be changing what slash commands can return in order to support streaming, so having this type alias in place will make that switch a bit more neat. Release Notes: - N/A --- crates/assistant/src/context.rs | 4 ++-- crates/assistant/src/context/context_tests.rs | 4 ++-- .../assistant/src/slash_command/auto_command.rs | 11 +++++++---- .../src/slash_command/cargo_workspace_command.rs | 8 +++++--- .../src/slash_command/context_server_command.rs | 7 ++++--- .../src/slash_command/default_command.rs | 8 +++++--- .../assistant/src/slash_command/delta_command.rs | 3 ++- .../src/slash_command/diagnostics_command.rs | 10 +++++++--- .../assistant/src/slash_command/docs_command.rs | 3 ++- .../assistant/src/slash_command/fetch_command.rs | 3 ++- .../assistant/src/slash_command/file_command.rs | 12 ++++++++---- crates/assistant/src/slash_command/now_command.rs | 3 ++- .../src/slash_command/project_command.rs | 4 ++-- .../assistant/src/slash_command/prompt_command.rs | 8 +++++--- .../assistant/src/slash_command/search_command.rs | 15 ++++++++------- .../src/slash_command/symbols_command.rs | 8 +++++--- crates/assistant/src/slash_command/tab_command.rs | 10 +++++++--- .../src/slash_command/terminal_command.rs | 3 ++- .../src/slash_command/workflow_command.rs | 10 +++++----- .../src/assistant_slash_command.rs | 4 +++- crates/extension/src/extension_slash_command.rs | 3 ++- 21 files changed, 87 insertions(+), 54 deletions(-) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d337606dfa304d7fb00be4c80b52ab3b0398345c..d2b80ca22491a001b84772f60ee78d97f58f8aab 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -7,7 +7,7 @@ use crate::{ }; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ - SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, + SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult, }; use assistant_tool::ToolRegistry; use client::{self, proto, telemetry::Telemetry}; @@ -1677,7 +1677,7 @@ impl Context { pub fn insert_command_output( &mut self, command_range: Range, - output: Task>, + output: Task, ensure_trailing_newline: bool, expand_result: bool, cx: &mut ModelContext, diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index a11cfc375d21a4aab960fd903b3b456fb363fc17..4d866b4d8b9bb5cf52bd1ee3d0fbd0afb36a0134 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -6,7 +6,7 @@ use crate::{ use anyhow::Result; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, - SlashCommandRegistry, + SlashCommandRegistry, SlashCommandResult, }; use collections::HashSet; use fs::FakeFs; @@ -1416,7 +1416,7 @@ impl SlashCommand for FakeSlashCommand { _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, - ) -> Task> { + ) -> Task { Task::ready(Ok(SlashCommandOutput { text: format!("Executed fake command: {}", self.0), sections: vec![], diff --git 
a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index 14bbb7c8412b411975003f74dd70c15ddb7f8ea7..352b5a3ac917ea7039cd736e99322a83e928c6e8 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -1,7 +1,8 @@ -use super::create_label_for_command; -use super::{SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use feature_flags::FeatureFlag; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext, Task, WeakView}; @@ -17,6 +18,8 @@ use ui::{BorrowAppContext, WindowContext}; use util::ResultExt; use workspace::Workspace; +use crate::slash_command::create_label_for_command; + pub struct AutoSlashCommandFeatureFlag; impl FeatureFlag for AutoSlashCommandFeatureFlag { @@ -92,7 +95,7 @@ impl SlashCommand for AutoCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let Some(workspace) = workspace.upgrade() else { return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); }; diff --git a/crates/assistant/src/slash_command/cargo_workspace_command.rs b/crates/assistant/src/slash_command/cargo_workspace_command.rs index baf16d7f014cb264df7530936c343c9540782c90..04fa408717bf3dfe5041bfd99ceb87cc290095b7 100644 --- a/crates/assistant/src/slash_command/cargo_workspace_command.rs +++ b/crates/assistant/src/slash_command/cargo_workspace_command.rs @@ -1,6 +1,8 @@ -use super::{SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Context, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use fs::Fs; use gpui::{AppContext, Model, Task, WeakView}; use language::{BufferSnapshot, LspAdapterDelegate}; @@ -123,7 +125,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let output = workspace.update(cx, |workspace, cx| { let project = workspace.project().clone(); let fs = workspace.project().read(cx).fs().clone(); diff --git a/crates/assistant/src/slash_command/context_server_command.rs b/crates/assistant/src/slash_command/context_server_command.rs index 9e6c4b7718889c2bc6fc1a6b97ff2fbafd9d32cc..b749f9e4cd9a5afbb403247305d304b4a79eb459 100644 --- a/crates/assistant/src/slash_command/context_server_command.rs +++ b/crates/assistant/src/slash_command/context_server_command.rs @@ -1,8 +1,7 @@ -use super::create_label_for_command; use anyhow::{anyhow, Result}; use assistant_slash_command::{ AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput, - SlashCommandOutputSection, + SlashCommandOutputSection, SlashCommandResult, }; use collections::HashMap; use context_servers::{ @@ -17,6 +16,8 @@ use text::LineEnding; use ui::{IconName, SharedString}; use workspace::Workspace; +use crate::slash_command::create_label_for_command; + pub struct ContextServerSlashCommand { server_id: String, prompt: Prompt, @@ -128,7 +129,7 @@ impl SlashCommand for ContextServerSlashCommand { _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let server_id = self.server_id.clone(); let prompt_name = 
self.prompt.name.clone(); diff --git a/crates/assistant/src/slash_command/default_command.rs b/crates/assistant/src/slash_command/default_command.rs index 4199840300a24c183579fa07c46c964d83997fdd..2c956f8ca66d9b2ad574358ad2aefafe09ccc1a3 100644 --- a/crates/assistant/src/slash_command/default_command.rs +++ b/crates/assistant/src/slash_command/default_command.rs @@ -1,7 +1,9 @@ -use super::{SlashCommand, SlashCommandOutput}; use crate::prompt_library::PromptStore; use anyhow::{anyhow, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use gpui::{Task, WeakView}; use language::{BufferSnapshot, LspAdapterDelegate}; use std::{ @@ -48,7 +50,7 @@ impl SlashCommand for DefaultSlashCommand { _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let store = PromptStore::global(cx); cx.background_executor().spawn(async move { let store = store.await?; diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs index 6f697ecbb9bcba2d72657ba79a88610836eff1dc..a17c5d739c68fa4e0bb70685932dc8c6173facdd 100644 --- a/crates/assistant/src/slash_command/delta_command.rs +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -2,6 +2,7 @@ use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand}; use anyhow::Result; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, }; use collections::HashSet; use futures::future; @@ -48,7 +49,7 @@ impl SlashCommand for DeltaSlashCommand { workspace: WeakView, delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let mut paths = HashSet::default(); let mut file_command_old_outputs = Vec::new(); let mut file_command_new_outputs = Vec::new(); diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 146a4e5d366dd36cf58544f94f50cfee7da1b32c..54be2219fff0bd1c5ab813c2b409325c7fa809a1 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -1,6 +1,8 @@ -use super::{create_label_for_command, SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use fuzzy::{PathMatch, StringMatchCandidate}; use gpui::{AppContext, Model, Task, View, WeakView}; use language::{ @@ -19,6 +21,8 @@ use util::paths::PathMatcher; use util::ResultExt; use workspace::Workspace; +use crate::slash_command::create_label_for_command; + pub(crate) struct DiagnosticsSlashCommand; impl DiagnosticsSlashCommand { @@ -167,7 +171,7 @@ impl SlashCommand for DiagnosticsSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let Some(workspace) = workspace.upgrade() else { return Task::ready(Err(anyhow!("workspace was dropped"))); }; diff --git a/crates/assistant/src/slash_command/docs_command.rs b/crates/assistant/src/slash_command/docs_command.rs index 399ede9d9995499e07d6e827dc30c089c0789cf1..92c3cd1977b9bc2ea6ecdcdb5894dfab9b40e2cd 100644 --- 
a/crates/assistant/src/slash_command/docs_command.rs +++ b/crates/assistant/src/slash_command/docs_command.rs @@ -6,6 +6,7 @@ use std::time::Duration; use anyhow::{anyhow, bail, Result}; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, }; use gpui::{AppContext, BackgroundExecutor, Model, Task, WeakView}; use indexed_docs::{ @@ -274,7 +275,7 @@ impl SlashCommand for DocsSlashCommand { _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { if arguments.is_empty() { return Task::ready(Err(anyhow!("missing an argument"))); }; diff --git a/crates/assistant/src/slash_command/fetch_command.rs b/crates/assistant/src/slash_command/fetch_command.rs index 3a01bb645a36bb21428a662874ca41c04cda3481..9b61c547dbd6598f6390d024f20fb91888603381 100644 --- a/crates/assistant/src/slash_command/fetch_command.rs +++ b/crates/assistant/src/slash_command/fetch_command.rs @@ -6,6 +6,7 @@ use std::sync::Arc; use anyhow::{anyhow, bail, Context, Result}; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, }; use futures::AsyncReadExt; use gpui::{Task, WeakView}; @@ -133,7 +134,7 @@ impl SlashCommand for FetchSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let Some(argument) = arguments.first() else { return Task::ready(Err(anyhow!("missing URL"))); }; diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index 6da56d064178adcad9e9cfb6960660667bef5e9d..51d0b33ba26240789fbc1003785cae359040e0fa 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -1,6 +1,8 @@ -use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Context as _, Result}; -use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput, + SlashCommandOutputSection, SlashCommandResult, +}; use fuzzy::PathMatch; use gpui::{AppContext, Model, Task, View, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate}; @@ -16,6 +18,8 @@ use ui::prelude::*; use util::ResultExt; use workspace::Workspace; +use crate::slash_command::diagnostics_command::collect_buffer_diagnostics; + pub(crate) struct FileSlashCommand; impl FileSlashCommand { @@ -181,7 +185,7 @@ impl SlashCommand for FileSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let Some(workspace) = workspace.upgrade() else { return Task::ready(Err(anyhow!("workspace was dropped"))); }; @@ -198,7 +202,7 @@ fn collect_files( project: Model, glob_inputs: &[String], cx: &mut AppContext, -) -> Task> { +) -> Task { let Ok(matchers) = glob_inputs .into_iter() .map(|glob_input| { diff --git a/crates/assistant/src/slash_command/now_command.rs b/crates/assistant/src/slash_command/now_command.rs index 221ba05cafc6238c2eb84566f057dcb146805c4e..40bc29f27ddffee49eed1ea2663d1f561db588dc 100644 --- a/crates/assistant/src/slash_command/now_command.rs +++ b/crates/assistant/src/slash_command/now_command.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use anyhow::Result; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, 
SlashCommandOutputSection, + SlashCommandResult, }; use chrono::Local; use gpui::{Task, WeakView}; @@ -48,7 +49,7 @@ impl SlashCommand for NowSlashCommand { _workspace: WeakView, _delegate: Option>, _cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let now = Local::now(); let text = format!("Today is {now}.", now = now.to_rfc2822()); let range = 0..text.len(); diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 58fef8f338771dbc2ad9dc5d24cbdcfc1aa0df78..e55699b026826a85dddf49019a31ad74e54a9eff 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -4,7 +4,7 @@ use super::{ }; use crate::PromptBuilder; use anyhow::{anyhow, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection, SlashCommandResult}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView, WindowContext}; use language::{Anchor, CodeLabel, LspAdapterDelegate}; @@ -76,7 +76,7 @@ impl SlashCommand for ProjectSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let model_registry = LanguageModelRegistry::read_global(cx); let current_model = model_registry.active_model(); let prompt_builder = self.prompt_builder.clone(); diff --git a/crates/assistant/src/slash_command/prompt_command.rs b/crates/assistant/src/slash_command/prompt_command.rs index 978c6d7504caeb66ccb9729de9b65a4a6762b1f5..dc803293823fb3974006a243ee4152499289a56a 100644 --- a/crates/assistant/src/slash_command/prompt_command.rs +++ b/crates/assistant/src/slash_command/prompt_command.rs @@ -1,7 +1,9 @@ -use super::{SlashCommand, SlashCommandOutput}; use crate::prompt_library::PromptStore; use anyhow::{anyhow, Context, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use gpui::{Task, WeakView}; use language::{BufferSnapshot, LspAdapterDelegate}; use std::sync::{atomic::AtomicBool, Arc}; @@ -61,7 +63,7 @@ impl SlashCommand for PromptSlashCommand { _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let title = arguments.to_owned().join(" "); if title.trim().is_empty() { return Task::ready(Err(anyhow!("missing prompt name"))); diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index c7183e95bbc8537b1560e1f165efcc099a02b709..999fe252becc74875fbdc944f1f10a819da3abbf 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -1,10 +1,8 @@ -use super::{ - create_label_for_command, - file_command::{build_entry_output_section, codeblock_fence_for_path}, - SlashCommand, SlashCommandOutput, -}; use anyhow::Result; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView}; use language::{CodeLabel, LspAdapterDelegate}; @@ -16,6 +14,9 @@ use std::{ use ui::{prelude::*, IconName}; use workspace::Workspace; +use crate::slash_command::create_label_for_command; +use 
crate::slash_command::file_command::{build_entry_output_section, codeblock_fence_for_path}; + pub(crate) struct SearchSlashCommandFeatureFlag; impl FeatureFlag for SearchSlashCommandFeatureFlag { @@ -63,7 +64,7 @@ impl SlashCommand for SearchSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let Some(workspace) = workspace.upgrade() else { return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); }; diff --git a/crates/assistant/src/slash_command/symbols_command.rs b/crates/assistant/src/slash_command/symbols_command.rs index 887b57ba9956c72ff1b148907b1b6a7710250514..d28b53c1a13895610558468c09bbab0eed3f5173 100644 --- a/crates/assistant/src/slash_command/symbols_command.rs +++ b/crates/assistant/src/slash_command/symbols_command.rs @@ -1,6 +1,8 @@ -use super::{SlashCommand, SlashCommandOutput}; use anyhow::{anyhow, Context as _, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use editor::Editor; use gpui::{Task, WeakView}; use language::{BufferSnapshot, LspAdapterDelegate}; @@ -46,7 +48,7 @@ impl SlashCommand for OutlineSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let output = workspace.update(cx, |workspace, cx| { let Some(active_item) = workspace.active_item(cx) else { return Task::ready(Err(anyhow!("no active tab"))); diff --git a/crates/assistant/src/slash_command/tab_command.rs b/crates/assistant/src/slash_command/tab_command.rs index 0bff4730d8e5c8c21bcf8c5ba37b670e876429d8..23c3b64b38505cd63455446704d9e2d14e4c2dcf 100644 --- a/crates/assistant/src/slash_command/tab_command.rs +++ b/crates/assistant/src/slash_command/tab_command.rs @@ -1,6 +1,8 @@ -use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput}; use anyhow::{Context, Result}; -use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; use collections::{HashMap, HashSet}; use editor::Editor; use futures::future::join_all; @@ -14,6 +16,8 @@ use ui::{ActiveTheme, WindowContext}; use util::ResultExt; use workspace::Workspace; +use crate::slash_command::file_command::append_buffer_to_output; + pub(crate) struct TabSlashCommand; const ALL_TABS_COMPLETION_ITEM: &str = "all"; @@ -132,7 +136,7 @@ impl SlashCommand for TabSlashCommand { workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let tab_items_search = tab_items_for_queries( Some(workspace), arguments, diff --git a/crates/assistant/src/slash_command/terminal_command.rs b/crates/assistant/src/slash_command/terminal_command.rs index 1d4959fb1995720b939bb93f8121466f6ba4a58d..7516b275ac8b819b22614ca6cc4cbb31e44c2440 100644 --- a/crates/assistant/src/slash_command/terminal_command.rs +++ b/crates/assistant/src/slash_command/terminal_command.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use anyhow::Result; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, }; use gpui::{AppContext, Task, View, WeakView}; use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate}; @@ -62,7 +63,7 @@ impl SlashCommand for TerminalSlashCommand { workspace: WeakView, _delegate: Option>, cx: 
&mut WindowContext, - ) -> Task> { + ) -> Task { let Some(workspace) = workspace.upgrade() else { return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); }; diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs index 50c0e6cbc60e6011b4d60d339e4cc08abc70d071..1379eb5e803a633bcaf63093bdee6cd5b35751c6 100644 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ b/crates/assistant/src/slash_command/workflow_command.rs @@ -1,18 +1,18 @@ -use crate::prompts::PromptBuilder; -use std::sync::Arc; - use std::sync::atomic::AtomicBool; +use std::sync::Arc; use anyhow::Result; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, }; use gpui::{Task, WeakView}; use language::{BufferSnapshot, LspAdapterDelegate}; use ui::prelude::*; - use workspace::Workspace; +use crate::prompts::PromptBuilder; + pub(crate) struct WorkflowSlashCommand { prompt_builder: Arc, } @@ -60,7 +60,7 @@ impl SlashCommand for WorkflowSlashCommand { _workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let prompt_builder = self.prompt_builder.clone(); cx.spawn(|_cx| async move { let text = prompt_builder.generate_workflow_prompt()?; diff --git a/crates/assistant_slash_command/src/assistant_slash_command.rs b/crates/assistant_slash_command/src/assistant_slash_command.rs index 36e229d49a246d46055b618497c0892638ade427..90e47690a83d930ce67a79861035784fee70c5ef 100644 --- a/crates/assistant_slash_command/src/assistant_slash_command.rs +++ b/crates/assistant_slash_command/src/assistant_slash_command.rs @@ -56,6 +56,8 @@ pub struct ArgumentCompletion { pub replace_previous_arguments: bool, } +pub type SlashCommandResult = Result; + pub trait SlashCommand: 'static + Send + Sync { fn name(&self) -> String; fn label(&self, _cx: &AppContext) -> CodeLabel { @@ -87,7 +89,7 @@ pub trait SlashCommand: 'static + Send + Sync { // perhaps another kind of delegate is needed here. delegate: Option>, cx: &mut WindowContext, - ) -> Task>; + ) -> Task; } pub type RenderFoldPlaceholder = Arc< diff --git a/crates/extension/src/extension_slash_command.rs b/crates/extension/src/extension_slash_command.rs index 3dfbc4c03d9bb00c4eb8e053c753bf7e1f48b488..e9725f1ae423c4805d678d84a34841da01db0dcb 100644 --- a/crates/extension/src/extension_slash_command.rs +++ b/crates/extension/src/extension_slash_command.rs @@ -3,6 +3,7 @@ use std::sync::{atomic::AtomicBool, Arc}; use anyhow::{anyhow, Result}; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, }; use futures::FutureExt; use gpui::{Task, WeakView, WindowContext}; @@ -87,7 +88,7 @@ impl SlashCommand for ExtensionSlashCommand { _workspace: WeakView, delegate: Option>, cx: &mut WindowContext, - ) -> Task> { + ) -> Task { let arguments = arguments.to_owned(); let output = cx.background_executor().spawn(async move { self.extension From 291af664e1a3c059b44894e30d6d624068f3c2d9 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 23 Oct 2024 19:13:52 +0000 Subject: [PATCH 04/76] Switch to Anthropic -latest tags (#19615) - Closes: https://github.com/zed-industries/zed/issues/19609 Switches us to using `-latest` tags with Anthropic models instead of pinning to a specific date version. 
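Anyone who prefers to stay pinned to a dated snapshot can still do so by listing it as a custom model in their settings. For example (a fragment mirroring the `docs/src/assistant/configuration.md` change below; the surrounding settings structure is omitted):

```json
"anthropic": {
  "available_models": [
    {
      "name": "claude-3-5-sonnet-20240620",
      "display_name": "Sonnet 2024-June",
      "max_tokens": 128000
    }
  ]
}
```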
See: [Anthropic Model Docs](https://docs.anthropic.com/en/docs/about-claude/models) This is a no-op for: - Claude 3 Opus (`claude-3-opus-20240229`) - Claude 3 Sonnet (`claude-3-sonnet-20240229`) - Claude 3 Haiku (`claude-3-haiku-20240307`) For Claude 3.5 Sonnet this will update us from `claude-3-5-sonnet-20240620` to `claude-3-5-sonnet-20241022`. We will also pickup any subsequent model updates automatically when Anthropic updates the `latest` tag. This matches the behavior for OpenAI where use `gpt-4o` as the model_name and not `gpt-4o-2024-08-06`. --- crates/anthropic/src/anthropic.rs | 16 ++++++++-------- crates/language_model/src/provider/anthropic.rs | 2 +- docs/src/assistant/configuration.md | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 08c8f27bd9027667268ab11cb37f2672e587c575..cc82380fa24920fa41e66834e5463b9014ec0323 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -29,13 +29,13 @@ pub struct AnthropicModelCacheConfiguration { #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] pub enum Model { #[default] - #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")] + #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")] Claude3_5Sonnet, - #[serde(rename = "claude-3-opus", alias = "claude-3-opus-20240229")] + #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")] Claude3Opus, - #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-20240229")] + #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")] Claude3Sonnet, - #[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-20240307")] + #[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")] Claude3Haiku, #[serde(rename = "custom")] Custom { @@ -69,10 +69,10 @@ impl Model { pub fn id(&self) -> &str { match self { - Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620", - Model::Claude3Opus => "claude-3-opus-20240229", - Model::Claude3Sonnet => "claude-3-sonnet-20240229", - Model::Claude3Haiku => "claude-3-haiku-20240307", + Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest", + Model::Claude3Opus => "claude-3-opus-latest", + Model::Claude3Sonnet => "claude-3-sonnet-latest", + Model::Claude3Haiku => "claude-3-haiku-latest", Self::Custom { name, .. } => name, } } diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 86538bec49172d88aa87375f6335fddac94ea27c..fe88c73b90deb6ee8a7af07497b0b59cab1fd7a5 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -40,7 +40,7 @@ pub struct AnthropicSettings { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct AvailableModel { - /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-20240620 + /// The model's name in the Anthropic API. e.g. claude-3-5-sonnet-latest, claude-3-opus-20240229, etc pub name: String, /// The model's name in Zed's UI, such as in the model selector dropdown menu in the assistant panel. 
pub display_name: Option, diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index b4d364dc20743ddcec339147ef88803df428ffc4..1081dc94714bfc9089cb1cdb2db9d55ae16011c8 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -49,8 +49,8 @@ You can add custom models to the Anthropic provider by adding the following to y "anthropic": { "available_models": [ { - "name": "some-model", - "display_name": "some-model", + "name": "claude-3-5-sonnet-20240620", + "display_name": "Sonnet 2024-June", "max_tokens": 128000, "max_output_tokens": 2560, "cache_configuration": { From e87d5e145d897083630c403272663a63dfc0ec8a Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 23 Oct 2024 22:19:06 +0300 Subject: [PATCH 05/76] Use zstd without dynamic linking due to musl usage (#19627) Due to leaning towards `musl` builds, unit features for `zstd` and link it statically too for Zed. https://github.com/gyscos/zstd-rs/blob/bfe1e34f593c2427e9dc35f1b98d06788174711f/zstd-safe/zstd-sys/build.rs#L260 shows that `ZSTD_SYS_USE_PKG_CONFIG` env var can be used to return this behavior. Release Notes: - N/A --- Cargo.lock | 1 - crates/zed/Cargo.toml | 2 -- 2 files changed, 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 845cdb227f9540e578578bf459ca80269b0a3231..207e34e49be4436891406f4cac9508bcec5ff80c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14709,7 +14709,6 @@ dependencies = [ "winresource", "workspace", "zed_actions", - "zstd", ] [[package]] diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index dda06ff7d2419bc118663764a865cba0d1723596..d80db98393d95745df8f5572ee878f3a455d8446 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -125,8 +125,6 @@ winresource = "0.1" [target.'cfg(target_os = "linux")'.dependencies] ashpd.workspace = true -# We don't use zstd in the zed crate, but we want to add this feature when compiling a desktop build of Zed -zstd = { workspace = true, features = [ "pkg-config" ] } [dev-dependencies] call = { workspace = true, features = ["test-support"] } From eee91f3f1b489390d6ae930738b14a627e93151f Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 23 Oct 2024 14:25:27 -0600 Subject: [PATCH 06/76] docs: Update SSH docs (#19339) Update of the SSH remoting docs Release Notes: - N/A --- assets/settings/default.json | 4 +- docs/src/remote-development.md | 117 ++++++++++++++------------------- 2 files changed, 52 insertions(+), 69 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index da2636f93bb0cd8f53be1d346760d016e316cbf0..8da7abe18fafa12511824ca3ec52586fb30c34b4 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1099,13 +1099,13 @@ // } "command_aliases": {}, // ssh_connections is an array of ssh connections. - // By default this setting is null, which disables the direct ssh connection support. // You can configure these from `project: Open Remote` in the command palette. // Zed's ssh support will pull configuration from your ~/.ssh too. 
// Examples: // [ // { // "host": "example-box", + // // "port": 22, "username": "test", "args": ["-i", "/home/user/.ssh/id_rsa"] // "projects": [ // { // "paths": ["/home/user/code/zed"] @@ -1113,7 +1113,7 @@ // ] // } // ] - "ssh_connections": null, + "ssh_connections": [], // Configures the Context Server Protocol binaries // // Examples: diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index bd0bb26e5e32e8fb304544352ea294740c8905de..aacccf62278fa2d09929107257faaf2c0ba1fdbe 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -2,109 +2,92 @@ Remote Development allows you to code at the speed of thought, even when your codebase is not on your local machine. You use Zed locally so the UI is immediately responsive, but offload heavy computation to the development server so that you can work effectively. -> **Note:** Remoting is still "alpha". We have several changes we would like to make before it is fully released. +> **Note:** Remoting is still "beta". We are still refining the reliability and performance. ## Overview -Remote development requires running two instances of Zed. A headless instance on the remote machine, and the editor interface on your local computer. All configuration is done on your local computer. +Remote development requires two computers, your local machine that runs the Zed UI and the remote server which runs a Zed headless server. The two communicate over SSH, so you will need to be able to SSH from your local machine into the remote server to use this feature. -Currently the two instances connect via Zed's servers, but we intend to build peer to peer communication before the feature is fully released. +> **Note:** The original version of remote development sent traffic via Zed's servers. As of Zed v0.157 you can no-longer use this mode. ## Setup -1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview). +1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview). You need at least Zed v0.159. 1. Open the remote projects dialogue with `cmd-shift-p remote`. -1. Click "New Server". -1. Choose whether to setup via SSH, or to follow the manual setup. - > **Note:** With both options your laptop and the remote machine will communicate - > via https://collab.zed.dev/, so you will need outbound internet access on the remote machine. -1. On your laptop you can now open folders on the remote machine. +1. Click "New Server" and enter the command you use to ssh into the server. See [Supported SSH options](#supported-ssh-options) for options you can pass. +1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, it will download the latest version of the Zed server and upload it to the remote over SSH. +1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server. > **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos. -## Troubleshooting +For simple cases where you don't need any SSH arguments, you can run `zed ssh://[@][:]/` to open a remote folder/file directly. -### UI is not showing up - -You need to be on a relatively recent Zed (v0.145.0 or later). 
- -### SSH connections - -If you chose to connect via SSH, the command you specify will be run in a Zed terminal given you an opportunity to type any passwords/keyphrases etc. that you need. -Once a connection is established, Zed will be downloaded and installed to `~/.local/bin/zed` on the remote machine, and run. - -If you don't see any output from the Zed command, it is likely that Zed is crashing -on startup. You can troubleshoot this by switching to manual mode and passing the `--foreground` flag. Please [file a bug](https://github.com/zed-industries/zed) so we can debug it together. +## Supported platforms -If you are trying to connect to a platform like GitHub Codespaces or Google Cloud, you may want to first make sure that your SSH configuration is set up correctly. Once you can `ssh X` to connect to the machine, then Zed will be able to connect. +The remote machine must be able to run Zed's server. The following platforms should work, though note that we have not exhaustively tested every Linux distribution: -> **Note:** In an earlier version of remoting, we supported typing in `gh cs ssh` or `gcloud compute ssh` directly. This is no longer supported. Instead you should make sure your SSH configuration is up to date with `gcloud compute ssh --config` or `gh cs ssh --config`, or use Manual setup mode if you cannot ssh directly to the machine. +- macOS Catalina or later (Intel or Apple Silicon) +- Linux (x86_64 or arm64, we do not yet support 32-bit platforms) +- Windows is not yet supported. -### zed --dev-server-token isn't connecting +## Settings -There are a few likely causes of failure: +When opening a remote project there are three relevant settings locations: -- `zed --dev-server-token` runs but outputs nothing. This is probably because the Zed background process is crashing on startup. Try running `zed --dev-server-token XX --foreground` to see any output, and [file a bug](https://github.com/zed-industries/zed) so we can debug it together. -- `zed --dev-server-token` outputs something like "Connection refused" or "Unauthorized" and immediately exits. This is likely due to issues making outbound HTTP requests to https://collab.zed.dev from your host. You can try to debug this with `curl https://collab.zed.dev`, but we have seen cases where curl is whitelisted, but other binaries are not allowed network access. -- `zed --dev-server-token` outputs "Zed is already running". If you are editing an existing server, it is possible that clicking "Connect" a second time will work, but if not you will have to manually log into the server and kill the Zed process. +- The local Zed settings (in `~/.zed/settings.json` on macOS or `~/.config/zed/settings.json` on Linux) on your local machine. +- The server Zed settings (in the same place) on the remote server. +- The project settings (in `.zed/settings.json` or `.editorconfig` of your project) -## Supported platforms +Both the local Zed and the server Zed read the project settings, but they are not aware of the other's main settings.json. -The remote machine must be able to run Zed. The following platforms should work, though note that we have not exhaustively tested every Linux distribution: +Depending on the kind of setting you want to make, which settings file you should use: -- macOS Catalina or later (Intel or Apple Silicon) -- Linux (x86_64 or arm64, we do not yet support 32-bit platforms). You must have `glibc` installed at version 2.29 (released in 2019) or greater and available globally. -- Windows is not yet supported. 
+- Project settings should be used for things that affect the project: indentation settings, which formatter / language server to use etc. +- Server settings should be used for things that affect the server: paths to language servers, etc. +- Local settings should be used for things that affect the UI: font size, etc. -## Settings and extensions +## Initializing the remote server -> **Note:** This may change as the alpha program continues. +Once you provide the SSH options, Zed shells out to `ssh` on your local machine to create a ControlMaster connection with the options you provide. - +Any prompts that SSH needs will be shown in the UI, so you can verify host keys, type key passwords, etc. -You can edit the settings file on the remote instance. To do so, add a new project to your server in the directory `~/.config/zed`. You can create a file called `settings.json` if it does not yet exist. +Once the master connection is established, Zed will check to see if the remote server binary is present in `~/.zed_server` on the remote, and that its version matches the current version of Zed that you're using. -Note that this is most useful for configuring language servers, as any UI related settings do not apply. +If it is not there or the version mismatches, Zed will try to download the latest version. By default, it will download from `https://zed.dev` directly, but if you set: `{"remote_server": {"download":false}}` in your local settings, it will download the binary to your local machine and then upload it to the remote server. -If you'd like to install language-server extensions, you can add them to the list of `auto_installed_extensions`. Again you don't need to do this to get syntax highlighting (which is handled by the local zed). +## Maintaining the SSH connection -```json -{ - "auto_install_extensions": { - "java": true - } -} -``` +Once the server is initialized. Zed will create new SSH connections (reusing the existing ControlMaster) to run the remote development server. -## Known Limitations +Each connection tries to run the development server in proxy mode. This mode will start the daemon if it is not running, and reconnect to it if it is. This way when your connection drops and is restarted, you can continue to work without interruption. -- You can't use the Terminal or Tasks if you choose "Manual Connection" -- You can't run `zed` in headless mode and in GUI mode at the same time on the same machine. -- You can't open files from the remote Terminal by typing the `zed` command. +In the case that reconnecting fails, the daemon will not be re-used. That said, unsaved changes are by default persisted locally, so that you do not lose work. You can always reconnect to the project at a later date and Zed will restore unsaved changes. -## Feedback +If you are struggling with connection issues, you should be able to see more information in the Zed log `cmd-shift-p Open Log`. If you are seeing things that are unexpected, please file a [GitHub issue](https://github.com/zed-industries/zed/issues/new) or reach out in the #remoting-feedback channel in the [Zed Discord](https://discord.gg/zed-community). -Please join the #remoting-feedback channel in the [Zed Discord](https://discord.gg/zed-community). +## Supported SSH Options -# Direct SSH Connections +Under the hood, Zed shells out to the `ssh` binary to connect to the remote server. 
We create one SSH control master per project, and use then use that to multiplex ssh connections for the Zed protocol itself, any terminals you open and tasks you run. We read settings from your ssh config file, but if you want to specify additional options to the ssh control master you can configure Zed to set them. -The current alpha release of Zed always connects via our servers. This was to get experience building the feature on top of our existing collaboration support. We plan to move to direct SSH connections for any machine that can be SSH'd into. +When typing in the "New Server" dialogue, you can use bash-style quoting to pass options containing a space. Once you have created a server it will be added to the `"ssh_connections": []` array in your settings file. You can edit the settings file directly to make changes to SSH connections. -We are working on a direct SSH connection feature, which you can try out if you'd like. +Supported options: -> **Note:** Direct SSH support does not support most features yet! You cannot use project search, language servers, or basically do anything except edit files... +- `-p` / `-l` - these are equivalent to passing the port and the username in the host string. +- `-L` / `-R` for port forwarding +- `-i` - to use a specific key file +- `-o` - to set custom options +- `-J` / `-w` - to proxy the SSH connection +- And also... `-4`, `-6`, `-A`, `-a`, `-C`, `-K`, `-k`, `-X`, `-x`, `-Y`, `-y`, `-B`, `-b`, `-c`, `-D`, `-I`, `-i`, `-J`, `-l`, `-m`, `-o`, `-P`, `-p`, `-w` -To try this out you can either from the command line run: +Note that we deliberately disallow some options (for example `-t` or `-T`) that Zed will set for you. -```sh -zed ssh://user@host:port/path/to/project -``` +## Known Limitations -Or you can (in your settings file) add: +- Zed extensions are not yet supported on remotes, so languages that need them for support do not work. +- You can't open files from the remote Terminal by typing the `zed` command. +- Zed does not yet support automatic port-forwarding. You can use `-R` and `-L` in your SSH arguments for now. -```json -"ssh_connections": [] -``` +## Feedback -And then from the command palette choose `projects: Open Remote` and configure an SSH connection from there. +Please join the #remoting-feedback channel in the [Zed Discord](https://discord.gg/zed-community). From 013d2d52fdb6d6cb7e832c9cce799c9cff82e4d3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 24 Oct 2024 00:45:58 +0300 Subject: [PATCH 07/76] Update Rust crate anyhow to v1.0.91 (#19640) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [anyhow](https://redirect.github.com/dtolnay/anyhow) | workspace.dependencies | patch | `1.0.89` -> `1.0.91` | --- ### Release Notes
dtolnay/anyhow (anyhow) ### [`v1.0.91`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.91) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.90...1.0.91) - Ensure OUT_DIR is left with deterministic contents after build script execution ([#​388](https://redirect.github.com/dtolnay/anyhow/issues/388)) ### [`v1.0.90`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.90) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.89...1.0.90) - Documentation improvements
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 207e34e49be4436891406f4cac9508bcec5ff80c..271af64ff5faa240ffe9578b53e55c097702169e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -261,9 +261,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4" [[package]] name = "anyhow" -version = "1.0.89" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" +checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" [[package]] name = "approx" From 510c71d41b8850191be0b575ea2fc6e0a000f0a8 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 19:00:28 -0400 Subject: [PATCH 08/76] Pin crate-ci/typos action to 8e6a428 (#19635) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [crate-ci/typos](https://redirect.github.com/crate-ci/typos) | action | pinDigest | -> `8e6a428` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 2 +- .github/workflows/docs.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 059787aac765b2b18ef07f74129666f12898d284..6114895412518e09dfa1a2094adaf0c4b3f094d6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -84,7 +84,7 @@ jobs: uses: ./.github/actions/check_style - name: Check for typos - uses: crate-ci/typos@v1.24.6 + uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6 with: config: ./typos.toml diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 437e7f96a678f489a1a72c058b2c0639cf929af6..7aebf6adbcb4ac06776980193d79d370feba8624 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -31,7 +31,7 @@ jobs: } - name: Check for Typos with Typos-CLI - uses: crate-ci/typos@v1.24.6 + uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6 with: config: ./typos.toml files: ./docs/ From d30361537ea6432336e19cb624c5d4ee6f926f27 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 23 Oct 2024 21:26:50 -0400 Subject: [PATCH 09/76] assistant: Update `SlashCommand` trait with streaming return type (#19652) This PR updates the `SlashCommand` trait to use a streaming return type. This change is just at the trait layer. The goal here is to decouple changing the trait's API while preserving behavior on either side. The `SlashCommandOutput` type now has two methods for converting two and from a stream to use in cases where we're not yet doing streaming. On the `SlashCommand` implementer side, the implements can call `to_event_stream` to produce a stream of events based off the `SlashCommandOutput`. On the slash command consumer side we use `SlashCommandOutput::from_event_stream` to convert a stream of events back into a `SlashCommandOutput`. The `/file` slash command has been updated to emit `SlashCommandEvent`s directly in order for it to work properly. 
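For commands that do not stream yet, the two adapter methods keep the change mechanical. Roughly (a condensed sketch assembled from the hunks below, with signatures and imports elided, so it is illustrative rather than compilable on its own):

```rust
// Implementer side: a command that still builds its whole output up front
// wraps it with `to_event_stream()` to satisfy the new streaming return type.
fn run(/* ... */) -> Task<SlashCommandResult> {
    Task::ready(Ok(SlashCommandOutput {
        text: "Executed fake command".into(),
        sections: Vec::new(),
        run_commands_in_text: false,
    }
    .to_event_stream()))
}

// Consumer side (e.g. `Context::insert_command_output`): collect the event
// stream back into a single `SlashCommandOutput` before applying it.
let output = SlashCommandOutput::from_event_stream(events).await?;
```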
Release Notes: - N/A --------- Co-authored-by: Max --- Cargo.lock | 2 + crates/assistant/src/context.rs | 18 +- crates/assistant/src/context/context_tests.rs | 6 +- .../src/slash_command/auto_command.rs | 3 +- .../slash_command/cargo_workspace_command.rs | 3 +- .../slash_command/context_server_command.rs | 3 +- .../src/slash_command/default_command.rs | 3 +- .../src/slash_command/delta_command.rs | 31 +- .../src/slash_command/diagnostics_command.rs | 6 +- .../src/slash_command/docs_command.rs | 3 +- .../src/slash_command/fetch_command.rs | 3 +- .../src/slash_command/file_command.rs | 114 ++++-- .../src/slash_command/now_command.rs | 3 +- .../src/slash_command/project_command.rs | 3 +- .../src/slash_command/prompt_command.rs | 3 +- .../src/slash_command/search_command.rs | 1 + .../src/slash_command/symbols_command.rs | 3 +- .../src/slash_command/tab_command.rs | 2 +- .../src/slash_command/terminal_command.rs | 3 +- .../src/slash_command/workflow_command.rs | 3 +- crates/assistant_slash_command/Cargo.toml | 6 + .../src/assistant_slash_command.rs | 379 +++++++++++++++++- .../extension/src/extension_slash_command.rs | 3 +- 23 files changed, 516 insertions(+), 88 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 271af64ff5faa240ffe9578b53e55c097702169e..7c73ec0cff58e962b90030b5c4a8d5f0b6a800b3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -453,9 +453,11 @@ dependencies = [ "anyhow", "collections", "derive_more", + "futures 0.3.30", "gpui", "language", "parking_lot", + "pretty_assertions", "serde", "serde_json", "workspace", diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d2b80ca22491a001b84772f60ee78d97f58f8aab..78237e51b216567dda9cf97d6ef75888e8a6925d 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -7,7 +7,7 @@ use crate::{ }; use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ - SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult, + SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult, }; use assistant_tool::ToolRegistry; use client::{self, proto, telemetry::Telemetry}; @@ -1688,19 +1688,13 @@ impl Context { let command_range = command_range.clone(); async move { let output = output.await; + let output = match output { + Ok(output) => SlashCommandOutput::from_event_stream(output).await, + Err(err) => Err(err), + }; this.update(&mut cx, |this, cx| match output { Ok(mut output) => { - // Ensure section ranges are valid. - for section in &mut output.sections { - section.range.start = section.range.start.min(output.text.len()); - section.range.end = section.range.end.min(output.text.len()); - while !output.text.is_char_boundary(section.range.start) { - section.range.start -= 1; - } - while !output.text.is_char_boundary(section.range.end) { - section.range.end += 1; - } - } + output.ensure_valid_section_ranges(); // Ensure there is a newline after the last section. 
if ensure_trailing_newline { diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index 4d866b4d8b9bb5cf52bd1ee3d0fbd0afb36a0134..e1b74487386930c0fdeef152304a6cec8d055df6 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -1097,7 +1097,8 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std text: output_text, sections, run_commands_in_text: false, - })), + } + .to_event_stream())), true, false, cx, @@ -1421,6 +1422,7 @@ impl SlashCommand for FakeSlashCommand { text: format!("Executed fake command: {}", self.0), sections: vec![], run_commands_in_text: false, - })) + } + .to_event_stream())) } } diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index 352b5a3ac917ea7039cd736e99322a83e928c6e8..cc73f36ebf391c1e1062be3796c9850f8520c77a 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -147,7 +147,8 @@ impl SlashCommand for AutoCommand { text: prompt, sections: Vec::new(), run_commands_in_text: true, - }) + } + .to_event_stream()) }) } } diff --git a/crates/assistant/src/slash_command/cargo_workspace_command.rs b/crates/assistant/src/slash_command/cargo_workspace_command.rs index 04fa408717bf3dfe5041bfd99ceb87cc290095b7..968238d36e0f529a87040028509f1a4eebc08a21 100644 --- a/crates/assistant/src/slash_command/cargo_workspace_command.rs +++ b/crates/assistant/src/slash_command/cargo_workspace_command.rs @@ -147,7 +147,8 @@ impl SlashCommand for CargoWorkspaceSlashCommand { metadata: None, }], run_commands_in_text: false, - }) + } + .to_event_stream()) }) }); output.unwrap_or_else(|error| Task::ready(Err(error))) diff --git a/crates/assistant/src/slash_command/context_server_command.rs b/crates/assistant/src/slash_command/context_server_command.rs index b749f9e4cd9a5afbb403247305d304b4a79eb459..5b22e76bf87a2cd312ccffba70082ed9b55e818b 100644 --- a/crates/assistant/src/slash_command/context_server_command.rs +++ b/crates/assistant/src/slash_command/context_server_command.rs @@ -185,7 +185,8 @@ impl SlashCommand for ContextServerSlashCommand { }], text: prompt, run_commands_in_text: false, - }) + } + .to_event_stream()) }) } else { Task::ready(Err(anyhow!("Context server not found"))) diff --git a/crates/assistant/src/slash_command/default_command.rs b/crates/assistant/src/slash_command/default_command.rs index 2c956f8ca66d9b2ad574358ad2aefafe09ccc1a3..4d9c9e2ae425e6f575fe3408ee6705528d2f3077 100644 --- a/crates/assistant/src/slash_command/default_command.rs +++ b/crates/assistant/src/slash_command/default_command.rs @@ -78,7 +78,8 @@ impl SlashCommand for DefaultSlashCommand { }], text, run_commands_in_text: true, - }) + } + .to_event_stream()) }) } } diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs index a17c5d739c68fa4e0bb70685932dc8c6173facdd..a37d33e2af5619c72925f416c9baafa0d2119d63 100644 --- a/crates/assistant/src/slash_command/delta_command.rs +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -86,25 +86,28 @@ impl SlashCommand for DeltaSlashCommand { .zip(file_command_new_outputs) { if let Ok(new_output) = new_output { - if let Some(file_command_range) = new_output.sections.first() { - let new_text = &new_output.text[file_command_range.range.clone()]; - if old_text.chars().ne(new_text.chars()) { - 
output.sections.extend(new_output.sections.into_iter().map( - |section| SlashCommandOutputSection { - range: output.text.len() + section.range.start - ..output.text.len() + section.range.end, - icon: section.icon, - label: section.label, - metadata: section.metadata, - }, - )); - output.text.push_str(&new_output.text); + if let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await + { + if let Some(file_command_range) = new_output.sections.first() { + let new_text = &new_output.text[file_command_range.range.clone()]; + if old_text.chars().ne(new_text.chars()) { + output.sections.extend(new_output.sections.into_iter().map( + |section| SlashCommandOutputSection { + range: output.text.len() + section.range.start + ..output.text.len() + section.range.end, + icon: section.icon, + label: section.label, + metadata: section.metadata, + }, + )); + output.text.push_str(&new_output.text); + } } } } } - Ok(output) + Ok(output.to_event_stream()) }) } } diff --git a/crates/assistant/src/slash_command/diagnostics_command.rs b/crates/assistant/src/slash_command/diagnostics_command.rs index 54be2219fff0bd1c5ab813c2b409325c7fa809a1..c7475445ce2538b1496e7d54f2e0869a56be5187 100644 --- a/crates/assistant/src/slash_command/diagnostics_command.rs +++ b/crates/assistant/src/slash_command/diagnostics_command.rs @@ -180,7 +180,11 @@ impl SlashCommand for DiagnosticsSlashCommand { let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx); - cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) }) + cx.spawn(move |_| async move { + task.await? + .map(|output| output.to_event_stream()) + .ok_or_else(|| anyhow!("No diagnostics found")) + }) } } diff --git a/crates/assistant/src/slash_command/docs_command.rs b/crates/assistant/src/slash_command/docs_command.rs index 92c3cd1977b9bc2ea6ecdcdb5894dfab9b40e2cd..b54f708e3201121d5bcd9aa0d0ff8387d7d331a8 100644 --- a/crates/assistant/src/slash_command/docs_command.rs +++ b/crates/assistant/src/slash_command/docs_command.rs @@ -356,7 +356,8 @@ impl SlashCommand for DocsSlashCommand { }) .collect(), run_commands_in_text: false, - }) + } + .to_event_stream()) }) } } diff --git a/crates/assistant/src/slash_command/fetch_command.rs b/crates/assistant/src/slash_command/fetch_command.rs index 9b61c547dbd6598f6390d024f20fb91888603381..4d38bb20a7baa7a1d8d605b1a6a34d34b7f601ca 100644 --- a/crates/assistant/src/slash_command/fetch_command.rs +++ b/crates/assistant/src/slash_command/fetch_command.rs @@ -167,7 +167,8 @@ impl SlashCommand for FetchSlashCommand { metadata: None, }], run_commands_in_text: false, - }) + } + .to_event_stream()) }) } } diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index 51d0b33ba26240789fbc1003785cae359040e0fa..0a1794cae197aaa50c2d3b169912cccf7b5f5a60 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -1,13 +1,15 @@ use anyhow::{anyhow, Context as _, Result}; use assistant_slash_command::{ - AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput, - SlashCommandOutputSection, SlashCommandResult, + AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandContent, SlashCommandEvent, + SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult, }; +use futures::channel::mpsc; use fuzzy::PathMatch; use gpui::{AppContext, Model, Task, View, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, 
LspAdapterDelegate}; use project::{PathMatchCandidateSet, Project}; use serde::{Deserialize, Serialize}; +use smol::stream::StreamExt; use std::{ fmt::Write, ops::{Range, RangeInclusive}, @@ -221,11 +223,11 @@ fn collect_files( .map(|worktree| worktree.read(cx).snapshot()) .collect::>(); + let (events_tx, events_rx) = mpsc::unbounded(); cx.spawn(|mut cx| async move { - let mut output = SlashCommandOutput::default(); for snapshot in snapshots { let worktree_id = snapshot.id(); - let mut directory_stack: Vec<(Arc, String, usize)> = Vec::new(); + let mut directory_stack: Vec> = Vec::new(); let mut folded_directory_names_stack = Vec::new(); let mut is_top_level_directory = true; @@ -241,17 +243,19 @@ fn collect_files( continue; } - while let Some((dir, _, _)) = directory_stack.last() { + while let Some(dir) = directory_stack.last() { if entry.path.starts_with(dir) { break; } - let (_, entry_name, start) = directory_stack.pop().unwrap(); - output.sections.push(build_entry_output_section( - start..output.text.len().saturating_sub(1), - Some(&PathBuf::from(entry_name)), - true, - None, - )); + directory_stack.pop().unwrap(); + events_tx + .unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?; + events_tx.unbounded_send(Ok(SlashCommandEvent::Content( + SlashCommandContent::Text { + text: "\n".into(), + run_commands_in_text: false, + }, + )))?; } let filename = entry @@ -283,23 +287,46 @@ fn collect_files( continue; } let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/"); - let entry_start = output.text.len(); if prefix_paths.is_empty() { - if is_top_level_directory { - output - .text - .push_str(&path_including_worktree_name.to_string_lossy()); + let label = if is_top_level_directory { is_top_level_directory = false; + path_including_worktree_name.to_string_lossy().to_string() } else { - output.text.push_str(&filename); - } - directory_stack.push((entry.path.clone(), filename, entry_start)); + filename + }; + events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection { + icon: IconName::Folder, + label: label.clone().into(), + metadata: None, + }))?; + events_tx.unbounded_send(Ok(SlashCommandEvent::Content( + SlashCommandContent::Text { + text: label, + run_commands_in_text: false, + }, + )))?; + directory_stack.push(entry.path.clone()); } else { let entry_name = format!("{}/{}", prefix_paths, &filename); - output.text.push_str(&entry_name); - directory_stack.push((entry.path.clone(), entry_name, entry_start)); + events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection { + icon: IconName::Folder, + label: entry_name.clone().into(), + metadata: None, + }))?; + events_tx.unbounded_send(Ok(SlashCommandEvent::Content( + SlashCommandContent::Text { + text: entry_name, + run_commands_in_text: false, + }, + )))?; + directory_stack.push(entry.path.clone()); } - output.text.push('\n'); + events_tx.unbounded_send(Ok(SlashCommandEvent::Content( + SlashCommandContent::Text { + text: "\n".into(), + run_commands_in_text: false, + }, + )))?; } else if entry.is_file() { let Some(open_buffer_task) = project_handle .update(&mut cx, |project, cx| { @@ -310,6 +337,7 @@ fn collect_files( continue; }; if let Some(buffer) = open_buffer_task.await.log_err() { + let mut output = SlashCommandOutput::default(); let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; append_buffer_to_output( &snapshot, @@ -317,32 +345,19 @@ fn collect_files( &mut output, ) .log_err(); + let mut buffer_events = output.to_event_stream(); + while let Some(event) = 
buffer_events.next().await { + events_tx.unbounded_send(event)?; + } } } } - while let Some((dir, entry, start)) = directory_stack.pop() { - if directory_stack.is_empty() { - let mut root_path = PathBuf::new(); - root_path.push(snapshot.root_name()); - root_path.push(&dir); - output.sections.push(build_entry_output_section( - start..output.text.len(), - Some(&root_path), - true, - None, - )); - } else { - output.sections.push(build_entry_output_section( - start..output.text.len(), - Some(&PathBuf::from(entry.as_str())), - true, - None, - )); - } + while let Some(_) = directory_stack.pop() { + events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?; } } - Ok(output) + Ok(events_rx.boxed()) }) } @@ -528,8 +543,10 @@ pub fn append_buffer_to_output( #[cfg(test)] mod test { + use assistant_slash_command::SlashCommandOutput; use fs::FakeFs; use gpui::TestAppContext; + use pretty_assertions::assert_eq; use project::Project; use serde_json::json; use settings::SettingsStore; @@ -577,6 +594,9 @@ mod test { .update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx)) .await .unwrap(); + let result_1 = SlashCommandOutput::from_event_stream(result_1) + .await + .unwrap(); assert!(result_1.text.starts_with("root/dir")); // 4 files + 2 directories @@ -586,6 +606,9 @@ mod test { .update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx)) .await .unwrap(); + let result_2 = SlashCommandOutput::from_event_stream(result_2) + .await + .unwrap(); assert_eq!(result_1, result_2); @@ -593,6 +616,7 @@ mod test { .update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx)) .await .unwrap(); + let result = SlashCommandOutput::from_event_stream(result).await.unwrap(); assert!(result.text.starts_with("root/dir")); // 5 files + 2 directories @@ -639,6 +663,7 @@ mod test { .update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)) .await .unwrap(); + let result = SlashCommandOutput::from_event_stream(result).await.unwrap(); // Sanity check assert!(result.text.starts_with("zed/assets/themes\n")); @@ -700,6 +725,7 @@ mod test { .update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)) .await .unwrap(); + let result = SlashCommandOutput::from_event_stream(result).await.unwrap(); assert!(result.text.starts_with("zed/assets/themes\n")); assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE"); @@ -720,6 +746,8 @@ mod test { assert_eq!(result.sections[6].label, "summercamp"); assert_eq!(result.sections[7].label, "zed/assets/themes"); + assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n"); + // Ensure that the project lasts until after the last await drop(project); } diff --git a/crates/assistant/src/slash_command/now_command.rs b/crates/assistant/src/slash_command/now_command.rs index 40bc29f27ddffee49eed1ea2663d1f561db588dc..cf81bec9265bb1351bafe30a048f49160f4d1e96 100644 --- a/crates/assistant/src/slash_command/now_command.rs +++ b/crates/assistant/src/slash_command/now_command.rs @@ -63,6 +63,7 @@ impl SlashCommand for NowSlashCommand { metadata: None, }], run_commands_in_text: false, - })) + } + .to_event_stream())) } } diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 
e55699b026826a85dddf49019a31ad74e54a9eff..d14cb310ad10df85e0a2d60af6cfc8c4793babff 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -162,7 +162,8 @@ impl SlashCommand for ProjectSlashCommand { text: output, sections, run_commands_in_text: true, - }) + } + .to_event_stream()) }) .await }) diff --git a/crates/assistant/src/slash_command/prompt_command.rs b/crates/assistant/src/slash_command/prompt_command.rs index dc803293823fb3974006a243ee4152499289a56a..079d1425af0984db7f0158697b98ae519c297085 100644 --- a/crates/assistant/src/slash_command/prompt_command.rs +++ b/crates/assistant/src/slash_command/prompt_command.rs @@ -102,7 +102,8 @@ impl SlashCommand for PromptSlashCommand { metadata: None, }], run_commands_in_text: true, - }) + } + .to_event_stream()) }) } } diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 999fe252becc74875fbdc944f1f10a819da3abbf..9c4938ce9342bb47c4e151de698e534ccd5d6db0 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -130,6 +130,7 @@ impl SlashCommand for SearchSlashCommand { sections, run_commands_in_text: false, } + .to_event_stream() }) .await; diff --git a/crates/assistant/src/slash_command/symbols_command.rs b/crates/assistant/src/slash_command/symbols_command.rs index d28b53c1a13895610558468c09bbab0eed3f5173..468c8d7126437c73b0d88f2670fb92d528b231b6 100644 --- a/crates/assistant/src/slash_command/symbols_command.rs +++ b/crates/assistant/src/slash_command/symbols_command.rs @@ -85,7 +85,8 @@ impl SlashCommand for OutlineSlashCommand { }], text: outline_text, run_commands_in_text: false, - }) + } + .to_event_stream()) }) }); diff --git a/crates/assistant/src/slash_command/tab_command.rs b/crates/assistant/src/slash_command/tab_command.rs index 23c3b64b38505cd63455446704d9e2d14e4c2dcf..771c0765eea7f99f335b12b69cbbaafb1a10f141 100644 --- a/crates/assistant/src/slash_command/tab_command.rs +++ b/crates/assistant/src/slash_command/tab_command.rs @@ -150,7 +150,7 @@ impl SlashCommand for TabSlashCommand { for (full_path, buffer, _) in tab_items_search.await? 
{ append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err(); } - Ok(output) + Ok(output.to_event_stream()) }) } } diff --git a/crates/assistant/src/slash_command/terminal_command.rs b/crates/assistant/src/slash_command/terminal_command.rs index 7516b275ac8b819b22614ca6cc4cbb31e44c2440..2ca1d4041b87208f9d5015d2fdc5e4b27196144c 100644 --- a/crates/assistant/src/slash_command/terminal_command.rs +++ b/crates/assistant/src/slash_command/terminal_command.rs @@ -97,7 +97,8 @@ impl SlashCommand for TerminalSlashCommand { metadata: None, }], run_commands_in_text: false, - })) + } + .to_event_stream())) } } diff --git a/crates/assistant/src/slash_command/workflow_command.rs b/crates/assistant/src/slash_command/workflow_command.rs index 1379eb5e803a633bcaf63093bdee6cd5b35751c6..ca6ccde92ee0c701f5be347b43b983926fe04958 100644 --- a/crates/assistant/src/slash_command/workflow_command.rs +++ b/crates/assistant/src/slash_command/workflow_command.rs @@ -75,7 +75,8 @@ impl SlashCommand for WorkflowSlashCommand { metadata: None, }], run_commands_in_text: false, - }) + } + .to_event_stream()) }) } } diff --git a/crates/assistant_slash_command/Cargo.toml b/crates/assistant_slash_command/Cargo.toml index a58a84312fc3e25b4dac166d5a75fe625a96527a..8ec5b729c936036ab98e6f7e9a54e8f2824f3930 100644 --- a/crates/assistant_slash_command/Cargo.toml +++ b/crates/assistant_slash_command/Cargo.toml @@ -15,9 +15,15 @@ path = "src/assistant_slash_command.rs" anyhow.workspace = true collections.workspace = true derive_more.workspace = true +futures.workspace = true gpui.workspace = true language.workspace = true parking_lot.workspace = true serde.workspace = true serde_json.workspace = true workspace.workspace = true + +[dev-dependencies] +gpui = { workspace = true, features = ["test-support"] } +pretty_assertions.workspace = true +workspace = { workspace = true, features = ["test-support"] } diff --git a/crates/assistant_slash_command/src/assistant_slash_command.rs b/crates/assistant_slash_command/src/assistant_slash_command.rs index 90e47690a83d930ce67a79861035784fee70c5ef..de247602d8934276229d667f64a3223f72a51a92 100644 --- a/crates/assistant_slash_command/src/assistant_slash_command.rs +++ b/crates/assistant_slash_command/src/assistant_slash_command.rs @@ -1,6 +1,8 @@ mod slash_command_registry; use anyhow::Result; +use futures::stream::{self, BoxStream}; +use futures::StreamExt; use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext}; use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt}; use serde::{Deserialize, Serialize}; @@ -56,7 +58,7 @@ pub struct ArgumentCompletion { pub replace_previous_arguments: bool, } -pub type SlashCommandResult = Result; +pub type SlashCommandResult = Result>>; pub trait SlashCommand: 'static + Send + Sync { fn name(&self) -> String; @@ -98,13 +100,146 @@ pub type RenderFoldPlaceholder = Arc< + Fn(ElementId, Arc, &mut WindowContext) -> AnyElement, >; -#[derive(Debug, Default, PartialEq)] +#[derive(Debug, PartialEq, Eq)] +pub enum SlashCommandContent { + Text { + text: String, + run_commands_in_text: bool, + }, +} + +#[derive(Debug, PartialEq, Eq)] +pub enum SlashCommandEvent { + StartSection { + icon: IconName, + label: SharedString, + metadata: Option, + }, + Content(SlashCommandContent), + EndSection { + metadata: Option, + }, +} + +#[derive(Debug, Default, PartialEq, Clone)] pub struct SlashCommandOutput { pub text: String, pub sections: Vec>, pub run_commands_in_text: bool, } +impl SlashCommandOutput { + 
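+    /// Clamps each section's range to the current text length and moves the
+    /// start back / the end forward to the nearest `char` boundary, so that
+    /// slicing `self.text` by a section range cannot split a multi-byte
+    /// UTF-8 character or index past the end.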
pub fn ensure_valid_section_ranges(&mut self) { + for section in &mut self.sections { + section.range.start = section.range.start.min(self.text.len()); + section.range.end = section.range.end.min(self.text.len()); + while !self.text.is_char_boundary(section.range.start) { + section.range.start -= 1; + } + while !self.text.is_char_boundary(section.range.end) { + section.range.end += 1; + } + } + } + + /// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s. + pub fn to_event_stream(mut self) -> BoxStream<'static, Result> { + self.ensure_valid_section_ranges(); + + let mut events = Vec::new(); + let mut last_section_end = 0; + + for section in self.sections { + if last_section_end < section.range.start { + events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text { + text: self + .text + .get(last_section_end..section.range.start) + .unwrap_or_default() + .to_string(), + run_commands_in_text: self.run_commands_in_text, + }))); + } + + events.push(Ok(SlashCommandEvent::StartSection { + icon: section.icon, + label: section.label, + metadata: section.metadata.clone(), + })); + events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text { + text: self + .text + .get(section.range.start..section.range.end) + .unwrap_or_default() + .to_string(), + run_commands_in_text: self.run_commands_in_text, + }))); + events.push(Ok(SlashCommandEvent::EndSection { + metadata: section.metadata, + })); + + last_section_end = section.range.end; + } + + if last_section_end < self.text.len() { + events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text { + text: self.text[last_section_end..].to_string(), + run_commands_in_text: self.run_commands_in_text, + }))); + } + + stream::iter(events).boxed() + } + + pub async fn from_event_stream( + mut events: BoxStream<'static, Result>, + ) -> Result { + let mut output = SlashCommandOutput::default(); + let mut section_stack = Vec::new(); + + while let Some(event) = events.next().await { + match event? { + SlashCommandEvent::StartSection { + icon, + label, + metadata, + } => { + let start = output.text.len(); + section_stack.push(SlashCommandOutputSection { + range: start..start, + icon, + label, + metadata, + }); + } + SlashCommandEvent::Content(SlashCommandContent::Text { + text, + run_commands_in_text, + }) => { + output.text.push_str(&text); + output.run_commands_in_text = run_commands_in_text; + + if let Some(section) = section_stack.last_mut() { + section.range.end = output.text.len(); + } + } + SlashCommandEvent::EndSection { metadata } => { + if let Some(mut section) = section_stack.pop() { + section.metadata = metadata; + output.sections.push(section); + } + } + } + } + + while let Some(section) = section_stack.pop() { + output.sections.push(section); + } + + Ok(output) + } +} + #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct SlashCommandOutputSection { pub range: Range, @@ -118,3 +253,243 @@ impl SlashCommandOutputSection { self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty() } } + +#[cfg(test)] +mod tests { + use pretty_assertions::assert_eq; + use serde_json::json; + + use super::*; + + #[gpui::test] + async fn test_slash_command_output_to_events_round_trip() { + // Test basic output consisting of a single section. 
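+        // The single section spans the entire text, so the stream should be
+        // exactly one StartSection/Content/EndSection triple, and collecting
+        // it back via `from_event_stream` should reproduce the original
+        // `SlashCommandOutput`.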
+ { + let text = "Hello, world!".to_string(); + let range = 0..text.len(); + let output = SlashCommandOutput { + text, + sections: vec![SlashCommandOutputSection { + range, + icon: IconName::Code, + label: "Section 1".into(), + metadata: None, + }], + run_commands_in_text: false, + }; + + let events = output.clone().to_event_stream().collect::>().await; + let events = events + .into_iter() + .filter_map(|event| event.ok()) + .collect::>(); + + assert_eq!( + events, + vec![ + SlashCommandEvent::StartSection { + icon: IconName::Code, + label: "Section 1".into(), + metadata: None + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Hello, world!".into(), + run_commands_in_text: false + }), + SlashCommandEvent::EndSection { metadata: None } + ] + ); + + let new_output = + SlashCommandOutput::from_event_stream(output.clone().to_event_stream()) + .await + .unwrap(); + + assert_eq!(new_output, output); + } + + // Test output where the sections do not comprise all of the text. + { + let text = "Apple\nCucumber\nBanana\n".to_string(); + let output = SlashCommandOutput { + text, + sections: vec![ + SlashCommandOutputSection { + range: 0..6, + icon: IconName::Check, + label: "Fruit".into(), + metadata: None, + }, + SlashCommandOutputSection { + range: 15..22, + icon: IconName::Check, + label: "Fruit".into(), + metadata: None, + }, + ], + run_commands_in_text: false, + }; + + let events = output.clone().to_event_stream().collect::>().await; + let events = events + .into_iter() + .filter_map(|event| event.ok()) + .collect::>(); + + assert_eq!( + events, + vec![ + SlashCommandEvent::StartSection { + icon: IconName::Check, + label: "Fruit".into(), + metadata: None + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Apple\n".into(), + run_commands_in_text: false + }), + SlashCommandEvent::EndSection { metadata: None }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Cucumber\n".into(), + run_commands_in_text: false + }), + SlashCommandEvent::StartSection { + icon: IconName::Check, + label: "Fruit".into(), + metadata: None + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Banana\n".into(), + run_commands_in_text: false + }), + SlashCommandEvent::EndSection { metadata: None } + ] + ); + + let new_output = + SlashCommandOutput::from_event_stream(output.clone().to_event_stream()) + .await + .unwrap(); + + assert_eq!(new_output, output); + } + + // Test output consisting of multiple sections. 
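+        // Each section covers one line without its trailing newline, so the
+        // stream is expected to interleave bare `Content("\n")` events between
+        // the `EndSection`/`StartSection` pairs (and after the last section)
+        // for the text that no section covers.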
+ { + let text = "Line 1\nLine 2\nLine 3\nLine 4\n".to_string(); + let output = SlashCommandOutput { + text, + sections: vec![ + SlashCommandOutputSection { + range: 0..6, + icon: IconName::FileCode, + label: "Section 1".into(), + metadata: Some(json!({ "a": true })), + }, + SlashCommandOutputSection { + range: 7..13, + icon: IconName::FileDoc, + label: "Section 2".into(), + metadata: Some(json!({ "b": true })), + }, + SlashCommandOutputSection { + range: 14..20, + icon: IconName::FileGit, + label: "Section 3".into(), + metadata: Some(json!({ "c": true })), + }, + SlashCommandOutputSection { + range: 21..27, + icon: IconName::FileToml, + label: "Section 4".into(), + metadata: Some(json!({ "d": true })), + }, + ], + run_commands_in_text: false, + }; + + let events = output.clone().to_event_stream().collect::>().await; + let events = events + .into_iter() + .filter_map(|event| event.ok()) + .collect::>(); + + assert_eq!( + events, + vec![ + SlashCommandEvent::StartSection { + icon: IconName::FileCode, + label: "Section 1".into(), + metadata: Some(json!({ "a": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Line 1".into(), + run_commands_in_text: false + }), + SlashCommandEvent::EndSection { + metadata: Some(json!({ "a": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "\n".into(), + run_commands_in_text: false + }), + SlashCommandEvent::StartSection { + icon: IconName::FileDoc, + label: "Section 2".into(), + metadata: Some(json!({ "b": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Line 2".into(), + run_commands_in_text: false + }), + SlashCommandEvent::EndSection { + metadata: Some(json!({ "b": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "\n".into(), + run_commands_in_text: false + }), + SlashCommandEvent::StartSection { + icon: IconName::FileGit, + label: "Section 3".into(), + metadata: Some(json!({ "c": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Line 3".into(), + run_commands_in_text: false + }), + SlashCommandEvent::EndSection { + metadata: Some(json!({ "c": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "\n".into(), + run_commands_in_text: false + }), + SlashCommandEvent::StartSection { + icon: IconName::FileToml, + label: "Section 4".into(), + metadata: Some(json!({ "d": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "Line 4".into(), + run_commands_in_text: false + }), + SlashCommandEvent::EndSection { + metadata: Some(json!({ "d": true })) + }, + SlashCommandEvent::Content(SlashCommandContent::Text { + text: "\n".into(), + run_commands_in_text: false + }), + ] + ); + + let new_output = + SlashCommandOutput::from_event_stream(output.clone().to_event_stream()) + .await + .unwrap(); + + assert_eq!(new_output, output); + } + } +} diff --git a/crates/extension/src/extension_slash_command.rs b/crates/extension/src/extension_slash_command.rs index e9725f1ae423c4805d678d84a34841da01db0dcb..0a10e9e1a25fe4e0160878c9885da5051c086950 100644 --- a/crates/extension/src/extension_slash_command.rs +++ b/crates/extension/src/extension_slash_command.rs @@ -128,7 +128,8 @@ impl SlashCommand for ExtensionSlashCommand { }) .collect(), run_commands_in_text: false, - }) + } + .to_event_stream()) }) } } From 0ffc92ab6524000ead73b75ff5a21fcbeab37e05 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 21:57:06 -0400 
Subject: [PATCH 10/76] Update actions/checkout digest to 11bd719 (#19636) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/checkout](https://redirect.github.com/actions/checkout) | action | digest | `eef6144` -> `11bd719` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/bump_collab_staging.yml | 2 +- .github/workflows/bump_patch_version.yml | 2 +- .github/workflows/ci.yml | 18 +++++++++--------- ...community_update_all_top_ranking_issues.yml | 2 +- ...munity_update_weekly_top_ranking_issues.yml | 2 +- .github/workflows/danger.yml | 2 +- .github/workflows/deploy_cloudflare.yml | 2 +- .github/workflows/deploy_collab.yml | 8 ++++---- .github/workflows/docs.yml | 2 +- .github/workflows/publish_extension_cli.yml | 2 +- .github/workflows/randomized_tests.yml | 2 +- .github/workflows/release_nightly.yml | 12 ++++++------ 12 files changed, 28 insertions(+), 28 deletions(-) diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml index d0da0a94053b3318c5d7d81cf5ffbd9b716e4c3b..d8eaa6019ec29b5dd908564d05f430d3e7f01909 100644 --- a/.github/workflows/bump_collab_staging.yml +++ b/.github/workflows/bump_collab_staging.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index dbd0b2b3e1f6e12626a78bcc09b66f3b81b480fc..e3468274a1e13a9c8a38efeef13df32f14429d98 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -18,7 +18,7 @@ jobs: - buildjet-16vcpu-ubuntu-2204 steps: - name: Checkout code - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: ref: ${{ github.event.inputs.branch }} ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6114895412518e09dfa1a2094adaf0c4b3f094d6..3843a3343b4a79a99c06f5fdeb83039bca9823d4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,7 +36,7 @@ jobs: - test steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false fetch-depth: 0 # fetch full history @@ -78,7 +78,7 @@ jobs: - buildjet-8vcpu-ubuntu-2204 steps: - name: Checkout repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Run style checks uses: ./.github/actions/check_style @@ -96,7 +96,7 @@ jobs: - test steps: - name: Checkout repo - uses: 
actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -133,7 +133,7 @@ jobs: run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -165,7 +165,7 @@ jobs: run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -188,7 +188,7 @@ jobs: runs-on: hosted-windows-1 steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -229,7 +229,7 @@ jobs: node-version: "18" - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: # We need to fetch more than one commit so that `script/draft-release-notes` # is able to diff between the current and previous tag. @@ -314,7 +314,7 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -361,7 +361,7 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false diff --git a/.github/workflows/community_update_all_top_ranking_issues.yml b/.github/workflows/community_update_all_top_ranking_issues.yml index ecc7355511ccdc5a6b356892788181d4f261c722..8fb779fa6715944bb21b4aa3ae61c4d892dfcd34 100644 --- a/.github/workflows/community_update_all_top_ranking_issues.yml +++ b/.github/workflows/community_update_all_top_ranking_issues.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest if: github.repository_owner == 'zed-industries' steps: - - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up uv uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3 with: diff --git a/.github/workflows/community_update_weekly_top_ranking_issues.yml b/.github/workflows/community_update_weekly_top_ranking_issues.yml index aab45740cdf7a87b151464364067c51334a7fe3d..daf90b70495479dc89a58e96f9ddda85918c85de 100644 --- a/.github/workflows/community_update_weekly_top_ranking_issues.yml +++ b/.github/workflows/community_update_weekly_top_ranking_issues.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest if: github.repository_owner == 'zed-industries' steps: - - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Set up uv uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3 with: diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 2828cb42eab51f43b4d13a4e62c9ef91ad7c9391..132af3bda22b6908a8e56fb602198227529d391d 100644 --- a/.github/workflows/danger.yml +++ 
b/.github/workflows/danger.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0 with: diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 1c9e7bc5b08bba6e018b10533570a214979eda49..e948eb64c382c4864480cdeef40b729e2e59b50e 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -13,7 +13,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 0eaf1697df2f62e5c458b9e93a3fc6061f1a340c..664a702cee0b019f579cd6d03f30db100ac1cbf7 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -17,7 +17,7 @@ jobs: - test steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false fetch-depth: 0 @@ -36,7 +36,7 @@ jobs: needs: style steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false fetch-depth: 0 @@ -71,7 +71,7 @@ jobs: run: doctl registry login - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -97,7 +97,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 7aebf6adbcb4ac06776980193d79d370feba8624..0870a55e60057cb8440c7d911ab78aef18c04045 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0 with: diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml index dc73bd6931704a6b93ca67df0c552c635a724c3f..03ac8a8cc7e4d181db2ce1f7570dc9b649ddd2aa 100644 --- a/.github/workflows/publish_extension_cli.yml +++ b/.github/workflows/publish_extension_cli.yml @@ -16,7 +16,7 @@ jobs: - ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index af604e6abbfd7cdb11e30f14c009e5da513e535f..dd7163dc5e9cc439ccf7e53d31ab15f88b513f77 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -27,7 +27,7 @@ jobs: node-version: "18" - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false diff 
--git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 9d5581ebdcf220cf3221f1d4b2cd5478c3884cde..534855cd21357fd08b8aa24fed7427c97dcec839 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -23,7 +23,7 @@ jobs: - test steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false fetch-depth: 0 @@ -44,7 +44,7 @@ jobs: needs: style steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -75,7 +75,7 @@ jobs: node-version: "18" - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -109,7 +109,7 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -149,7 +149,7 @@ jobs: ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: clean: false @@ -182,7 +182,7 @@ jobs: - bundle-linux-arm steps: - name: Checkout repo - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 From 1dba50f42fbd6fe1e051150e168e36e0c7716443 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 24 Oct 2024 11:37:32 +0200 Subject: [PATCH 11/76] ssh remoting: Fix version check (#19668) This snuck in when Bennet and I were debugging why our connection to the SSH host would break. We suspected that somewhere something was logging to STDOUT and, I guess, we changed all `println!` to `eprintln!`. Now, two weeks later, I'm sitting here, wondering why the version check doesn't work anymore. The server always reports a version of `""`. Turns out we take the command's STDOUT and not STDERR, which is correct. But it also turns out we started to print the version to STDERR, which breaks the version check. One-character bug & one-character fix. Release Notes: - N/A --- crates/remote_server/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 10b656cbdb1ce30837eb8a66fd50269c947d293d..72ac438e603f9ef9bd0ac5d45c69075d5f47fe61 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -72,7 +72,7 @@ fn main() { } }, Some(Commands::Version) => { - eprintln!("{}", env!("ZED_PKG_VERSION")); + println!("{}", env!("ZED_PKG_VERSION")); std::process::exit(0); } None => { From e040b200bc0836f5d93afa8aef3bf47092ebf3f9 Mon Sep 17 00:00:00 2001 From: Zhang <17492978+zhang0098@users.noreply.github.com> Date: Thu, 24 Oct 2024 18:15:42 +0800 Subject: [PATCH 12/76] project_panel: Make up/down in file rename editor not select items (#19670) Closes #19017 Release Notes: - Fixed project panel bug when renaming files where up/down keys could select other files. 
--- crates/editor/src/actions.rs | 2 +- crates/project_panel/src/project_panel.rs | 26 +++++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 4955f00c38233421ba1f593d5b0312af448d813c..5f866f9997297e71cfedac4cab6c44dcd47f68b7 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -18,7 +18,7 @@ pub struct SelectPrevious { #[derive(PartialEq, Clone, Deserialize, Default)] pub struct MoveToBeginningOfLine { #[serde(default = "default_true")] - pub(super) stop_at_soft_wraps: bool, + pub stop_at_soft_wraps: bool, } #[derive(PartialEq, Clone, Deserialize, Default)] diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 491480936976e69910143a6528d8b5f8b161e0ed..bbd1664b9d1e6af31f245ee15c6ca188b23ea4a4 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -726,6 +726,19 @@ impl ProjectPanel { } fn select_prev(&mut self, _: &SelectPrev, cx: &mut ViewContext) { + if let Some(edit_state) = &self.edit_state { + if edit_state.processing_filename.is_none() { + self.filename_editor.update(cx, |editor, cx| { + editor.move_to_beginning_of_line( + &editor::actions::MoveToBeginningOfLine { + stop_at_soft_wraps: false, + }, + cx, + ); + }); + return; + } + } if let Some(selection) = self.selection { let (mut worktree_ix, mut entry_ix, _) = self.index_for_selection(selection).unwrap_or_default(); @@ -1196,6 +1209,19 @@ impl ProjectPanel { } fn select_next(&mut self, _: &SelectNext, cx: &mut ViewContext) { + if let Some(edit_state) = &self.edit_state { + if edit_state.processing_filename.is_none() { + self.filename_editor.update(cx, |editor, cx| { + editor.move_to_end_of_line( + &editor::actions::MoveToEndOfLine { + stop_at_soft_wraps: false, + }, + cx, + ); + }); + return; + } + } if let Some(selection) = self.selection { let (mut worktree_ix, mut entry_ix, _) = self.index_for_selection(selection).unwrap_or_default(); From 4214ed927f14135fb2dcfd0ffa5ce2dd71362f4f Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 24 Oct 2024 13:07:20 +0200 Subject: [PATCH 13/76] project panel: Add indent guides (#18260) See #12673 https://github.com/user-attachments/assets/94079afc-a851-4206-9c9b-4fad3542334e TODO: - [x] Make active indent guides work for autofolded directories - [x] Figure out which theme colors to use - [x] Fix horizontal scrolling - [x] Make indent guides easier to click - [x] Fix selected background flashing when hovering over entry/indent guide - [x] Docs Release Notes: - Added indent guides to the project panel --- Cargo.lock | 1 + assets/settings/default.json | 2 + crates/gpui/src/elements/uniform_list.rs | 58 +- crates/project_panel/Cargo.toml | 1 + crates/project_panel/src/project_panel.rs | 325 +++++++++-- .../src/project_panel_settings.rs | 5 + crates/storybook/src/stories/indent_guides.rs | 83 +++ crates/theme/src/default_colors.rs | 6 + crates/theme/src/fallback_themes.rs | 3 + crates/theme/src/schema.rs | 21 + crates/theme/src/styles/colors.rs | 3 + crates/ui/src/components.rs | 2 + crates/ui/src/components/indent_guides.rs | 504 ++++++++++++++++++ 13 files changed, 974 insertions(+), 40 deletions(-) create mode 100644 crates/storybook/src/stories/indent_guides.rs create mode 100644 crates/ui/src/components/indent_guides.rs diff --git a/Cargo.lock b/Cargo.lock index 7c73ec0cff58e962b90030b5c4a8d5f0b6a800b3..4e86627d803856f1230d66f29e5e2c3de0ef9246 100644 --- 
a/Cargo.lock +++ b/Cargo.lock @@ -8515,6 +8515,7 @@ dependencies = [ "serde_derive", "serde_json", "settings", + "smallvec", "theme", "ui", "util", diff --git a/assets/settings/default.json b/assets/settings/default.json index 8da7abe18fafa12511824ca3ec52586fb30c34b4..32f46ce714379157dfb54ae06d6d507514421b16 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -346,6 +346,8 @@ "git_status": true, // Amount of indentation for nested items. "indent_size": 20, + // Whether to show indent guides in the project panel. + "indent_guides": true, // Whether to reveal it in the project panel automatically, // when a corresponding project entry becomes active. // Gitignored entries are never auto revealed. diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index b6fcf91e53d07006d011906d3bd1ed0ecdf8e05a..9ce85aab232cd7407ced619ba1f3feecdf73c9e6 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -48,6 +48,7 @@ where item_count, item_to_measure_index: 0, render_items: Box::new(render_range), + decorations: Vec::new(), interactivity: Interactivity { element_id: Some(id), base_style: Box::new(base_style), @@ -69,6 +70,7 @@ pub struct UniformList { item_to_measure_index: usize, render_items: Box Fn(Range, &'a mut WindowContext) -> SmallVec<[AnyElement; 64]>>, + decorations: Vec>, interactivity: Interactivity, scroll_handle: Option, sizing_behavior: ListSizingBehavior, @@ -78,6 +80,7 @@ pub struct UniformList { /// Frame state used by the [UniformList]. pub struct UniformListFrameState { items: SmallVec<[AnyElement; 32]>, + decorations: SmallVec<[AnyElement; 1]>, } /// A handle for controlling the scroll position of a uniform list. @@ -185,6 +188,7 @@ impl Element for UniformList { layout_id, UniformListFrameState { items: SmallVec::new(), + decorations: SmallVec::new(), }, ) } @@ -292,9 +296,10 @@ impl Element for UniformList { ..cmp::min(last_visible_element_ix, self.item_count); let mut items = (self.render_items)(visible_range.clone(), cx); + let content_mask = ContentMask { bounds }; cx.with_content_mask(Some(content_mask), |cx| { - for (mut item, ix) in items.into_iter().zip(visible_range) { + for (mut item, ix) in items.into_iter().zip(visible_range.clone()) { let item_origin = padded_bounds.origin + point( if can_scroll_horizontally { @@ -317,6 +322,34 @@ impl Element for UniformList { item.prepaint_at(item_origin, cx); frame_state.items.push(item); } + + let bounds = Bounds::new( + padded_bounds.origin + + point( + if can_scroll_horizontally { + scroll_offset.x + padding.left + } else { + scroll_offset.x + }, + scroll_offset.y + padding.top, + ), + padded_bounds.size, + ); + for decoration in &self.decorations { + let mut decoration = decoration.as_ref().compute( + visible_range.clone(), + bounds, + item_height, + cx, + ); + let available_space = size( + AvailableSpace::Definite(bounds.size.width), + AvailableSpace::Definite(bounds.size.height), + ); + decoration.layout_as_root(available_space, cx); + decoration.prepaint_at(bounds.origin, cx); + frame_state.decorations.push(decoration); + } }); } @@ -338,6 +371,9 @@ impl Element for UniformList { for item in &mut request_layout.items { item.paint(cx); } + for decoration in &mut request_layout.decorations { + decoration.paint(cx); + } }) } } @@ -350,6 +386,20 @@ impl IntoElement for UniformList { } } +/// A decoration for a [`UniformList`]. 
This can be used for various things, +/// such as rendering indent guides, or other visual effects. +pub trait UniformListDecoration { + /// Compute the decoration element, given the visible range of list items, + /// the bounds of the list, and the height of each item. + fn compute( + &self, + visible_range: Range, + bounds: Bounds, + item_height: Pixels, + cx: &mut WindowContext, + ) -> AnyElement; +} + impl UniformList { /// Selects a specific list item for measurement. pub fn with_width_from_item(mut self, item_index: Option) -> Self { @@ -382,6 +432,12 @@ impl UniformList { self } + /// Adds a decoration element to the list. + pub fn with_decoration(mut self, decoration: impl UniformListDecoration + 'static) -> Self { + self.decorations.push(Box::new(decoration)); + self + } + fn measure_item(&self, list_width: Option, cx: &mut WindowContext) -> Size { if self.item_count == 0 { return Size::default(); diff --git a/crates/project_panel/Cargo.toml b/crates/project_panel/Cargo.toml index 11c7364e58f2f4dfd6aa41de23258f2eb7fbb394..23241a0f88af3c0dea16fcf2f15287baa48784d2 100644 --- a/crates/project_panel/Cargo.toml +++ b/crates/project_panel/Cargo.toml @@ -30,6 +30,7 @@ serde.workspace = true serde_derive.workspace = true serde_json.workspace = true settings.workspace = true +smallvec.workspace = true theme.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index bbd1664b9d1e6af31f245ee15c6ca188b23ea4a4..0f503c696bb3632286236392734798792b80c055 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -16,12 +16,13 @@ use anyhow::{anyhow, Context as _, Result}; use collections::{hash_map, BTreeSet, HashMap}; use git::repository::GitFileStatus; use gpui::{ - actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, - AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, DragMoveEvent, - EventEmitter, ExternalPaths, FocusHandle, FocusableView, InteractiveElement, KeyContext, - ListHorizontalSizingBehavior, ListSizingBehavior, Model, MouseButton, MouseDownEvent, - ParentElement, Pixels, Point, PromptLevel, Render, Stateful, Styled, Subscription, Task, - UniformListScrollHandle, View, ViewContext, VisualContext as _, WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action, + AnyElement, AppContext, AssetSource, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent, + Div, DragMoveEvent, EventEmitter, ExternalPaths, FocusHandle, FocusableView, + InteractiveElement, KeyContext, ListHorizontalSizingBehavior, ListSizingBehavior, Model, + MouseButton, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render, Stateful, + Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, VisualContext as _, + WeakView, WindowContext, }; use indexmap::IndexMap; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev}; @@ -31,6 +32,7 @@ use project::{ }; use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings}; use serde::{Deserialize, Serialize}; +use smallvec::SmallVec; use std::{ cell::OnceCell, collections::HashSet, @@ -41,7 +43,10 @@ use std::{ time::Duration, }; use theme::ThemeSettings; -use ui::{prelude::*, v_flex, ContextMenu, Icon, KeyBinding, Label, ListItem, Tooltip}; +use ui::{ + prelude::*, v_flex, ContextMenu, Icon, IndentGuideColors, IndentGuideLayout, KeyBinding, Label, + 
ListItem, Tooltip, +}; use util::{maybe, ResultExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, @@ -654,42 +659,52 @@ impl ProjectPanel { } fn collapse_selected_entry(&mut self, _: &CollapseSelectedEntry, cx: &mut ViewContext) { - if let Some((worktree, mut entry)) = self.selected_entry(cx) { - if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { - if folded_ancestors.current_ancestor_depth + 1 - < folded_ancestors.max_ancestor_depth() - { - folded_ancestors.current_ancestor_depth += 1; - cx.notify(); - return; - } + let Some((worktree, entry)) = self.selected_entry_handle(cx) else { + return; + }; + self.collapse_entry(entry.clone(), worktree, cx) + } + + fn collapse_entry( + &mut self, + entry: Entry, + worktree: Model, + cx: &mut ViewContext, + ) { + let worktree = worktree.read(cx); + if let Some(folded_ancestors) = self.ancestors.get_mut(&entry.id) { + if folded_ancestors.current_ancestor_depth + 1 < folded_ancestors.max_ancestor_depth() { + folded_ancestors.current_ancestor_depth += 1; + cx.notify(); + return; } - let worktree_id = worktree.id(); - let expanded_dir_ids = - if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) { - expanded_dir_ids - } else { - return; - }; + } + let worktree_id = worktree.id(); + let expanded_dir_ids = + if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) { + expanded_dir_ids + } else { + return; + }; - loop { - let entry_id = entry.id; - match expanded_dir_ids.binary_search(&entry_id) { - Ok(ix) => { - expanded_dir_ids.remove(ix); - self.update_visible_entries(Some((worktree_id, entry_id)), cx); - cx.notify(); + let mut entry = &entry; + loop { + let entry_id = entry.id; + match expanded_dir_ids.binary_search(&entry_id) { + Ok(ix) => { + expanded_dir_ids.remove(ix); + self.update_visible_entries(Some((worktree_id, entry_id)), cx); + cx.notify(); + break; + } + Err(_) => { + if let Some(parent_entry) = + entry.path.parent().and_then(|p| worktree.entry_for_path(p)) + { + entry = parent_entry; + } else { break; } - Err(_) => { - if let Some(parent_entry) = - entry.path.parent().and_then(|p| worktree.entry_for_path(p)) - { - entry = parent_entry; - } else { - break; - } - } } } } @@ -1727,6 +1742,7 @@ impl ProjectPanel { .copied() .unwrap_or(id) } + pub fn selected_entry<'a>( &self, cx: &'a AppContext, @@ -2144,6 +2160,74 @@ impl ProjectPanel { } } + fn index_for_entry( + &self, + entry_id: ProjectEntryId, + worktree_id: WorktreeId, + ) -> Option<(usize, usize, usize)> { + let mut worktree_ix = 0; + let mut total_ix = 0; + for (current_worktree_id, visible_worktree_entries, _) in &self.visible_entries { + if worktree_id != *current_worktree_id { + total_ix += visible_worktree_entries.len(); + worktree_ix += 1; + continue; + } + + return visible_worktree_entries + .iter() + .enumerate() + .find(|(_, entry)| entry.id == entry_id) + .map(|(ix, _)| (worktree_ix, ix, total_ix + ix)); + } + None + } + + fn entry_at_index(&self, index: usize) -> Option<(WorktreeId, &Entry)> { + let mut offset = 0; + for (worktree_id, visible_worktree_entries, _) in &self.visible_entries { + if visible_worktree_entries.len() > offset + index { + return visible_worktree_entries + .get(index) + .map(|entry| (*worktree_id, entry)); + } + offset += visible_worktree_entries.len(); + } + None + } + + fn iter_visible_entries( + &self, + range: Range, + cx: &mut ViewContext, + mut callback: impl FnMut(&Entry, &HashSet>, &mut ViewContext), + ) { + let mut ix = 0; + for (_, visible_worktree_entries, 
entries_paths) in &self.visible_entries { + if ix >= range.end { + return; + } + + if ix + visible_worktree_entries.len() <= range.start { + ix += visible_worktree_entries.len(); + continue; + } + + let end_ix = range.end.min(ix + visible_worktree_entries.len()); + let entry_range = range.start.saturating_sub(ix)..end_ix - ix; + let entries = entries_paths.get_or_init(|| { + visible_worktree_entries + .iter() + .map(|e| (e.path.clone())) + .collect() + }); + for entry in visible_worktree_entries[entry_range].iter() { + callback(entry, entries, cx); + } + ix = end_ix; + } + } + fn for_each_visible_entry( &self, range: Range, @@ -2816,6 +2900,70 @@ impl ProjectPanel { cx.notify(); } } + + fn find_active_indent_guide( + &self, + indent_guides: &[IndentGuideLayout], + cx: &AppContext, + ) -> Option { + let (worktree, entry) = self.selected_entry(cx)?; + + // Find the parent entry of the indent guide, this will either be the + // expanded folder we have selected, or the parent of the currently + // selected file/collapsed directory + let mut entry = entry; + loop { + let is_expanded_dir = entry.is_dir() + && self + .expanded_dir_ids + .get(&worktree.id()) + .map(|ids| ids.binary_search(&entry.id).is_ok()) + .unwrap_or(false); + if is_expanded_dir { + break; + } + entry = worktree.entry_for_path(&entry.path.parent()?)?; + } + + let (active_indent_range, depth) = { + let (worktree_ix, child_offset, ix) = self.index_for_entry(entry.id, worktree.id())?; + let child_paths = &self.visible_entries[worktree_ix].1; + let mut child_count = 0; + let depth = entry.path.ancestors().count(); + while let Some(entry) = child_paths.get(child_offset + child_count + 1) { + if entry.path.ancestors().count() <= depth { + break; + } + child_count += 1; + } + + let start = ix + 1; + let end = start + child_count; + + let (_, entries, paths) = &self.visible_entries[worktree_ix]; + let visible_worktree_entries = + paths.get_or_init(|| entries.iter().map(|e| (e.path.clone())).collect()); + + // Calculate the actual depth of the entry, taking into account that directories can be auto-folded. 
+ let (depth, _) = Self::calculate_depth_and_difference(entry, visible_worktree_entries); + (start..end, depth) + }; + + let candidates = indent_guides + .iter() + .enumerate() + .filter(|(_, indent_guide)| indent_guide.offset.x == depth); + + for (i, indent) in candidates { + // Find matches that are either an exact match, partially on screen, or inside the enclosing indent + if active_indent_range.start <= indent.offset.y + indent.length + && indent.offset.y <= active_indent_range.end + { + return Some(i); + } + } + None + } } fn item_width_estimate(depth: usize, item_text_chars: usize, is_symlink: bool) -> usize { @@ -2831,6 +2979,8 @@ impl Render for ProjectPanel { fn render(&mut self, cx: &mut gpui::ViewContext) -> impl IntoElement { let has_worktree = !self.visible_entries.is_empty(); let project = self.project.read(cx); + let indent_size = ProjectPanelSettings::get_global(cx).indent_size; + let indent_guides = ProjectPanelSettings::get_global(cx).indent_guides; let is_local = project.is_local(); if has_worktree { @@ -2934,6 +3084,103 @@ impl Render for ProjectPanel { items } }) + .when(indent_guides, |list| { + list.with_decoration( + ui::indent_guides( + cx.view().clone(), + px(indent_size), + IndentGuideColors::panel(cx), + |this, range, cx| { + let mut items = + SmallVec::with_capacity(range.end - range.start); + this.iter_visible_entries(range, cx, |entry, entries, _| { + let (depth, _) = + Self::calculate_depth_and_difference(entry, entries); + items.push(depth); + }); + items + }, + ) + .on_click(cx.listener( + |this, active_indent_guide: &IndentGuideLayout, cx| { + if cx.modifiers().secondary() { + let ix = active_indent_guide.offset.y; + let Some((target_entry, worktree)) = maybe!({ + let (worktree_id, entry) = this.entry_at_index(ix)?; + let worktree = this + .project + .read(cx) + .worktree_for_id(worktree_id, cx)?; + let target_entry = worktree + .read(cx) + .entry_for_path(&entry.path.parent()?)?; + Some((target_entry, worktree)) + }) else { + return; + }; + + this.collapse_entry(target_entry.clone(), worktree, cx); + } + }, + )) + .with_render_fn( + cx.view().clone(), + move |this, params, cx| { + const LEFT_OFFSET: f32 = 14.; + const PADDING_Y: f32 = 4.; + const HITBOX_OVERDRAW: f32 = 3.; + + let active_indent_guide_index = + this.find_active_indent_guide(¶ms.indent_guides, cx); + + let indent_size = params.indent_size; + let item_height = params.item_height; + + params + .indent_guides + .into_iter() + .enumerate() + .map(|(idx, layout)| { + let offset = if layout.continues_offscreen { + px(0.) + } else { + px(PADDING_Y) + }; + let bounds = Bounds::new( + point( + px(layout.offset.x as f32) * indent_size + + px(LEFT_OFFSET), + px(layout.offset.y as f32) * item_height + + offset, + ), + size( + px(1.), + px(layout.length as f32) * item_height + - px(offset.0 * 2.), + ), + ); + ui::RenderedIndentGuide { + bounds, + layout, + is_active: Some(idx) == active_indent_guide_index, + hitbox: Some(Bounds::new( + point( + bounds.origin.x - px(HITBOX_OVERDRAW), + bounds.origin.y, + ), + size( + bounds.size.width + + px(2. 
* HITBOX_OVERDRAW), + bounds.size.height, + ), + )), + } + }) + .collect() + }, + ), + ) + }) .size_full() .with_sizing_behavior(ListSizingBehavior::Infer) .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 0114b3968d4db51ecca3d1cdf5de3689946781c0..16980c00d18b9839dbabb74e5185942c772b72f3 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -20,6 +20,7 @@ pub struct ProjectPanelSettings { pub folder_icons: bool, pub git_status: bool, pub indent_size: f32, + pub indent_guides: bool, pub auto_reveal_entries: bool, pub auto_fold_dirs: bool, pub scrollbar: ScrollbarSettings, @@ -71,6 +72,10 @@ pub struct ProjectPanelSettingsContent { /// /// Default: 20 pub indent_size: Option, + /// Whether to show indent guides in the project panel. + /// + /// Default: true + pub indent_guides: Option, /// Whether to reveal it in the project panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. diff --git a/crates/storybook/src/stories/indent_guides.rs b/crates/storybook/src/stories/indent_guides.rs new file mode 100644 index 0000000000000000000000000000000000000000..cd4d9d7f58eead03e2bbbf38b06fe265fc25d9a8 --- /dev/null +++ b/crates/storybook/src/stories/indent_guides.rs @@ -0,0 +1,83 @@ +use std::fmt::format; + +use gpui::{ + colors, div, prelude::*, uniform_list, DefaultColor, DefaultThemeAppearance, Hsla, Render, + View, ViewContext, WindowContext, +}; +use story::Story; +use strum::IntoEnumIterator; +use ui::{ + h_flex, px, v_flex, AbsoluteLength, ActiveTheme, Color, DefiniteLength, Label, LabelCommon, +}; + +const LENGTH: usize = 100; + +pub struct IndentGuidesStory { + depths: Vec, +} + +impl IndentGuidesStory { + pub fn view(cx: &mut WindowContext) -> View { + let mut depths = Vec::new(); + depths.push(0); + depths.push(1); + depths.push(2); + for _ in 0..LENGTH - 6 { + depths.push(3); + } + depths.push(2); + depths.push(1); + depths.push(0); + + cx.new_view(|_cx| Self { depths }) + } +} + +impl Render for IndentGuidesStory { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + Story::container() + .child(Story::title("Indent guides")) + .child( + v_flex().size_full().child( + uniform_list( + cx.view().clone(), + "some-list", + self.depths.len(), + |this, range, cx| { + this.depths + .iter() + .enumerate() + .skip(range.start) + .take(range.end - range.start) + .map(|(i, depth)| { + div() + .pl(DefiniteLength::Absolute(AbsoluteLength::Pixels(px( + 16. 
* (*depth as f32), + )))) + .child(Label::new(format!("Item {}", i)).color(Color::Info)) + }) + .collect() + }, + ) + .with_sizing_behavior(gpui::ListSizingBehavior::Infer) + .with_decoration(ui::indent_guides( + cx.view().clone(), + px(16.), + ui::IndentGuideColors { + default: Color::Info.color(cx), + hovered: Color::Accent.color(cx), + active: Color::Accent.color(cx), + }, + |this, range, cx| { + this.depths + .iter() + .skip(range.start) + .take(range.end - range.start) + .cloned() + .collect() + }, + )), + ), + ) + } +} diff --git a/crates/theme/src/default_colors.rs b/crates/theme/src/default_colors.rs index 49c216c0e07e78b02b33d409f4518ae3ad28f34d..05dd6cd1e74fd3b78a9cd42b7130df973624f1af 100644 --- a/crates/theme/src/default_colors.rs +++ b/crates/theme/src/default_colors.rs @@ -59,6 +59,9 @@ impl ThemeColors { search_match_background: neutral().light().step_5(), panel_background: neutral().light().step_2(), panel_focused_border: blue().light().step_5(), + panel_indent_guide: neutral().light_alpha().step_5(), + panel_indent_guide_hover: neutral().light_alpha().step_6(), + panel_indent_guide_active: neutral().light_alpha().step_6(), pane_focused_border: blue().light().step_5(), pane_group_border: neutral().light().step_6(), scrollbar_thumb_background: neutral().light_alpha().step_3(), @@ -162,6 +165,9 @@ impl ThemeColors { search_match_background: neutral().dark().step_5(), panel_background: neutral().dark().step_2(), panel_focused_border: blue().dark().step_5(), + panel_indent_guide: neutral().dark_alpha().step_4(), + panel_indent_guide_hover: neutral().dark_alpha().step_6(), + panel_indent_guide_active: neutral().dark_alpha().step_6(), pane_focused_border: blue().dark().step_5(), pane_group_border: neutral().dark().step_6(), scrollbar_thumb_background: neutral().dark_alpha().step_3(), diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index 553c75623381d644a26a2106bad245bf9c33ef35..9f665ea965d3794b9853211967211d838b806841 100644 --- a/crates/theme/src/fallback_themes.rs +++ b/crates/theme/src/fallback_themes.rs @@ -136,6 +136,9 @@ pub(crate) fn zed_default_dark() -> Theme { terminal_ansi_dim_white: crate::neutral().dark().step_10(), panel_background: bg, panel_focused_border: blue, + panel_indent_guide: hsla(228. / 360., 8. / 100., 25. / 100., 1.), + panel_indent_guide_hover: hsla(225. / 360., 13. / 100., 12. / 100., 1.), + panel_indent_guide_active: hsla(225. / 360., 13. / 100., 12. / 100., 1.), pane_focused_border: blue, pane_group_border: hsla(225. / 360., 13. / 100., 12. 
/ 100., 1.), scrollbar_thumb_background: gpui::transparent_black(), diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index af334d8aed54b9a9b9169a038445c5924ce6d65e..88e24f08ff56322edfcd6f3857cf3363f99e700d 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -322,6 +322,15 @@ pub struct ThemeColorsContent { #[serde(rename = "panel.focused_border")] pub panel_focused_border: Option, + #[serde(rename = "panel.indent_guide")] + pub panel_indent_guide: Option, + + #[serde(rename = "panel.indent_guide_hover")] + pub panel_indent_guide_hover: Option, + + #[serde(rename = "panel.indent_guide_active")] + pub panel_indent_guide_active: Option, + #[serde(rename = "pane.focused_border")] pub pane_focused_border: Option, @@ -710,6 +719,18 @@ impl ThemeColorsContent { .panel_focused_border .as_ref() .and_then(|color| try_parse_color(color).ok()), + panel_indent_guide: self + .panel_indent_guide + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + panel_indent_guide_hover: self + .panel_indent_guide_hover + .as_ref() + .and_then(|color| try_parse_color(color).ok()), + panel_indent_guide_active: self + .panel_indent_guide_active + .as_ref() + .and_then(|color| try_parse_color(color).ok()), pane_focused_border: self .pane_focused_border .as_ref() diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 881a68334dcf647325a25dc844d504da31d6082c..485a8e4b9e13d6b82545793c893da911db78e01d 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -123,6 +123,9 @@ pub struct ThemeColors { pub search_match_background: Hsla, pub panel_background: Hsla, pub panel_focused_border: Hsla, + pub panel_indent_guide: Hsla, + pub panel_indent_guide_hover: Hsla, + pub panel_indent_guide_active: Hsla, pub pane_focused_border: Hsla, pub pane_group_border: Hsla, /// The color of the scrollbar thumb. diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index 98d103e163b45b0965e55ac880f48e589546fb14..7a13ff6917f886e0667751d43c7c8696d799112d 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -8,6 +8,7 @@ mod dropdown_menu; mod facepile; mod icon; mod image; +mod indent_guides; mod indicator; mod keybinding; mod label; @@ -40,6 +41,7 @@ pub use dropdown_menu::*; pub use facepile::*; pub use icon::*; pub use image::*; +pub use indent_guides::*; pub use indicator::*; pub use keybinding::*; pub use label::*; diff --git a/crates/ui/src/components/indent_guides.rs b/crates/ui/src/components/indent_guides.rs new file mode 100644 index 0000000000000000000000000000000000000000..e45404429ce4f92897491b8981e9d19dafbe5d03 --- /dev/null +++ b/crates/ui/src/components/indent_guides.rs @@ -0,0 +1,504 @@ +#![allow(missing_docs)] +use std::{cmp::Ordering, ops::Range, rc::Rc}; + +use gpui::{ + fill, point, size, AnyElement, AppContext, Bounds, Hsla, Point, UniformListDecoration, View, +}; +use smallvec::SmallVec; + +use crate::prelude::*; + +/// Represents the colors used for different states of indent guides. +#[derive(Debug, Clone)] +pub struct IndentGuideColors { + /// The color of the indent guide when it's neither active nor hovered. + pub default: Hsla, + /// The color of the indent guide when it's hovered. + pub hover: Hsla, + /// The color of the indent guide when it's active. + pub active: Hsla, +} + +impl IndentGuideColors { + /// Returns the indent guide colors that should be used for panels. 
+ pub fn panel(cx: &AppContext) -> Self { + Self { + default: cx.theme().colors().panel_indent_guide, + hover: cx.theme().colors().panel_indent_guide_hover, + active: cx.theme().colors().panel_indent_guide_active, + } + } +} + +pub struct IndentGuides { + colors: IndentGuideColors, + indent_size: Pixels, + compute_indents_fn: Box, &mut WindowContext) -> SmallVec<[usize; 64]>>, + render_fn: Option< + Box< + dyn Fn( + RenderIndentGuideParams, + &mut WindowContext, + ) -> SmallVec<[RenderedIndentGuide; 12]>, + >, + >, + on_click: Option>, +} + +pub fn indent_guides( + view: View, + indent_size: Pixels, + colors: IndentGuideColors, + compute_indents_fn: impl Fn(&mut V, Range, &mut ViewContext) -> SmallVec<[usize; 64]> + + 'static, +) -> IndentGuides { + let compute_indents_fn = Box::new(move |range, cx: &mut WindowContext| { + view.update(cx, |this, cx| compute_indents_fn(this, range, cx)) + }); + IndentGuides { + colors, + indent_size, + compute_indents_fn, + render_fn: None, + on_click: None, + } +} + +impl IndentGuides { + /// Sets the callback that will be called when the user clicks on an indent guide. + pub fn on_click( + mut self, + on_click: impl Fn(&IndentGuideLayout, &mut WindowContext) + 'static, + ) -> Self { + self.on_click = Some(Rc::new(on_click)); + self + } + + /// Sets a custom callback that will be called when the indent guides need to be rendered. + pub fn with_render_fn( + mut self, + view: View, + render_fn: impl Fn( + &mut V, + RenderIndentGuideParams, + &mut WindowContext, + ) -> SmallVec<[RenderedIndentGuide; 12]> + + 'static, + ) -> Self { + let render_fn = move |params, cx: &mut WindowContext| { + view.update(cx, |this, cx| render_fn(this, params, cx)) + }; + self.render_fn = Some(Box::new(render_fn)); + self + } +} + +/// Parameters for rendering indent guides. +pub struct RenderIndentGuideParams { + /// The calculated layouts for the indent guides to be rendered. + pub indent_guides: SmallVec<[IndentGuideLayout; 12]>, + /// The size of each indentation level in pixels. + pub indent_size: Pixels, + /// The height of each item in pixels. + pub item_height: Pixels, +} + +/// Represents a rendered indent guide with its visual properties and interaction areas. +pub struct RenderedIndentGuide { + /// The bounds of the rendered indent guide in pixels. + pub bounds: Bounds, + /// The layout information for the indent guide. + pub layout: IndentGuideLayout, + /// Indicates whether the indent guide is currently active. + pub is_active: bool, + /// Can be used to customize the hitbox of the indent guide, + /// if this is set to `None`, the bounds of the indent guide will be used. + pub hitbox: Option>, +} + +/// Represents the layout information for an indent guide. +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct IndentGuideLayout { + /// The starting position of the indent guide, where x is the indentation level + /// and y is the starting row. + pub offset: Point, + /// The length of the indent guide in rows. + pub length: usize, + /// Indicates whether the indent guide continues beyond the visible bounds. + pub continues_offscreen: bool, +} + +/// Implements the necessary functionality for rendering indent guides inside a uniform list. 
+mod uniform_list { + use gpui::{DispatchPhase, Hitbox, MouseButton, MouseDownEvent, MouseMoveEvent}; + + use super::*; + + impl UniformListDecoration for IndentGuides { + fn compute( + &self, + visible_range: Range, + bounds: Bounds, + item_height: Pixels, + cx: &mut WindowContext, + ) -> AnyElement { + let mut visible_range = visible_range.clone(); + visible_range.end += 1; + let visible_entries = &(self.compute_indents_fn)(visible_range.clone(), cx); + // Check if we have an additional indent that is outside of the visible range + let includes_trailing_indent = visible_entries.len() == visible_range.len(); + let indent_guides = compute_indent_guides( + &visible_entries, + visible_range.start, + includes_trailing_indent, + ); + let mut indent_guides = if let Some(ref custom_render) = self.render_fn { + let params = RenderIndentGuideParams { + indent_guides, + indent_size: self.indent_size, + item_height, + }; + custom_render(params, cx) + } else { + indent_guides + .into_iter() + .map(|layout| RenderedIndentGuide { + bounds: Bounds::new( + point( + px(layout.offset.x as f32) * self.indent_size, + px(layout.offset.y as f32) * item_height, + ), + size(px(1.), px(layout.length as f32) * item_height), + ), + layout, + is_active: false, + hitbox: None, + }) + .collect() + }; + for guide in &mut indent_guides { + guide.bounds.origin += bounds.origin; + if let Some(hitbox) = guide.hitbox.as_mut() { + hitbox.origin += bounds.origin; + } + } + + let indent_guides = IndentGuidesElement { + indent_guides: Rc::new(indent_guides), + colors: self.colors.clone(), + on_hovered_indent_guide_click: self.on_click.clone(), + }; + indent_guides.into_any_element() + } + } + + struct IndentGuidesElement { + colors: IndentGuideColors, + indent_guides: Rc>, + on_hovered_indent_guide_click: Option>, + } + + struct IndentGuidesElementPrepaintState { + hitboxes: SmallVec<[Hitbox; 12]>, + } + + impl Element for IndentGuidesElement { + type RequestLayoutState = (); + type PrepaintState = IndentGuidesElementPrepaintState; + + fn id(&self) -> Option { + None + } + + fn request_layout( + &mut self, + _id: Option<&gpui::GlobalElementId>, + cx: &mut WindowContext, + ) -> (gpui::LayoutId, Self::RequestLayoutState) { + (cx.request_layout(gpui::Style::default(), []), ()) + } + + fn prepaint( + &mut self, + _id: Option<&gpui::GlobalElementId>, + _bounds: Bounds, + _request_layout: &mut Self::RequestLayoutState, + cx: &mut WindowContext, + ) -> Self::PrepaintState { + let mut hitboxes = SmallVec::new(); + for guide in self.indent_guides.as_ref().iter() { + hitboxes.push(cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false)); + } + Self::PrepaintState { hitboxes } + } + + fn paint( + &mut self, + _id: Option<&gpui::GlobalElementId>, + _bounds: Bounds, + _request_layout: &mut Self::RequestLayoutState, + prepaint: &mut Self::PrepaintState, + cx: &mut WindowContext, + ) { + let callback = self.on_hovered_indent_guide_click.clone(); + if let Some(callback) = callback { + cx.on_mouse_event({ + let hitboxes = prepaint.hitboxes.clone(); + let indent_guides = self.indent_guides.clone(); + move |event: &MouseDownEvent, phase, cx| { + if phase == DispatchPhase::Bubble && event.button == MouseButton::Left { + let mut active_hitbox_ix = None; + for (i, hitbox) in hitboxes.iter().enumerate() { + if hitbox.is_hovered(cx) { + active_hitbox_ix = Some(i); + break; + } + } + + let Some(active_hitbox_ix) = active_hitbox_ix else { + return; + }; + + let active_indent_guide = &indent_guides[active_hitbox_ix].layout; + 
callback(active_indent_guide, cx); + + cx.stop_propagation(); + cx.prevent_default(); + } + } + }); + } + + let mut hovered_hitbox_id = None; + for (i, hitbox) in prepaint.hitboxes.iter().enumerate() { + cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox); + let indent_guide = &self.indent_guides[i]; + let fill_color = if hitbox.is_hovered(cx) { + hovered_hitbox_id = Some(hitbox.id); + self.colors.hover + } else if indent_guide.is_active { + self.colors.active + } else { + self.colors.default + }; + + cx.paint_quad(fill(indent_guide.bounds, fill_color)); + } + + cx.on_mouse_event({ + let prev_hovered_hitbox_id = hovered_hitbox_id; + let hitboxes = prepaint.hitboxes.clone(); + move |_: &MouseMoveEvent, phase, cx| { + let mut hovered_hitbox_id = None; + for hitbox in &hitboxes { + if hitbox.is_hovered(cx) { + hovered_hitbox_id = Some(hitbox.id); + break; + } + } + if phase == DispatchPhase::Capture { + // If the hovered hitbox has changed, we need to re-paint the indent guides. + match (prev_hovered_hitbox_id, hovered_hitbox_id) { + (Some(prev_id), Some(id)) => { + if prev_id != id { + cx.refresh(); + } + } + (None, Some(_)) => { + cx.refresh(); + } + (Some(_), None) => { + cx.refresh(); + } + (None, None) => {} + } + } + } + }); + } + } + + impl IntoElement for IndentGuidesElement { + type Element = Self; + + fn into_element(self) -> Self::Element { + self + } + } +} + +fn compute_indent_guides( + indents: &[usize], + offset: usize, + includes_trailing_indent: bool, +) -> SmallVec<[IndentGuideLayout; 12]> { + let mut indent_guides = SmallVec::<[IndentGuideLayout; 12]>::new(); + let mut indent_stack = SmallVec::<[IndentGuideLayout; 8]>::new(); + + let mut min_depth = usize::MAX; + for (row, &depth) in indents.iter().enumerate() { + if includes_trailing_indent && row == indents.len() - 1 { + continue; + } + + let current_row = row + offset; + let current_depth = indent_stack.len(); + if depth < min_depth { + min_depth = depth; + } + + match depth.cmp(¤t_depth) { + Ordering::Less => { + for _ in 0..(current_depth - depth) { + if let Some(guide) = indent_stack.pop() { + indent_guides.push(guide); + } + } + } + Ordering::Greater => { + for new_depth in current_depth..depth { + indent_stack.push(IndentGuideLayout { + offset: Point::new(new_depth, current_row), + length: current_row, + continues_offscreen: false, + }); + } + } + _ => {} + } + + for indent in indent_stack.iter_mut() { + indent.length = current_row - indent.offset.y + 1; + } + } + + indent_guides.extend(indent_stack); + + for guide in indent_guides.iter_mut() { + if includes_trailing_indent + && guide.offset.y + guide.length == offset + indents.len().saturating_sub(1) + { + guide.continues_offscreen = indents + .last() + .map(|last_indent| guide.offset.x < *last_indent) + .unwrap_or(false); + } + } + + indent_guides +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_compute_indent_guides() { + fn assert_compute_indent_guides( + input: &[usize], + offset: usize, + includes_trailing_indent: bool, + expected: Vec, + ) { + use std::collections::HashSet; + assert_eq!( + compute_indent_guides(input, offset, includes_trailing_indent) + .into_vec() + .into_iter() + .collect::>(), + expected.into_iter().collect::>(), + ); + } + + assert_compute_indent_guides( + &[0, 1, 2, 2, 1, 0], + 0, + false, + vec![ + IndentGuideLayout { + offset: Point::new(0, 1), + length: 4, + continues_offscreen: false, + }, + IndentGuideLayout { + offset: Point::new(1, 2), + length: 2, + continues_offscreen: false, + }, + ], + ); + + 
assert_compute_indent_guides( + &[2, 2, 2, 1, 1], + 0, + false, + vec![ + IndentGuideLayout { + offset: Point::new(0, 0), + length: 5, + continues_offscreen: false, + }, + IndentGuideLayout { + offset: Point::new(1, 0), + length: 3, + continues_offscreen: false, + }, + ], + ); + + assert_compute_indent_guides( + &[1, 2, 3, 2, 1], + 0, + false, + vec![ + IndentGuideLayout { + offset: Point::new(0, 0), + length: 5, + continues_offscreen: false, + }, + IndentGuideLayout { + offset: Point::new(1, 1), + length: 3, + continues_offscreen: false, + }, + IndentGuideLayout { + offset: Point::new(2, 2), + length: 1, + continues_offscreen: false, + }, + ], + ); + + assert_compute_indent_guides( + &[0, 1, 0], + 0, + true, + vec![IndentGuideLayout { + offset: Point::new(0, 1), + length: 1, + continues_offscreen: false, + }], + ); + + assert_compute_indent_guides( + &[0, 1, 1], + 0, + true, + vec![IndentGuideLayout { + offset: Point::new(0, 1), + length: 1, + continues_offscreen: true, + }], + ); + assert_compute_indent_guides( + &[0, 1, 2], + 0, + true, + vec![IndentGuideLayout { + offset: Point::new(0, 1), + length: 1, + continues_offscreen: true, + }], + ); + } +} From efc4d3efdf7455676bfb47db1b995291b344489c Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 24 Oct 2024 13:52:26 +0200 Subject: [PATCH 14/76] ssh remoting: Fix wrong working directory for SSH terminals (#19672) Before this change, we would save the working directory *on the client* of each shell that was running in a terminal. While it's technically right, it's wrong in all of these cases where `working_directory` was used: - in inline assistant - when resolving file paths in the terminal output - when serializing the current working dir and deserializing it on restart Release Notes: - Fixed terminals opened on remote hosts failing to deserialize with an error message after restarting Zed. 
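
To illustrate the idea described in this commit message, here is a minimal, hypothetical sketch (not the actual patch, which follows below) of how reporting the working directory can be gated on whether the terminal is connected over SSH; the real change instead threads an `is_ssh_terminal` flag through `Terminal`:

```rust
use std::path::PathBuf;

/// Hypothetical, simplified sketch: only report a client-side working directory
/// when the shell actually runs on the local machine. For SSH terminals the
/// client-side PTY cwd describes the wrong process, so `None` is returned and
/// callers (inline assistant, path resolution, serialization) fall back gracefully.
fn effective_working_directory(
    is_ssh_terminal: bool,
    client_side_cwd: Option<PathBuf>,
) -> Option<PathBuf> {
    if is_ssh_terminal {
        None
    } else {
        client_side_cwd
    }
}
```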
--- crates/project/src/terminals.rs | 1 + crates/terminal/src/terminal.rs | 28 ++++++++++++++++++----- crates/terminal_view/src/terminal_view.rs | 2 +- 3 files changed, 24 insertions(+), 7 deletions(-) diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 3d1821ce666e0dce062446ab78182e22cd115b4e..d5166105900c0cf158cd23c6a3fb36df6014280a 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -240,6 +240,7 @@ impl Project { settings.cursor_shape.unwrap_or_default(), settings.alternate_scroll, settings.max_scroll_history_lines, + ssh_details.is_some(), window, completion_tx, cx, diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 95f5555b7ef03219e03f05dbc4de0e9d745a6f2b..0b3e341485c6a843015f3f43ed398e22bb971167 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -330,6 +330,7 @@ impl TerminalBuilder { cursor_shape: CursorShape, alternate_scroll: AlternateScroll, max_scroll_history_lines: Option, + is_ssh_terminal: bool, window: AnyWindowHandle, completion_tx: Sender<()>, cx: &AppContext, @@ -469,6 +470,7 @@ impl TerminalBuilder { url_regex: RegexSearch::new(URL_REGEX).unwrap(), word_regex: RegexSearch::new(WORD_REGEX).unwrap(), vi_mode_enabled: false, + is_ssh_terminal, }; Ok(TerminalBuilder { @@ -626,6 +628,7 @@ pub struct Terminal { word_regex: RegexSearch, task: Option, vi_mode_enabled: bool, + is_ssh_terminal: bool, } pub struct TaskState { @@ -734,10 +737,6 @@ impl Terminal { self.selection_phase == SelectionPhase::Selecting } - pub fn get_cwd(&self) -> Option { - self.pty_info.current.as_ref().map(|info| info.cwd.clone()) - } - ///Takes events from Alacritty and translates them to behavior on this view fn process_terminal_event( &mut self, @@ -951,7 +950,7 @@ impl Terminal { } else { MaybeNavigationTarget::PathLike(PathLikeTarget { maybe_path: maybe_url_or_path, - terminal_dir: self.get_cwd(), + terminal_dir: self.working_directory(), }) }; cx.emit(Event::Open(target)); @@ -1006,7 +1005,7 @@ impl Terminal { } else { MaybeNavigationTarget::PathLike(PathLikeTarget { maybe_path: word, - terminal_dir: self.get_cwd(), + terminal_dir: self.working_directory(), }) }; cx.emit(Event::NewNavigationTarget(Some(navigation_target))); @@ -1636,6 +1635,23 @@ impl Terminal { } pub fn working_directory(&self) -> Option { + if self.is_ssh_terminal { + // We can't yet reliably detect the working directory of a shell on the + // SSH host. Until we can do that, it doesn't make sense to display + // the working directory on the client and persist that. + None + } else { + self.client_side_working_directory() + } + } + + /// Returns the working directory of the process that's connected to the PTY. + /// That means it returns the working directory of the local shell or program + /// that's running inside the terminal. + /// + /// This does *not* return the working directory of the shell that runs on the + /// remote host, in case Zed is connected to a remote host. 
+ fn client_side_working_directory(&self) -> Option { self.pty_info .current .as_ref() diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index b2cbae0dfe695fd8a14d215ee6b8e6e97745ed3d..eed8c8123b1be8956e2ca4a204805b28798b3dc1 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1192,7 +1192,7 @@ impl SerializableItem for TerminalView { return None; } - if let Some((cwd, workspace_id)) = terminal.get_cwd().zip(self.workspace_id) { + if let Some((cwd, workspace_id)) = terminal.working_directory().zip(self.workspace_id) { Some(cx.background_executor().spawn(async move { TERMINAL_DB .save_working_directory(item_id, workspace_id, cwd) From 3c6a5051665894de758ecced403a9a4ce0ffcb28 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 24 Oct 2024 09:35:59 -0300 Subject: [PATCH 15/76] docs: Add tweaks to the Remote Development page (#19674) Just making just we also add the other keybinding to open the Remote Projects dialog and capitalize every "SSH" mention for consistency. Tiny stuff! Release Notes: - N/A --- docs/src/remote-development.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index aacccf62278fa2d09929107257faaf2c0ba1fdbe..07f15379e77d1ada2fe30845d780634ec4f9481c 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -13,8 +13,8 @@ Remote development requires two computers, your local machine that runs the Zed ## Setup 1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview). You need at least Zed v0.159. -1. Open the remote projects dialogue with `cmd-shift-p remote`. -1. Click "New Server" and enter the command you use to ssh into the server. See [Supported SSH options](#supported-ssh-options) for options you can pass. +1. Open the remote projects dialogue with cmd-shift-p remote or cmd-control-o. +1. Click "Connect New Server" and enter the command you use to SSH into the server. See [Supported SSH options](#supported-ssh-options) for options you can pass. 1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, it will download the latest version of the Zed server and upload it to the remote over SSH. 1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server. > **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos. @@ -37,11 +37,11 @@ When opening a remote project there are three relevant settings locations: - The server Zed settings (in the same place) on the remote server. - The project settings (in `.zed/settings.json` or `.editorconfig` of your project) -Both the local Zed and the server Zed read the project settings, but they are not aware of the other's main settings.json. +Both the local Zed and the server Zed read the project settings, but they are not aware of the other's main `settings.json`. Depending on the kind of setting you want to make, which settings file you should use: -- Project settings should be used for things that affect the project: indentation settings, which formatter / language server to use etc. 
+- Project settings should be used for things that affect the project: indentation settings, which formatter / language server to use, etc. - Server settings should be used for things that affect the server: paths to language servers, etc. - Local settings should be used for things that affect the UI: font size, etc. @@ -67,9 +67,9 @@ If you are struggling with connection issues, you should be able to see more inf ## Supported SSH Options -Under the hood, Zed shells out to the `ssh` binary to connect to the remote server. We create one SSH control master per project, and use then use that to multiplex ssh connections for the Zed protocol itself, any terminals you open and tasks you run. We read settings from your ssh config file, but if you want to specify additional options to the ssh control master you can configure Zed to set them. +Under the hood, Zed shells out to the `ssh` binary to connect to the remote server. We create one SSH control master per project, and use then use that to multiplex SSH connections for the Zed protocol itself, any terminals you open and tasks you run. We read settings from your SSH config file, but if you want to specify additional options to the SSH control master you can configure Zed to set them. -When typing in the "New Server" dialogue, you can use bash-style quoting to pass options containing a space. Once you have created a server it will be added to the `"ssh_connections": []` array in your settings file. You can edit the settings file directly to make changes to SSH connections. +When typing in the "Connect New Server" dialogue, you can use bash-style quoting to pass options containing a space. Once you have created a server it will be added to the `"ssh_connections": []` array in your settings file. You can edit the settings file directly to make changes to SSH connections. 
Supported options: From b5aea548a8b73d4347b0693529a92405bf30e6d0 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 24 Oct 2024 09:43:35 -0300 Subject: [PATCH 16/76] ssh: Capitalize error and connection strings (#19675) Another tiny PR for the sake of consistency :) Release Notes: - N/A --- crates/recent_projects/src/ssh_connections.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index e0bd59a248db731f8a8bac46678688e59fd1c9a9..0718c4f983d5479b60450c20833bf29b990d4cdf 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -527,7 +527,7 @@ impl SshClientDelegate { .await .map_err(|e| { anyhow!( - "failed to get remote server binary download url (os: {}, arch: {}): {}", + "Failed to get remote server binary download url (os: {}, arch: {}): {}", platform.os, platform.arch, e @@ -542,7 +542,7 @@ impl SshClientDelegate { version, )) } else { - self.update_status(Some("checking for latest version of remote server"), cx); + self.update_status(Some("Checking for latest version of remote server"), cx); let binary_path = AutoUpdater::get_latest_remote_server_release( platform.os, platform.arch, @@ -552,7 +552,7 @@ impl SshClientDelegate { .await .map_err(|e| { anyhow!( - "failed to download remote server binary (os: {}, arch: {}): {}", + "Failed to download remote server binary (os: {}, arch: {}): {}", platform.os, platform.arch, e @@ -579,7 +579,7 @@ impl SshClientDelegate { .output() .await?; if !output.status.success() { - Err(anyhow!("failed to run command: {:?}", command))?; + Err(anyhow!("Failed to run command: {:?}", command))?; } Ok(()) } From 499e1459ebb2e05456cafcc9f4154d94f9a5f40a Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 24 Oct 2024 15:57:24 +0200 Subject: [PATCH 17/76] Fix crash in collab when sending worktree updates (#19678) This pull request does a couple of things: - In 29c2df73e1448bb510aebfbc954deb067fc88032, we introduced a safety guard that prevents this crash from happening again in the future by returning an error instead of panicking when the payload is too large. - In 3e7a2e5c3067c53bb12cd0c76c6e7b09af7c8fcf, we introduced chunking for updates coming from SSH servers (previously, we were sending the whole changeset and initial set of paths in their entirety). - In 122b5b4, we introduced a panic hook that sends panics to Axiom. 
For posterity, this is how we figured out what the panic was: ``` kubectl logs current-pod-name --previous --namespace=production ``` Release Notes: - N/A --------- Co-authored-by: Thorsten Co-authored-by: Bennet Co-authored-by: Kirill --- crates/collab/src/db/queries/projects.rs | 10 ++++++++++ crates/collab/src/main.rs | 19 +++++++++++++++++++ crates/collab/src/rpc.rs | 14 ++------------ crates/proto/src/proto.rs | 20 ++++++++++++++------ crates/worktree/src/worktree.rs | 22 ++++++++++++---------- 5 files changed, 57 insertions(+), 28 deletions(-) diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index b04d88242e40a60a642be232004892e6dc250db1..b03cead5891890327e8793b001ba67803529c40a 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -272,6 +272,16 @@ impl Database { update: &proto::UpdateWorktree, connection: ConnectionId, ) -> Result>> { + if update.removed_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE + || update.updated_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE + { + return Err(anyhow!( + "invalid worktree update. removed entries: {}, updated entries: {}", + update.removed_entries.len(), + update.updated_entries.len() + ))?; + } + let project_id = ProjectId::from_proto(update.project_id); let worktree_id = update.worktree_id as i64; self.project_transaction(project_id, |tx| async move { diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs index ee95b6d41f53500f1d3288efba03292e4d505fec..9e5c3dd048502194102a8023151058cc97af9f64 100644 --- a/crates/collab/src/main.rs +++ b/crates/collab/src/main.rs @@ -84,6 +84,8 @@ async fn main() -> Result<()> { let config = envy::from_env::().expect("error loading config"); init_tracing(&config); + init_panic_hook(); + let mut app = Router::new() .route("/", get(handle_root)) .route("/healthz", get(handle_liveness_probe)) @@ -378,3 +380,20 @@ pub fn init_tracing(config: &Config) -> Option<()> { None } + +fn init_panic_hook() { + std::panic::set_hook(Box::new(move |panic_info| { + let panic_message = match panic_info.payload().downcast_ref::<&'static str>() { + Some(message) => *message, + None => match panic_info.payload().downcast_ref::() { + Some(message) => message.as_str(), + None => "Box", + }, + }; + let backtrace = std::backtrace::Backtrace::force_capture(); + let location = panic_info + .location() + .map(|loc| format!("{}:{}", loc.file(), loc.line())); + tracing::error!(panic = true, ?location, %panic_message, %backtrace, "Server Panic"); + })); +} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index e805893567b907fc6090c3fc46bbe57bcb6756c4..c162129db6d2a6965533495e9ac66e2b5a4e2071 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1713,11 +1713,6 @@ fn notify_rejoined_projects( for project in rejoined_projects { for worktree in mem::take(&mut project.worktrees) { - #[cfg(any(test, feature = "test-support"))] - const MAX_CHUNK_SIZE: usize = 2; - #[cfg(not(any(test, feature = "test-support")))] - const MAX_CHUNK_SIZE: usize = 256; - // Stream this worktree's entries. 
let message = proto::UpdateWorktree { project_id: project.id.to_proto(), @@ -1731,7 +1726,7 @@ fn notify_rejoined_projects( updated_repositories: worktree.updated_repositories, removed_repositories: worktree.removed_repositories, }; - for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) { + for update in proto::split_worktree_update(message) { session.peer.send(session.connection_id, update.clone())?; } @@ -2195,11 +2190,6 @@ fn join_project_internal( })?; for (worktree_id, worktree) in mem::take(&mut project.worktrees) { - #[cfg(any(test, feature = "test-support"))] - const MAX_CHUNK_SIZE: usize = 2; - #[cfg(not(any(test, feature = "test-support")))] - const MAX_CHUNK_SIZE: usize = 256; - // Stream this worktree's entries. let message = proto::UpdateWorktree { project_id: project_id.to_proto(), @@ -2213,7 +2203,7 @@ fn join_project_internal( updated_repositories: worktree.repository_entries.into_values().collect(), removed_repositories: Default::default(), }; - for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) { + for update in proto::split_worktree_update(message) { session.peer.send(session.connection_id, update.clone())?; } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index d60cd3cd87e4f1700a425b1cce8496187ddd604c..85d9f572ee5ad4646218b88cd552b8889cae3a59 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -630,10 +630,12 @@ impl From for u128 { } } -pub fn split_worktree_update( - mut message: UpdateWorktree, - max_chunk_size: usize, -) -> impl Iterator { +#[cfg(any(test, feature = "test-support"))] +pub const MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE: usize = 2; +#[cfg(not(any(test, feature = "test-support")))] +pub const MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE: usize = 256; + +pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator { let mut done_files = false; let mut repository_map = message @@ -647,13 +649,19 @@ pub fn split_worktree_update( return None; } - let updated_entries_chunk_size = cmp::min(message.updated_entries.len(), max_chunk_size); + let updated_entries_chunk_size = cmp::min( + message.updated_entries.len(), + MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE, + ); let updated_entries: Vec<_> = message .updated_entries .drain(..updated_entries_chunk_size) .collect(); - let removed_entries_chunk_size = cmp::min(message.removed_entries.len(), max_chunk_size); + let removed_entries_chunk_size = cmp::min( + message.removed_entries.len(), + MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE, + ); let removed_entries = message .removed_entries .drain(..removed_entries_chunk_size) diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index abf1f744a3e25d2f8210699ab20724982fc53873..722a7b3f0abfd44f2109c81296a4b01c3bf364a4 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -36,7 +36,10 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; -use rpc::{proto, AnyProtoClient}; +use rpc::{ + proto::{self, split_worktree_update}, + AnyProtoClient, +}; pub use settings::WorktreeId; use settings::{Settings, SettingsLocation, SettingsStore}; use smallvec::{smallvec, SmallVec}; @@ -1721,11 +1724,6 @@ impl LocalWorktree { F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut, Fut: Send + Future, { - #[cfg(any(test, feature = "test-support"))] - const MAX_CHUNK_SIZE: usize = 2; - #[cfg(not(any(test, feature = "test-support")))] - const MAX_CHUNK_SIZE: usize = 256; - if let Some(observer) = self.update_observer.as_mut() { *observer.resume_updates.borrow_mut() = (); 
return; @@ -1751,7 +1749,7 @@ impl LocalWorktree { snapshot.build_update(project_id, worktree_id, entry_changes, repo_changes); } - for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) { + for update in proto::split_worktree_update(update) { let _ = resume_updates_rx.try_recv(); loop { let result = callback(update.clone()); @@ -1817,13 +1815,17 @@ impl RemoteWorktree { self.update_observer = Some(tx); cx.spawn(|this, mut cx| async move { let mut update = initial_update; - loop { + 'outer: loop { // SSH projects use a special project ID of 0, and we need to // remap it to the correct one here. update.project_id = project_id; - if !callback(update).await { - break; + + for chunk in split_worktree_update(update) { + if !callback(chunk).await { + break 'outer; + } } + if let Some(next_update) = rx.next().await { update = next_update; } else { From b5f816dde52432b48bc7ac8ab9cc15514317877a Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 24 Oct 2024 13:39:06 -0400 Subject: [PATCH 18/76] assistant: Add implementation for `/delta` argument completion (#19693) This PR fixes a panic that could occur when trying to complete arguments for the `/delta` slash command. We were using `unimplemented!()` instead of providing a default no-op implementation like we do for other slash commands that do not support completing arguments. Closes https://github.com/zed-industries/zed/issues/19686. Release Notes: - Fixed a panic that could occur when trying to complete arguments with the `/delta` command. --- crates/assistant/src/slash_command/delta_command.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/assistant/src/slash_command/delta_command.rs b/crates/assistant/src/slash_command/delta_command.rs index a37d33e2af5619c72925f416c9baafa0d2119d63..c9985d9f00e8dd409e248e7638e1459f8c0fbc2a 100644 --- a/crates/assistant/src/slash_command/delta_command.rs +++ b/crates/assistant/src/slash_command/delta_command.rs @@ -1,5 +1,5 @@ use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand}; -use anyhow::Result; +use anyhow::{anyhow, Result}; use assistant_slash_command::{ ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult, @@ -38,7 +38,7 @@ impl SlashCommand for DeltaSlashCommand { _workspace: Option>, _cx: &mut WindowContext, ) -> Task>> { - unimplemented!() + Task::ready(Err(anyhow!("this command does not require argument"))) } fn run( From 02718284efda71192d2d2d51931f9d295c563557 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 24 Oct 2024 11:14:03 -0700 Subject: [PATCH 19/76] Remove dev servers (#19638) TODO: - [ ] Check that workspace migration worked - [ ] Add server migrations and make sure SeaORM files are in sync (maybe?) 
Release Notes: - N/A --------- Co-authored-by: Conrad Co-authored-by: Conrad Irwin --- Cargo.lock | 45 - Cargo.toml | 4 - crates/assistant/src/assistant_panel.rs | 2 +- crates/call/src/room.rs | 27 +- crates/cli/src/cli.rs | 1 - crates/cli/src/main.rs | 7 +- crates/client/src/client.rs | 58 +- crates/client/src/test.rs | 4 +- crates/client/src/user.rs | 3 - crates/collab/Cargo.toml | 2 - crates/collab/src/auth.rs | 54 +- crates/collab/src/db.rs | 1 - crates/collab/src/db/ids.rs | 8 - crates/collab/src/db/queries.rs | 2 - .../src/db/queries/dev_server_projects.rs | 364 ----- crates/collab/src/db/queries/dev_servers.rs | 221 --- crates/collab/src/db/queries/projects.rs | 170 +-- crates/collab/src/db/queries/rooms.rs | 54 +- crates/collab/src/db/tables.rs | 2 - crates/collab/src/db/tables/dev_server.rs | 39 - .../src/db/tables/dev_server_project.rs | 59 - crates/collab/src/db/tables/project.rs | 15 +- crates/collab/src/db/tests/db_tests.rs | 6 +- crates/collab/src/rpc.rs | 1279 +++-------------- crates/collab/src/rpc/connection_pool.rs | 127 +- crates/collab/src/tests.rs | 1 - crates/collab/src/tests/dev_server_tests.rs | 643 --------- crates/collab/src/tests/test_server.rs | 135 +- crates/dev_server_projects/Cargo.toml | 23 - crates/dev_server_projects/LICENSE-GPL | 1 - .../src/dev_server_projects.rs | 248 ---- crates/headless/Cargo.toml | 37 - crates/headless/LICENSE-GPL | 1 - crates/headless/src/headless.rs | 397 ----- crates/project/Cargo.toml | 1 - crates/project/src/project.rs | 110 +- crates/project/src/terminals.rs | 28 +- crates/project/src/worktree_store.rs | 63 +- crates/project_panel/src/project_panel.rs | 13 +- crates/proto/proto/zed.proto | 141 +- crates/proto/src/proto.rs | 30 - crates/recent_projects/Cargo.toml | 4 - .../src/disconnected_overlay.rs | 74 +- crates/recent_projects/src/recent_projects.rs | 248 +--- crates/recent_projects/src/remote_servers.rs | 158 -- crates/title_bar/Cargo.toml | 1 - crates/title_bar/src/collab.rs | 5 +- crates/title_bar/src/title_bar.rs | 35 +- crates/workspace/Cargo.toml | 1 - crates/workspace/src/persistence.rs | 171 +-- crates/workspace/src/persistence/model.rs | 48 - crates/workspace/src/workspace.rs | 105 +- crates/zed/Cargo.toml | 2 - crates/zed/src/main.rs | 113 +- crates/zed/src/zed/open_listener.rs | 36 +- 55 files changed, 397 insertions(+), 5030 deletions(-) delete mode 100644 crates/collab/src/db/tables/dev_server.rs delete mode 100644 crates/collab/src/db/tables/dev_server_project.rs delete mode 100644 crates/collab/src/tests/dev_server_tests.rs delete mode 100644 crates/dev_server_projects/Cargo.toml delete mode 120000 crates/dev_server_projects/LICENSE-GPL delete mode 100644 crates/headless/Cargo.toml delete mode 120000 crates/headless/LICENSE-GPL delete mode 100644 crates/headless/src/headless.rs diff --git a/Cargo.lock b/Cargo.lock index 4e86627d803856f1230d66f29e5e2c3de0ef9246..f4e84f7a030eefc3375d305438b695a43c09dd6e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2550,7 +2550,6 @@ dependencies = [ "ctor", "dashmap 6.0.1", "derive_more", - "dev_server_projects", "editor", "env_logger", "envy", @@ -2561,7 +2560,6 @@ dependencies = [ "git_hosting_providers", "google_ai", "gpui", - "headless", "hex", "http_client", "hyper 0.14.30", @@ -3476,18 +3474,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "dev_server_projects" -version = "0.1.0" -dependencies = [ - "anyhow", - "client", - "gpui", - "rpc", - "serde", - "serde_json", -] - [[package]] name = "diagnostics" version = "0.1.0" @@ -5274,28 +5260,6 @@ dependencies = [ 
"http 0.2.12", ] -[[package]] -name = "headless" -version = "0.1.0" -dependencies = [ - "anyhow", - "client", - "extension", - "fs", - "futures 0.3.30", - "gpui", - "language", - "log", - "node_runtime", - "postage", - "project", - "proto", - "settings", - "shellexpand 2.1.2", - "signal-hook", - "util", -] - [[package]] name = "heck" version = "0.3.3" @@ -8443,7 +8407,6 @@ dependencies = [ "client", "clock", "collections", - "dev_server_projects", "env_logger", "fs", "futures 0.3.30", @@ -8981,8 +8944,6 @@ version = "0.1.0" dependencies = [ "anyhow", "auto_update", - "client", - "dev_server_projects", "editor", "file_finder", "futures 0.3.30", @@ -8999,14 +8960,12 @@ dependencies = [ "project", "release_channel", "remote", - "rpc", "schemars", "serde", "serde_json", "settings", "smol", "task", - "terminal_view", "theme", "ui", "util", @@ -11912,7 +11871,6 @@ dependencies = [ "client", "collections", "command_palette", - "dev_server_projects", "editor", "extensions_ui", "feature_flags", @@ -14309,7 +14267,6 @@ dependencies = [ "collections", "db", "derive_more", - "dev_server_projects", "env_logger", "fs", "futures 0.3.30", @@ -14628,7 +14585,6 @@ dependencies = [ "command_palette_hooks", "copilot", "db", - "dev_server_projects", "diagnostics", "editor", "env_logger", @@ -14644,7 +14600,6 @@ dependencies = [ "git_hosting_providers", "go_to_line", "gpui", - "headless", "http_client", "image_viewer", "inline_completion_button", diff --git a/Cargo.toml b/Cargo.toml index a64be70661041aa7c8f96c894c1de40f871de91a..732306a9aff4a3d9636863c599a68f5f1c456ade 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,7 +23,6 @@ members = [ "crates/context_servers", "crates/copilot", "crates/db", - "crates/dev_server_projects", "crates/diagnostics", "crates/docs_preprocessor", "crates/editor", @@ -45,7 +44,6 @@ members = [ "crates/google_ai", "crates/gpui", "crates/gpui_macros", - "crates/headless", "crates/html_to_markdown", "crates/http_client", "crates/image_viewer", @@ -201,7 +199,6 @@ command_palette_hooks = { path = "crates/command_palette_hooks" } context_servers = { path = "crates/context_servers" } copilot = { path = "crates/copilot" } db = { path = "crates/db" } -dev_server_projects = { path = "crates/dev_server_projects" } diagnostics = { path = "crates/diagnostics" } editor = { path = "crates/editor" } extension = { path = "crates/extension" } @@ -219,7 +216,6 @@ go_to_line = { path = "crates/go_to_line" } google_ai = { path = "crates/google_ai" } gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]} gpui_macros = { path = "crates/gpui_macros" } -headless = { path = "crates/headless" } html_to_markdown = { path = "crates/html_to_markdown" } http_client = { path = "crates/http_client" } image_viewer = { path = "crates/image_viewer" } diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index f9a277f8277d770d6481e08c5b5d8c621db49cea..d4da36adedeca2cd5ae9f57cbc4158e8ce8ac486 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -963,7 +963,7 @@ impl AssistantPanel { fn new_context(&mut self, cx: &mut ViewContext) -> Option> { let project = self.project.read(cx); - if project.is_via_collab() && project.dev_server_project_id().is_none() { + if project.is_via_collab() { let task = self .context_store .update(cx, |store, cx| store.create_remote_context(cx)); diff --git a/crates/call/src/room.rs b/crates/call/src/room.rs index 
a637bfd43fdf65a64948254ac05f92e5d3cefc10..3eb98f3109ff49576e10a1cdda38c37c64df6d14 100644 --- a/crates/call/src/room.rs +++ b/crates/call/src/room.rs @@ -1194,26 +1194,15 @@ impl Room { project: Model, cx: &mut ModelContext, ) -> Task> { - let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id() - { - self.client.request(proto::ShareProject { - room_id: self.id(), - worktrees: vec![], - dev_server_project_id: Some(dev_server_project_id.0), - is_ssh_project: false, - }) - } else { - if let Some(project_id) = project.read(cx).remote_id() { - return Task::ready(Ok(project_id)); - } + if let Some(project_id) = project.read(cx).remote_id() { + return Task::ready(Ok(project_id)); + } - self.client.request(proto::ShareProject { - room_id: self.id(), - worktrees: project.read(cx).worktree_metadata_protos(cx), - dev_server_project_id: None, - is_ssh_project: project.read(cx).is_via_ssh(), - }) - }; + let request = self.client.request(proto::ShareProject { + room_id: self.id(), + worktrees: project.read(cx).worktree_metadata_protos(cx), + is_ssh_project: project.read(cx).is_via_ssh(), + }); cx.spawn(|this, mut cx| async move { let response = request.await?; diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index 8e76ae759c66bf51a9529fcf14a5c44fba570b9a..9d23cf7ad5b0308a123e31cd081ec1d7beb45334 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -15,7 +15,6 @@ pub enum CliRequest { urls: Vec, wait: bool, open_new_workspace: Option, - dev_server_token: Option, env: Option>, }, } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index e69183d1ea98f7c6de8eb227efcc0a6ea7c30123..cb457b8a9d6ccd7c739ea56e6bbfe741ddb44edf 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -151,6 +151,12 @@ fn main() -> Result<()> { } } + if let Some(_) = args.dev_server_token { + return Err(anyhow::anyhow!( + "Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development" + ))?; + } + let sender: JoinHandle> = thread::spawn({ let exit_status = exit_status.clone(); move || { @@ -162,7 +168,6 @@ fn main() -> Result<()> { urls, wait: args.wait, open_new_workspace, - dev_server_token: args.dev_server_token, env, })?; diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 4a42554ebfda8c09b438c449528bb95004edffaf..b145ef99d00f76ddbe92143caad3570e9a49ad0b 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -30,7 +30,6 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; use socks::connect_socks_proxy_stream; -use std::fmt; use std::pin::Pin; use std::{ any::TypeId, @@ -54,15 +53,6 @@ pub use rpc::*; pub use telemetry_events::Event; pub use user::*; -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct DevServerToken(pub String); - -impl fmt::Display for DevServerToken { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } -} - static ZED_SERVER_URL: LazyLock> = LazyLock::new(|| std::env::var("ZED_SERVER_URL").ok()); static ZED_RPC_URL: LazyLock> = LazyLock::new(|| std::env::var("ZED_RPC_URL").ok()); @@ -304,20 +294,14 @@ struct ClientState { } #[derive(Clone, Debug, Eq, PartialEq)] -pub enum Credentials { - DevServer { token: DevServerToken }, - User { user_id: u64, access_token: String }, +pub struct Credentials { + pub user_id: u64, + pub access_token: String, } impl Credentials { pub fn authorization_header(&self) -> String { - match self { - 
Credentials::DevServer { token } => format!("dev-server-token {}", token), - Credentials::User { - user_id, - access_token, - } => format!("{} {}", user_id, access_token), - } + format!("{} {}", self.user_id, self.access_token) } } @@ -600,11 +584,11 @@ impl Client { } pub fn user_id(&self) -> Option { - if let Some(Credentials::User { user_id, .. }) = self.state.read().credentials.as_ref() { - Some(*user_id) - } else { - None - } + self.state + .read() + .credentials + .as_ref() + .map(|credentials| credentials.user_id) } pub fn peer_id(&self) -> Option { @@ -793,11 +777,6 @@ impl Client { .is_some() } - pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self { - self.state.write().credentials = Some(Credentials::DevServer { token }); - self - } - #[async_recursion(?Send)] pub async fn authenticate_and_connect( self: &Arc, @@ -848,9 +827,7 @@ impl Client { } } let credentials = credentials.unwrap(); - if let Credentials::User { user_id, .. } = &credentials { - self.set_id(*user_id); - } + self.set_id(credentials.user_id); if was_disconnected { self.set_status(Status::Connecting, cx); @@ -866,9 +843,8 @@ impl Client { Ok(conn) => { self.state.write().credentials = Some(credentials.clone()); if !read_from_provider && IMPERSONATE_LOGIN.is_none() { - if let Credentials::User{user_id, access_token} = credentials { - self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err(); - } + self.credentials_provider.write_credentials(credentials.user_id, credentials.access_token, cx).await.log_err(); + } futures::select_biased! { @@ -1301,7 +1277,7 @@ impl Client { .decrypt_string(&access_token) .context("failed to decrypt access token")?; - Ok(Credentials::User { + Ok(Credentials { user_id: user_id.parse()?, access_token, }) @@ -1422,7 +1398,7 @@ impl Client { // Use the admin API token to authenticate as the impersonated user. api_token.insert_str(0, "ADMIN_TOKEN:"); - Ok(Credentials::User { + Ok(Credentials { user_id: response.user.id, access_token: api_token, }) @@ -1667,7 +1643,7 @@ impl CredentialsProvider for DevelopmentCredentialsProvider { let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?; - Some(Credentials::User { + Some(Credentials { user_id: credentials.user_id, access_token: credentials.access_token, }) @@ -1721,7 +1697,7 @@ impl CredentialsProvider for KeychainCredentialsProvider { .await .log_err()??; - Some(Credentials::User { + Some(Credentials { user_id: user_id.parse().ok()?, access_token: String::from_utf8(access_token).ok()?, }) @@ -1855,7 +1831,7 @@ mod tests { // Time out when client tries to connect. 
client.override_authenticate(move |cx| { cx.background_executor().spawn(async move { - Ok(Credentials::User { + Ok(Credentials { user_id, access_token: "token".into(), }) diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs index bc39661e2901b311fde802ca57484e4ca2b8dfc8..5a93c5edd984c80661cd23ef0ffc37078b1c13a2 100644 --- a/crates/client/src/test.rs +++ b/crates/client/src/test.rs @@ -49,7 +49,7 @@ impl FakeServer { let mut state = state.lock(); state.auth_count += 1; let access_token = state.access_token.to_string(); - Ok(Credentials::User { + Ok(Credentials { user_id: client_user_id, access_token, }) @@ -73,7 +73,7 @@ impl FakeServer { } if credentials - != (Credentials::User { + != (Credentials { user_id: client_user_id, access_token: state.lock().access_token.to_string(), }) diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index a312dd349507f72443223eaae91296271409b3c1..f6ee279dc832206cc913bf6c1b71677956848eaa 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -28,9 +28,6 @@ impl std::fmt::Display for ChannelId { #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] pub struct ProjectId(pub u64); -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] -pub struct DevServerId(pub u64); - #[derive( Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, )] diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index ad2c013668193602de8eb6c9b2f156053b89dfa5..417353e39ded1b71e8d2e80d419a19fe539ec1a3 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -86,7 +86,6 @@ client = { workspace = true, features = ["test-support"] } collab_ui = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] } ctor.workspace = true -dev_server_projects.workspace = true editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true file_finder.workspace = true @@ -94,7 +93,6 @@ fs = { workspace = true, features = ["test-support"] } git = { workspace = true, features = ["test-support"] } git_hosting_providers.workspace = true gpui = { workspace = true, features = ["test-support"] } -headless.workspace = true hyper.workspace = true indoc.workspace = true language = { workspace = true, features = ["test-support"] } diff --git a/crates/collab/src/auth.rs b/crates/collab/src/auth.rs index 13c1c75fdeb1a9164c252bd5cdc6e759cc5ee7e0..1d7edd8172c6246d6044092ccf00b6358fb04a0a 100644 --- a/crates/collab/src/auth.rs +++ b/crates/collab/src/auth.rs @@ -1,5 +1,5 @@ use crate::{ - db::{self, dev_server, AccessTokenId, Database, DevServerId, UserId}, + db::{self, AccessTokenId, Database, UserId}, rpc::Principal, AppState, Error, Result, }; @@ -44,19 +44,10 @@ pub async fn validate_header(mut req: Request, next: Next) -> impl Into let first = auth_header.next().unwrap_or(""); if first == "dev-server-token" { - let dev_server_token = auth_header.next().ok_or_else(|| { - Error::http( - StatusCode::BAD_REQUEST, - "missing dev-server-token token in authorization header".to_string(), - ) - })?; - let dev_server = verify_dev_server_token(dev_server_token, &state.db) - .await - .map_err(|e| Error::http(StatusCode::UNAUTHORIZED, format!("{}", e)))?; - - req.extensions_mut() - .insert(Principal::DevServer(dev_server)); - return Ok::<_, Error>(next.run(req).await); + Err(Error::http( + StatusCode::UNAUTHORIZED, + "Dev servers were removed in Zed 0.157 please upgrade to SSH remoting".to_string(), + ))?; } let 
user_id = UserId(first.parse().map_err(|_| { @@ -240,41 +231,6 @@ pub async fn verify_access_token( }) } -pub fn generate_dev_server_token(id: usize, access_token: String) -> String { - format!("{}.{}", id, access_token) -} - -pub async fn verify_dev_server_token( - dev_server_token: &str, - db: &Arc, -) -> anyhow::Result { - let (id, token) = split_dev_server_token(dev_server_token)?; - let token_hash = hash_access_token(token); - let server = db.get_dev_server(id).await?; - - if server - .hashed_token - .as_bytes() - .ct_eq(token_hash.as_ref()) - .into() - { - Ok(server) - } else { - Err(anyhow!("wrong token for dev server")) - } -} - -// a dev_server_token has the format .. This is to make them -// relatively easy to copy/paste around. -pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> { - let mut parts = dev_server_token.splitn(2, '.'); - let id = DevServerId(parts.next().unwrap_or_default().parse()?); - let token = parts - .next() - .ok_or_else(|| anyhow!("invalid dev server token format"))?; - Ok((id, token)) -} - #[cfg(test)] mod test { use rand::thread_rng; diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index e9665484931434ca379027582f24337d0a212c83..9c02e0c801c826ef160d91448aa87389488e122b 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -726,7 +726,6 @@ pub struct Project { pub collaborators: Vec, pub worktrees: BTreeMap, pub language_servers: Vec, - pub dev_server_project_id: Option, } pub struct ProjectCollaborator { diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 3a5bcff558db838d57d3be558f374e45ac2201f0..698b1c5693337e4090d6c111988b41d596a3d23a 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -79,7 +79,6 @@ id_type!(ChannelChatParticipantId); id_type!(ChannelId); id_type!(ChannelMemberId); id_type!(ContactId); -id_type!(DevServerId); id_type!(ExtensionId); id_type!(FlagId); id_type!(FollowerId); @@ -89,7 +88,6 @@ id_type!(NotificationId); id_type!(NotificationKindId); id_type!(ProjectCollaboratorId); id_type!(ProjectId); -id_type!(DevServerProjectId); id_type!(ReplicaId); id_type!(RoomId); id_type!(RoomParticipantId); @@ -277,12 +275,6 @@ impl From for i32 { } } -#[derive(Copy, Clone, Debug, Serialize, PartialEq)] -pub enum PrincipalId { - UserId(UserId), - DevServerId(DevServerId), -} - /// Indicate whether a [Buffer] has permissions to edit. 
#[derive(PartialEq, Clone, Copy, Debug)] pub enum Capability { diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 9c277790f9f084ff411c36c946c5b8adddf33766..79523444ab276063c4c73a76f72e76cc3605c315 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -8,8 +8,6 @@ pub mod buffers; pub mod channels; pub mod contacts; pub mod contributors; -pub mod dev_server_projects; -pub mod dev_servers; pub mod embeddings; pub mod extensions; pub mod hosted_projects; diff --git a/crates/collab/src/db/queries/dev_server_projects.rs b/crates/collab/src/db/queries/dev_server_projects.rs index dae550b66801e6ed2d05dff4b3e5904eca11f9e2..8b137891791fe96927ad78e64b0aad7bded08bdc 100644 --- a/crates/collab/src/db/queries/dev_server_projects.rs +++ b/crates/collab/src/db/queries/dev_server_projects.rs @@ -1,365 +1 @@ -use anyhow::anyhow; -use rpc::{ - proto::{self}, - ConnectionId, -}; -use sea_orm::{ - ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait, - IntoActiveModel, ModelTrait, QueryFilter, -}; -use crate::db::ProjectId; - -use super::{ - dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId, - DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId, -}; - -impl Database { - pub async fn get_dev_server_project( - &self, - dev_server_project_id: DevServerProjectId, - ) -> crate::Result { - self.transaction(|tx| async move { - Ok( - dev_server_project::Entity::find_by_id(dev_server_project_id) - .one(&*tx) - .await? - .ok_or_else(|| { - anyhow!("no dev server project with id {}", dev_server_project_id) - })?, - ) - }) - .await - } - - pub async fn get_projects_for_dev_server( - &self, - dev_server_id: DevServerId, - ) -> crate::Result> { - self.transaction(|tx| async move { - self.get_projects_for_dev_server_internal(dev_server_id, &tx) - .await - }) - .await - } - - pub async fn get_projects_for_dev_server_internal( - &self, - dev_server_id: DevServerId, - tx: &DatabaseTransaction, - ) -> crate::Result> { - let servers = dev_server_project::Entity::find() - .filter(dev_server_project::Column::DevServerId.eq(dev_server_id)) - .find_also_related(project::Entity) - .all(tx) - .await?; - Ok(servers - .into_iter() - .map(|(dev_server_project, project)| dev_server_project.to_proto(project)) - .collect()) - } - - pub async fn dev_server_project_ids_for_user( - &self, - user_id: UserId, - tx: &DatabaseTransaction, - ) -> crate::Result> { - let dev_servers = dev_server::Entity::find() - .filter(dev_server::Column::UserId.eq(user_id)) - .find_with_related(dev_server_project::Entity) - .all(tx) - .await?; - - Ok(dev_servers - .into_iter() - .flat_map(|(_, projects)| projects.into_iter().map(|p| p.id)) - .collect()) - } - - pub async fn owner_for_dev_server_project( - &self, - dev_server_project_id: DevServerProjectId, - tx: &DatabaseTransaction, - ) -> crate::Result { - let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id) - .find_also_related(dev_server::Entity) - .one(tx) - .await? 
- .and_then(|(_, dev_server)| dev_server) - .ok_or_else(|| anyhow!("no dev server project"))?; - - Ok(dev_server.user_id) - } - - pub async fn get_stale_dev_server_projects( - &self, - connection: ConnectionId, - ) -> crate::Result> { - self.transaction(|tx| async move { - let projects = project::Entity::find() - .filter( - Condition::all() - .add(project::Column::HostConnectionId.eq(connection.id)) - .add(project::Column::HostConnectionServerId.eq(connection.owner_id)), - ) - .all(&*tx) - .await?; - - Ok(projects.into_iter().map(|p| p.id).collect()) - }) - .await - } - - pub async fn create_dev_server_project( - &self, - dev_server_id: DevServerId, - path: &str, - user_id: UserId, - ) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> { - self.transaction(|tx| async move { - let dev_server = dev_server::Entity::find_by_id(dev_server_id) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?; - if dev_server.user_id != user_id { - return Err(anyhow!("not your dev server"))?; - } - - let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel { - id: ActiveValue::NotSet, - dev_server_id: ActiveValue::Set(dev_server_id), - paths: ActiveValue::Set(dev_server_project::JSONPaths(vec![path.to_string()])), - }) - .exec_with_returning(&*tx) - .await?; - - let status = self - .dev_server_projects_update_internal(user_id, &tx) - .await?; - - Ok((project, status)) - }) - .await - } - - pub async fn update_dev_server_project( - &self, - id: DevServerProjectId, - paths: &[String], - user_id: UserId, - ) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> { - self.transaction(move |tx| async move { - let paths = paths.to_owned(); - let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id) - .find_also_related(dev_server::Entity) - .one(&*tx) - .await? 
- else { - return Err(anyhow!("no such dev server project"))?; - }; - - if dev_server.user_id != user_id { - return Err(anyhow!("not your dev server"))?; - } - let mut project = project.into_active_model(); - project.paths = ActiveValue::Set(dev_server_project::JSONPaths(paths)); - let project = project.update(&*tx).await?; - - let status = self - .dev_server_projects_update_internal(user_id, &tx) - .await?; - - Ok((project, status)) - }) - .await - } - - pub async fn delete_dev_server_project( - &self, - dev_server_project_id: DevServerProjectId, - dev_server_id: DevServerId, - user_id: UserId, - ) -> crate::Result<(Vec, proto::DevServerProjectsUpdate)> { - self.transaction(|tx| async move { - project::Entity::delete_many() - .filter(project::Column::DevServerProjectId.eq(dev_server_project_id)) - .exec(&*tx) - .await?; - let result = dev_server_project::Entity::delete_by_id(dev_server_project_id) - .exec(&*tx) - .await?; - if result.rows_affected != 1 { - return Err(anyhow!( - "no dev server project with id {}", - dev_server_project_id - ))?; - } - - let status = self - .dev_server_projects_update_internal(user_id, &tx) - .await?; - - let projects = self - .get_projects_for_dev_server_internal(dev_server_id, &tx) - .await?; - Ok((projects, status)) - }) - .await - } - - pub async fn share_dev_server_project( - &self, - dev_server_project_id: DevServerProjectId, - dev_server_id: DevServerId, - connection: ConnectionId, - worktrees: &[proto::WorktreeMetadata], - ) -> crate::Result<( - proto::DevServerProject, - UserId, - proto::DevServerProjectsUpdate, - )> { - self.transaction(|tx| async move { - let dev_server = dev_server::Entity::find_by_id(dev_server_id) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?; - - let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id) - .one(&*tx) - .await? 
- .ok_or_else(|| { - anyhow!("no dev server project with id {}", dev_server_project_id) - })?; - - if dev_server_project.dev_server_id != dev_server_id { - return Err(anyhow!("dev server project shared from wrong server"))?; - } - - let project = project::ActiveModel { - room_id: ActiveValue::Set(None), - host_user_id: ActiveValue::Set(None), - host_connection_id: ActiveValue::set(Some(connection.id as i32)), - host_connection_server_id: ActiveValue::set(Some(ServerId( - connection.owner_id as i32, - ))), - id: ActiveValue::NotSet, - hosted_project_id: ActiveValue::Set(None), - dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)), - } - .insert(&*tx) - .await?; - - if !worktrees.is_empty() { - worktree::Entity::insert_many(worktrees.iter().map(|worktree| { - worktree::ActiveModel { - id: ActiveValue::set(worktree.id as i64), - project_id: ActiveValue::set(project.id), - abs_path: ActiveValue::set(worktree.abs_path.clone()), - root_name: ActiveValue::set(worktree.root_name.clone()), - visible: ActiveValue::set(worktree.visible), - scan_id: ActiveValue::set(0), - completed_scan_id: ActiveValue::set(0), - } - })) - .exec(&*tx) - .await?; - } - - let status = self - .dev_server_projects_update_internal(dev_server.user_id, &tx) - .await?; - - Ok(( - dev_server_project.to_proto(Some(project)), - dev_server.user_id, - status, - )) - }) - .await - } - - pub async fn reshare_dev_server_projects( - &self, - reshared_projects: &Vec, - dev_server_id: DevServerId, - connection: ConnectionId, - ) -> crate::Result> { - self.transaction(|tx| async move { - let mut ret = Vec::new(); - for reshared_project in reshared_projects { - let project_id = ProjectId::from_proto(reshared_project.project_id); - let (project, dev_server_project) = project::Entity::find_by_id(project_id) - .find_also_related(dev_server_project::Entity) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("project does not exist"))?; - - if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) { - return Err(anyhow!("dev server project reshared from wrong server"))?; - } - - let Ok(old_connection_id) = project.host_connection() else { - return Err(anyhow!("dev server project was not shared"))?; - }; - - project::Entity::update(project::ActiveModel { - id: ActiveValue::set(project_id), - host_connection_id: ActiveValue::set(Some(connection.id as i32)), - host_connection_server_id: ActiveValue::set(Some(ServerId( - connection.owner_id as i32, - ))), - ..Default::default() - }) - .exec(&*tx) - .await?; - - let collaborators = project - .find_related(project_collaborator::Entity) - .all(&*tx) - .await?; - - self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx) - .await?; - - ret.push(super::ResharedProject { - id: project_id, - old_connection_id, - collaborators: collaborators - .iter() - .map(|collaborator| super::ProjectCollaborator { - connection_id: collaborator.connection(), - user_id: collaborator.user_id, - replica_id: collaborator.replica_id, - is_host: collaborator.is_host, - }) - .collect(), - worktrees: reshared_project.worktrees.clone(), - }); - } - Ok(ret) - }) - .await - } - - pub async fn rejoin_dev_server_projects( - &self, - rejoined_projects: &Vec, - user_id: UserId, - connection_id: ConnectionId, - ) -> crate::Result> { - self.transaction(|tx| async move { - let mut ret = Vec::new(); - for rejoined_project in rejoined_projects { - if let Some(project) = self - .rejoin_project_internal(&tx, rejoined_project, user_id, connection_id) - .await? 
- { - ret.push(project); - } - } - Ok(ret) - }) - .await - } -} diff --git a/crates/collab/src/db/queries/dev_servers.rs b/crates/collab/src/db/queries/dev_servers.rs index 16cbfedee33e504130f77195012af976bf3d7435..8b137891791fe96927ad78e64b0aad7bded08bdc 100644 --- a/crates/collab/src/db/queries/dev_servers.rs +++ b/crates/collab/src/db/queries/dev_servers.rs @@ -1,222 +1 @@ -use rpc::proto; -use sea_orm::{ - ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter, -}; -use super::{dev_server, dev_server_project, Database, DevServerId, UserId}; - -impl Database { - pub async fn get_dev_server( - &self, - dev_server_id: DevServerId, - ) -> crate::Result { - self.transaction(|tx| async move { - Ok(dev_server::Entity::find_by_id(dev_server_id) - .one(&*tx) - .await? - .ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?) - }) - .await - } - - pub async fn get_dev_server_for_user( - &self, - dev_server_id: DevServerId, - user_id: UserId, - ) -> crate::Result { - self.transaction(|tx| async move { - let server = dev_server::Entity::find_by_id(dev_server_id) - .one(&*tx) - .await? - .ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?; - if server.user_id != user_id { - return Err(anyhow::anyhow!( - "dev server {} is not owned by user {}", - dev_server_id, - user_id - ))?; - } - Ok(server) - }) - .await - } - - pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result> { - self.transaction(|tx| async move { - Ok(dev_server::Entity::find() - .filter(dev_server::Column::UserId.eq(user_id)) - .all(&*tx) - .await?) - }) - .await - } - - pub async fn dev_server_projects_update( - &self, - user_id: UserId, - ) -> crate::Result { - self.transaction(|tx| async move { - self.dev_server_projects_update_internal(user_id, &tx).await - }) - .await - } - - pub async fn dev_server_projects_update_internal( - &self, - user_id: UserId, - tx: &DatabaseTransaction, - ) -> crate::Result { - let dev_servers = dev_server::Entity::find() - .filter(dev_server::Column::UserId.eq(user_id)) - .all(tx) - .await?; - - let dev_server_projects = dev_server_project::Entity::find() - .filter( - dev_server_project::Column::DevServerId - .is_in(dev_servers.iter().map(|d| d.id).collect::>()), - ) - .find_also_related(super::project::Entity) - .all(tx) - .await?; - - Ok(proto::DevServerProjectsUpdate { - dev_servers: dev_servers - .into_iter() - .map(|d| d.to_proto(proto::DevServerStatus::Offline)) - .collect(), - dev_server_projects: dev_server_projects - .into_iter() - .map(|(dev_server_project, project)| dev_server_project.to_proto(project)) - .collect(), - }) - } - - pub async fn create_dev_server( - &self, - name: &str, - ssh_connection_string: Option<&str>, - hashed_access_token: &str, - user_id: UserId, - ) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> { - self.transaction(|tx| async move { - if name.trim().is_empty() { - return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; - } - - let dev_server = dev_server::Entity::insert(dev_server::ActiveModel { - id: ActiveValue::NotSet, - hashed_token: ActiveValue::Set(hashed_access_token.to_string()), - name: ActiveValue::Set(name.trim().to_string()), - user_id: ActiveValue::Set(user_id), - ssh_connection_string: ActiveValue::Set( - ssh_connection_string.map(ToOwned::to_owned), - ), - }) - .exec_with_returning(&*tx) - .await?; - - let dev_server_projects = self - .dev_server_projects_update_internal(user_id, &tx) - .await?; - - Ok((dev_server, 
dev_server_projects)) - }) - .await - } - - pub async fn update_dev_server_token( - &self, - id: DevServerId, - hashed_token: &str, - user_id: UserId, - ) -> crate::Result { - self.transaction(|tx| async move { - let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else { - return Err(anyhow::anyhow!("no dev server with id {}", id))?; - }; - if dev_server.user_id != user_id { - return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; - } - - dev_server::Entity::update(dev_server::ActiveModel { - hashed_token: ActiveValue::Set(hashed_token.to_string()), - ..dev_server.clone().into_active_model() - }) - .exec(&*tx) - .await?; - - let dev_server_projects = self - .dev_server_projects_update_internal(user_id, &tx) - .await?; - - Ok(dev_server_projects) - }) - .await - } - - pub async fn rename_dev_server( - &self, - id: DevServerId, - name: &str, - ssh_connection_string: Option<&str>, - user_id: UserId, - ) -> crate::Result { - self.transaction(|tx| async move { - let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else { - return Err(anyhow::anyhow!("no dev server with id {}", id))?; - }; - if dev_server.user_id != user_id || name.trim().is_empty() { - return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; - } - - dev_server::Entity::update(dev_server::ActiveModel { - name: ActiveValue::Set(name.trim().to_string()), - ssh_connection_string: ActiveValue::Set( - ssh_connection_string.map(ToOwned::to_owned), - ), - ..dev_server.clone().into_active_model() - }) - .exec(&*tx) - .await?; - - let dev_server_projects = self - .dev_server_projects_update_internal(user_id, &tx) - .await?; - - Ok(dev_server_projects) - }) - .await - } - - pub async fn delete_dev_server( - &self, - id: DevServerId, - user_id: UserId, - ) -> crate::Result { - self.transaction(|tx| async move { - let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else { - return Err(anyhow::anyhow!("no dev server with id {}", id))?; - }; - if dev_server.user_id != user_id { - return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?; - } - - dev_server_project::Entity::delete_many() - .filter(dev_server_project::Column::DevServerId.eq(id)) - .exec(&*tx) - .await?; - - dev_server::Entity::delete(dev_server.into_active_model()) - .exec(&*tx) - .await?; - - let dev_server_projects = self - .dev_server_projects_update_internal(user_id, &tx) - .await?; - - Ok(dev_server_projects) - }) - .await - } -} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index b03cead5891890327e8793b001ba67803529c40a..27bec21ca1cddd27b2e0056f6a21c34098fc78bc 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -32,7 +32,6 @@ impl Database { connection: ConnectionId, worktrees: &[proto::WorktreeMetadata], is_ssh_project: bool, - dev_server_project_id: Option, ) -> Result> { self.room_transaction(room_id, |tx| async move { let participant = room_participant::Entity::find() @@ -61,38 +60,6 @@ impl Database { return Err(anyhow!("guests cannot share projects"))?; } - if let Some(dev_server_project_id) = dev_server_project_id { - let project = project::Entity::find() - .filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id))) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no remote project"))?; - - let (_, dev_server) = dev_server_project::Entity::find_by_id(dev_server_project_id) - .find_also_related(dev_server::Entity) - .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("no dev_server_project"))?; - - if !dev_server.is_some_and(|dev_server| dev_server.user_id == participant.user_id) { - return Err(anyhow!("not your dev server"))?; - } - - if project.room_id.is_some() { - return Err(anyhow!("project already shared"))?; - }; - - let project = project::Entity::update(project::ActiveModel { - room_id: ActiveValue::Set(Some(room_id)), - ..project.into_active_model() - }) - .exec(&*tx) - .await?; - - let room = self.get_room(room_id, &tx).await?; - return Ok((project.id, room)); - } - let project = project::ActiveModel { room_id: ActiveValue::set(Some(participant.room_id)), host_user_id: ActiveValue::set(Some(participant.user_id)), @@ -102,7 +69,6 @@ impl Database { ))), id: ActiveValue::NotSet, hosted_project_id: ActiveValue::Set(None), - dev_server_project_id: ActiveValue::Set(None), } .insert(&*tx) .await?; @@ -156,7 +122,6 @@ impl Database { &self, project_id: ProjectId, connection: ConnectionId, - user_id: Option, ) -> Result, Vec)>> { self.project_transaction(project_id, |tx| async move { let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; @@ -172,25 +137,6 @@ impl Database { if project.host_connection()? == connection { return Ok((true, room, guest_connection_ids)); } - if let Some(dev_server_project_id) = project.dev_server_project_id { - if let Some(user_id) = user_id { - if user_id - != self - .owner_for_dev_server_project(dev_server_project_id, &tx) - .await? - { - Err(anyhow!("cannot unshare a project hosted by another user"))? - } - project::Entity::update(project::ActiveModel { - room_id: ActiveValue::Set(None), - ..project.into_active_model() - }) - .exec(&*tx) - .await?; - return Ok((false, room, guest_connection_ids)); - } - } - Err(anyhow!("cannot unshare a project hosted by another user"))? }) .await @@ -633,17 +579,6 @@ impl Database { .await } - pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result { - self.transaction(|tx| async move { - Ok(project::Entity::find() - .filter(project::Column::DevServerProjectId.eq(id)) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no such project"))?) - }) - .await - } - /// Adds the given connection to the specified project /// in the current room. pub async fn join_project( @@ -654,13 +589,7 @@ impl Database { ) -> Result> { self.project_transaction(project_id, |tx| async move { let (project, role) = self - .access_project( - project_id, - connection, - PrincipalId::UserId(user_id), - Capability::ReadOnly, - &tx, - ) + .access_project(project_id, connection, Capability::ReadOnly, &tx) .await?; self.join_project_internal(project, user_id, connection, role, &tx) .await @@ -851,7 +780,6 @@ impl Database { worktree_id: None, }) .collect(), - dev_server_project_id: project.dev_server_project_id, }; Ok((project, replica_id as ReplicaId)) } @@ -1007,29 +935,14 @@ impl Database { &self, project_id: ProjectId, connection_id: ConnectionId, - principal_id: PrincipalId, capability: Capability, tx: &DatabaseTransaction, ) -> Result<(project::Model, ChannelRole)> { - let (mut project, dev_server_project) = project::Entity::find_by_id(project_id) - .find_also_related(dev_server_project::Entity) + let project = project::Entity::find_by_id(project_id) .one(tx) .await? 
.ok_or_else(|| anyhow!("no such project"))?; - let user_id = match principal_id { - PrincipalId::DevServerId(_) => { - if project - .host_connection() - .is_ok_and(|connection| connection == connection_id) - { - return Ok((project, ChannelRole::Admin)); - } - return Err(anyhow!("not the project host"))?; - } - PrincipalId::UserId(user_id) => user_id, - }; - let role_from_room = if let Some(room_id) = project.room_id { room_participant::Entity::find() .filter(room_participant::Column::RoomId.eq(room_id)) @@ -1040,34 +953,8 @@ impl Database { } else { None }; - let role_from_dev_server = if let Some(dev_server_project) = dev_server_project { - let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id) - .one(tx) - .await? - .ok_or_else(|| anyhow!("no such channel"))?; - if user_id == dev_server.user_id { - // If the user left the room "uncleanly" they may rejoin the - // remote project before leave_room runs. IN that case kick - // the project out of the room pre-emptively. - if role_from_room.is_none() { - project = project::Entity::update(project::ActiveModel { - room_id: ActiveValue::Set(None), - ..project.into_active_model() - }) - .exec(tx) - .await?; - } - Some(ChannelRole::Admin) - } else { - None - } - } else { - None - }; - let role = role_from_dev_server - .or(role_from_room) - .unwrap_or(ChannelRole::Banned); + let role = role_from_room.unwrap_or(ChannelRole::Banned); match capability { Capability::ReadWrite => { @@ -1090,17 +977,10 @@ impl Database { &self, project_id: ProjectId, connection_id: ConnectionId, - user_id: UserId, ) -> Result { self.project_transaction(project_id, |tx| async move { let (project, _) = self - .access_project( - project_id, - connection_id, - PrincipalId::UserId(user_id), - Capability::ReadOnly, - &tx, - ) + .access_project(project_id, connection_id, Capability::ReadOnly, &tx) .await?; project.host_connection() }) @@ -1113,17 +993,10 @@ impl Database { &self, project_id: ProjectId, connection_id: ConnectionId, - user_id: UserId, ) -> Result { self.project_transaction(project_id, |tx| async move { let (project, _) = self - .access_project( - project_id, - connection_id, - PrincipalId::UserId(user_id), - Capability::ReadWrite, - &tx, - ) + .access_project(project_id, connection_id, Capability::ReadWrite, &tx) .await?; project.host_connection() }) @@ -1131,47 +1004,16 @@ impl Database { .map(|guard| guard.into_inner()) } - /// Returns the host connection for a request to join a shared project. - pub async fn host_for_owner_project_request( - &self, - project_id: ProjectId, - _connection_id: ConnectionId, - user_id: UserId, - ) -> Result { - self.project_transaction(project_id, |tx| async move { - let (project, dev_server_project) = project::Entity::find_by_id(project_id) - .find_also_related(dev_server_project::Entity) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no such project"))?; - - let Some(dev_server_project) = dev_server_project else { - return Err(anyhow!("not a dev server project"))?; - }; - let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id) - .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("no such dev server"))?; - if dev_server.user_id != user_id { - return Err(anyhow!("not your project"))?; - } - project.host_connection() - }) - .await - .map(|guard| guard.into_inner()) - } - pub async fn connections_for_buffer_update( &self, project_id: ProjectId, - principal_id: PrincipalId, connection_id: ConnectionId, capability: Capability, ) -> Result)>> { self.project_transaction(project_id, |tx| async move { // Authorize let (project, _) = self - .access_project(project_id, connection_id, principal_id, capability, &tx) + .access_project(project_id, connection_id, capability, &tx) .await?; let host_connection_id = project.host_connection()?; diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 9235b8550b378a10a274045a3ed583f0808104b8..682c4ed38949e210e6cd2a402867b7fd97102f3e 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -858,25 +858,6 @@ impl Database { .all(&*tx) .await?; - // if any project in the room has a remote-project-id that belongs to a dev server that this user owns. - let dev_server_projects_for_user = self - .dev_server_project_ids_for_user(leaving_participant.user_id, &tx) - .await?; - - let dev_server_projects_to_unshare = project::Entity::find() - .filter( - Condition::all() - .add(project::Column::RoomId.eq(room_id)) - .add( - project::Column::DevServerProjectId - .is_in(dev_server_projects_for_user.clone()), - ), - ) - .all(&*tx) - .await? - .into_iter() - .map(|project| project.id) - .collect::>(); let mut left_projects = HashMap::default(); let mut collaborators = project_collaborator::Entity::find() .filter(project_collaborator::Column::ProjectId.is_in(project_ids)) @@ -899,9 +880,7 @@ impl Database { left_project.connection_ids.push(collaborator_connection_id); } - if (collaborator.is_host && collaborator.connection() == connection) - || dev_server_projects_to_unshare.contains(&collaborator.project_id) - { + if collaborator.is_host && collaborator.connection() == connection { left_project.should_unshare = true; } } @@ -944,17 +923,6 @@ impl Database { .exec(&*tx) .await?; - if !dev_server_projects_to_unshare.is_empty() { - project::Entity::update_many() - .filter(project::Column::Id.is_in(dev_server_projects_to_unshare)) - .set(project::ActiveModel { - room_id: ActiveValue::Set(None), - ..Default::default() - }) - .exec(&*tx) - .await?; - } - let (channel, room) = self.get_channel_room(room_id, &tx).await?; let deleted = if room.participants.is_empty() { let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?; @@ -1323,26 +1291,6 @@ impl Database { project.worktree_root_names.push(db_worktree.root_name); } } - } else if let Some(dev_server_project_id) = db_project.dev_server_project_id { - let host = self - .owner_for_dev_server_project(dev_server_project_id, tx) - .await?; - if let Some((_, participant)) = participants - .iter_mut() - .find(|(_, v)| v.user_id == host.to_proto()) - { - participant.projects.push(proto::ParticipantProject { - id: db_project.id.to_proto(), - worktree_root_names: Default::default(), - }); - let project = participant.projects.last_mut().unwrap(); - - for db_worktree in db_worktrees { - if db_worktree.visible { - project.worktree_root_names.push(db_worktree.root_name); - } - } - } } } diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index 01d3835dc1c79a5cd4fe156b39aa2f1402caaae7..23dced800b56ba36f73ffbf039cd934536fbbc80 100644 --- a/crates/collab/src/db/tables.rs +++ 
b/crates/collab/src/db/tables.rs @@ -13,8 +13,6 @@ pub mod channel_message; pub mod channel_message_mention; pub mod contact; pub mod contributor; -pub mod dev_server; -pub mod dev_server_project; pub mod embedding; pub mod extension; pub mod extension_version; diff --git a/crates/collab/src/db/tables/dev_server.rs b/crates/collab/src/db/tables/dev_server.rs deleted file mode 100644 index a9615ca14b8ac6dcdc6948a8a23fea31440e759b..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/dev_server.rs +++ /dev/null @@ -1,39 +0,0 @@ -use crate::db::{DevServerId, UserId}; -use rpc::proto; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "dev_servers")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: DevServerId, - pub name: String, - pub user_id: UserId, - pub hashed_token: String, - pub ssh_connection_string: Option, -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm(has_many = "super::dev_server_project::Entity")] - RemoteProject, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::RemoteProject.def() - } -} - -impl Model { - pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer { - proto::DevServer { - dev_server_id: self.id.to_proto(), - name: self.name.clone(), - status: status as i32, - ssh_connection_string: self.ssh_connection_string.clone(), - } - } -} diff --git a/crates/collab/src/db/tables/dev_server_project.rs b/crates/collab/src/db/tables/dev_server_project.rs deleted file mode 100644 index ba487b8d02c25ae062dbacdf18501e7f446af7dc..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/dev_server_project.rs +++ /dev/null @@ -1,59 +0,0 @@ -use super::project; -use crate::db::{DevServerId, DevServerProjectId}; -use rpc::proto; -use sea_orm::{entity::prelude::*, FromJsonQueryResult}; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "dev_server_projects")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: DevServerProjectId, - pub dev_server_id: DevServerId, - pub paths: JSONPaths, -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)] -pub struct JSONPaths(pub Vec); - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm(has_one = "super::project::Entity")] - Project, - #[sea_orm( - belongs_to = "super::dev_server::Entity", - from = "Column::DevServerId", - to = "super::dev_server::Column::Id" - )] - DevServer, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Project.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::DevServer.def() - } -} - -impl Model { - pub fn to_proto(&self, project: Option) -> proto::DevServerProject { - proto::DevServerProject { - id: self.id.to_proto(), - project_id: project.map(|p| p.id.to_proto()), - dev_server_id: self.dev_server_id.to_proto(), - path: self.paths().first().cloned().unwrap_or_default(), - paths: self.paths().clone(), - } - } - - pub fn paths(&self) -> &Vec { - &self.paths.0 - } -} diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs index 6858af02377844c062c8570f0979afdca589fab5..a357634aff614ccb6d4377c6ff1a42522e862a5d 100644 --- a/crates/collab/src/db/tables/project.rs +++ 
b/crates/collab/src/db/tables/project.rs @@ -1,4 +1,4 @@ -use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId}; +use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId}; use anyhow::anyhow; use rpc::ConnectionId; use sea_orm::entity::prelude::*; @@ -13,7 +13,6 @@ pub struct Model { pub host_connection_id: Option, pub host_connection_server_id: Option, pub hosted_project_id: Option, - pub dev_server_project_id: Option, } impl Model { @@ -57,12 +56,6 @@ pub enum Relation { to = "super::hosted_project::Column::Id" )] HostedProject, - #[sea_orm( - belongs_to = "super::dev_server_project::Entity", - from = "Column::DevServerProjectId", - to = "super::dev_server_project::Column::Id" - )] - RemoteProject, } impl Related for Entity { @@ -101,10 +94,4 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::RemoteProject.def() - } -} - impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tests/db_tests.rs b/crates/collab/src/db/tests/db_tests.rs index 626335028770ecb460bbf7f12692648da05aa074..cd3a19435746ab2e044c789d0f4e7a33b8f20ffd 100644 --- a/crates/collab/src/db/tests/db_tests.rs +++ b/crates/collab/src/db/tests/db_tests.rs @@ -540,18 +540,18 @@ async fn test_project_count(db: &Arc) { .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None) + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None) + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); // Projects shared by admins aren't counted. 
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, None) + db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index c162129db6d2a6965533495e9ac66e2b5a4e2071..90277242f1b1c67037253e9d6011cf34f39cfe65 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -5,11 +5,10 @@ use crate::llm::LlmTokenClaims; use crate::{ auth, db::{ - self, dev_server, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser, - CreatedChannelMessage, Database, DevServerId, DevServerProjectId, InviteMemberResult, - MembershipUpdated, MessageId, NotificationId, PrincipalId, Project, ProjectId, - RejoinedProject, RemoveChannelMemberResult, ReplicaId, RespondToChannelInvite, RoomId, - ServerId, UpdatedChannelMessage, User, UserId, + self, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser, + CreatedChannelMessage, Database, InviteMemberResult, MembershipUpdated, MessageId, + NotificationId, Project, ProjectId, RejoinedProject, RemoveChannelMemberResult, ReplicaId, + RespondToChannelInvite, RoomId, ServerId, UpdatedChannelMessage, User, UserId, }, executor::Executor, AppState, Config, Error, RateLimit, Result, @@ -42,10 +41,8 @@ use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; use futures::{ - channel::oneshot, - future::{self, BoxFuture}, - stream::FuturesUnordered, - FutureExt, SinkExt, StreamExt, TryStreamExt, + channel::oneshot, future::BoxFuture, stream::FuturesUnordered, FutureExt, SinkExt, StreamExt, + TryStreamExt, }; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ @@ -109,7 +106,6 @@ impl Response { pub enum Principal { User(User), Impersonated { user: User, admin: User }, - DevServer(dev_server::Model), } impl Principal { @@ -124,9 +120,6 @@ impl Principal { span.record("login", &user.github_login); span.record("impersonator", &admin.github_login); } - Principal::DevServer(dev_server) => { - span.record("dev_server_id", dev_server.id.0); - } } } } @@ -167,27 +160,10 @@ impl Session { } } - fn for_user(self) -> Option { - UserSession::new(self) - } - - fn for_dev_server(self) -> Option { - DevServerSession::new(self) - } - - fn user_id(&self) -> Option { - match &self.principal { - Principal::User(user) => Some(user.id), - Principal::Impersonated { user, .. } => Some(user.id), - Principal::DevServer(_) => None, - } - } - fn is_staff(&self) -> bool { match &self.principal { Principal::User(user) => user.admin, Principal::Impersonated { .. } => true, - Principal::DevServer(_) => false, } } @@ -199,9 +175,7 @@ impl Session { return Ok(true); } - let Some(user_id) = self.user_id() else { - return Ok(false); - }; + let user_id = self.user_id(); Ok(db.has_active_billing_subscription(user_id).await?) } @@ -217,18 +191,17 @@ impl Session { } } - fn dev_server_id(&self) -> Option { + fn user_id(&self) -> UserId { match &self.principal { - Principal::User(_) | Principal::Impersonated { .. } => None, - Principal::DevServer(dev_server) => Some(dev_server.id), + Principal::User(user) => user.id, + Principal::Impersonated { user, .. } => user.id, } } - fn principal_id(&self) -> PrincipalId { + pub fn email(&self) -> Option { match &self.principal { - Principal::User(user) => PrincipalId::UserId(user.id), - Principal::Impersonated { user, .. 
} => PrincipalId::UserId(user.id), - Principal::DevServer(dev_server) => PrincipalId::DevServerId(dev_server.id), + Principal::User(user) => user.email_address.clone(), + Principal::Impersonated { user, .. } => user.email_address.clone(), } } } @@ -244,143 +217,11 @@ impl Debug for Session { result.field("user", &user.github_login); result.field("impersonator", &admin.github_login); } - Principal::DevServer(dev_server) => { - result.field("dev_server", &dev_server.id); - } } result.field("connection_id", &self.connection_id).finish() } } -struct UserSession(Session); - -impl UserSession { - pub fn new(s: Session) -> Option { - s.user_id().map(|_| UserSession(s)) - } - pub fn user_id(&self) -> UserId { - self.0.user_id().unwrap() - } - - pub fn email(&self) -> Option { - match &self.0.principal { - Principal::User(user) => user.email_address.clone(), - Principal::Impersonated { user, .. } => user.email_address.clone(), - Principal::DevServer(..) => None, - } - } -} - -impl Deref for UserSession { - type Target = Session; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for UserSession { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -struct DevServerSession(Session); - -impl DevServerSession { - pub fn new(s: Session) -> Option { - s.dev_server_id().map(|_| DevServerSession(s)) - } - pub fn dev_server_id(&self) -> DevServerId { - self.0.dev_server_id().unwrap() - } - - fn dev_server(&self) -> &dev_server::Model { - match &self.0.principal { - Principal::DevServer(dev_server) => dev_server, - _ => unreachable!(), - } - } -} - -impl Deref for DevServerSession { - type Target = Session; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for DevServerSession { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -fn user_handler( - handler: impl 'static + Send + Sync + Fn(M, Response, UserSession) -> Fut, -) -> impl 'static + Send + Sync + Fn(M, Response, Session) -> BoxFuture<'static, Result<()>> -where - Fut: Send + Future>, -{ - let handler = Arc::new(handler); - move |message, response, session| { - let handler = handler.clone(); - Box::pin(async move { - if let Some(user_session) = session.for_user() { - Ok(handler(message, response, user_session).await?) - } else { - Err(Error::Internal(anyhow!( - "must be a user to call {}", - M::NAME - ))) - } - }) - } -} - -fn dev_server_handler( - handler: impl 'static + Send + Sync + Fn(M, Response, DevServerSession) -> Fut, -) -> impl 'static + Send + Sync + Fn(M, Response, Session) -> BoxFuture<'static, Result<()>> -where - Fut: Send + Future>, -{ - let handler = Arc::new(handler); - move |message, response, session| { - let handler = handler.clone(); - Box::pin(async move { - if let Some(dev_server_session) = session.for_dev_server() { - Ok(handler(message, response, dev_server_session).await?) - } else { - Err(Error::Internal(anyhow!( - "must be a dev server to call {}", - M::NAME - ))) - } - }) - } -} - -fn user_message_handler( - handler: impl 'static + Send + Sync + Fn(M, UserSession) -> InnertRetFut, -) -> impl 'static + Send + Sync + Fn(M, Session) -> BoxFuture<'static, Result<()>> -where - InnertRetFut: Send + Future>, -{ - let handler = Arc::new(handler); - move |message, session| { - let handler = handler.clone(); - Box::pin(async move { - if let Some(user_session) = session.for_user() { - Ok(handler(message, user_session).await?) 
- } else { - Err(Error::Internal(anyhow!( - "must be a user to call {}", - M::NAME - ))) - } - }) - } -} - struct DbHandle(Arc); impl Deref for DbHandle { @@ -434,141 +275,64 @@ impl Server { server .add_request_handler(ping) - .add_request_handler(user_handler(create_room)) - .add_request_handler(user_handler(join_room)) - .add_request_handler(user_handler(rejoin_room)) - .add_request_handler(user_handler(leave_room)) - .add_request_handler(user_handler(set_room_participant_role)) - .add_request_handler(user_handler(call)) - .add_request_handler(user_handler(cancel_call)) - .add_message_handler(user_message_handler(decline_call)) - .add_request_handler(user_handler(update_participant_location)) - .add_request_handler(user_handler(share_project)) + .add_request_handler(create_room) + .add_request_handler(join_room) + .add_request_handler(rejoin_room) + .add_request_handler(leave_room) + .add_request_handler(set_room_participant_role) + .add_request_handler(call) + .add_request_handler(cancel_call) + .add_message_handler(decline_call) + .add_request_handler(update_participant_location) + .add_request_handler(share_project) .add_message_handler(unshare_project) - .add_request_handler(user_handler(join_project)) - .add_request_handler(user_handler(join_hosted_project)) - .add_request_handler(user_handler(rejoin_dev_server_projects)) - .add_request_handler(user_handler(create_dev_server_project)) - .add_request_handler(user_handler(update_dev_server_project)) - .add_request_handler(user_handler(delete_dev_server_project)) - .add_request_handler(user_handler(create_dev_server)) - .add_request_handler(user_handler(regenerate_dev_server_token)) - .add_request_handler(user_handler(rename_dev_server)) - .add_request_handler(user_handler(delete_dev_server)) - .add_request_handler(user_handler(list_remote_directory)) - .add_request_handler(dev_server_handler(share_dev_server_project)) - .add_request_handler(dev_server_handler(shutdown_dev_server)) - .add_request_handler(dev_server_handler(reconnect_dev_server)) - .add_message_handler(user_message_handler(leave_project)) + .add_request_handler(join_project) + .add_request_handler(join_hosted_project) + .add_message_handler(leave_project) .add_request_handler(update_project) .add_request_handler(update_worktree) .add_message_handler(start_language_server) .add_message_handler(update_language_server) .add_message_handler(update_diagnostic_summary) .add_message_handler(update_worktree_settings) - .add_request_handler(user_handler( - forward_project_request_for_owner::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler(forward_find_search_candidates_request)) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_read_only_project_request::, - )) - 
.add_request_handler(user_handler( - forward_read_only_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_find_search_candidates_request) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler( forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( + ) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler( forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) + ) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + 
.add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_message_handler(create_buffer_for_peer) .add_request_handler(update_buffer) .add_message_handler(broadcast_project_message_from_host::) @@ -577,53 +341,47 @@ impl Server { .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(broadcast_project_message_from_host::) .add_request_handler(get_users) - .add_request_handler(user_handler(fuzzy_search_users)) - .add_request_handler(user_handler(request_contact)) - .add_request_handler(user_handler(remove_contact)) - .add_request_handler(user_handler(respond_to_contact_request)) + .add_request_handler(fuzzy_search_users) + .add_request_handler(request_contact) + .add_request_handler(remove_contact) + .add_request_handler(respond_to_contact_request) .add_message_handler(subscribe_to_channels) - .add_request_handler(user_handler(create_channel)) - .add_request_handler(user_handler(delete_channel)) - .add_request_handler(user_handler(invite_channel_member)) - .add_request_handler(user_handler(remove_channel_member)) - .add_request_handler(user_handler(set_channel_member_role)) - .add_request_handler(user_handler(set_channel_visibility)) - .add_request_handler(user_handler(rename_channel)) - .add_request_handler(user_handler(join_channel_buffer)) - .add_request_handler(user_handler(leave_channel_buffer)) - .add_message_handler(user_message_handler(update_channel_buffer)) - .add_request_handler(user_handler(rejoin_channel_buffers)) - .add_request_handler(user_handler(get_channel_members)) - .add_request_handler(user_handler(respond_to_channel_invite)) - .add_request_handler(user_handler(join_channel)) - .add_request_handler(user_handler(join_channel_chat)) - .add_message_handler(user_message_handler(leave_channel_chat)) - .add_request_handler(user_handler(send_channel_message)) - .add_request_handler(user_handler(remove_channel_message)) - .add_request_handler(user_handler(update_channel_message)) - .add_request_handler(user_handler(get_channel_messages)) - .add_request_handler(user_handler(get_channel_messages_by_id)) - .add_request_handler(user_handler(get_notifications)) - .add_request_handler(user_handler(mark_notification_as_read)) - .add_request_handler(user_handler(move_channel)) - .add_request_handler(user_handler(follow)) - .add_message_handler(user_message_handler(unfollow)) - .add_message_handler(user_message_handler(update_followers)) - .add_request_handler(user_handler(get_private_user_info)) - .add_request_handler(user_handler(get_llm_api_token)) - .add_request_handler(user_handler(accept_terms_of_service)) - .add_message_handler(user_message_handler(acknowledge_channel_message)) - .add_message_handler(user_message_handler(acknowledge_buffer_version)) - .add_request_handler(user_handler(get_supermaven_api_key)) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) - .add_request_handler(user_handler( - forward_mutating_project_request::, - )) + .add_request_handler(create_channel) + .add_request_handler(delete_channel) + .add_request_handler(invite_channel_member) + .add_request_handler(remove_channel_member) + .add_request_handler(set_channel_member_role) + .add_request_handler(set_channel_visibility) + .add_request_handler(rename_channel) + .add_request_handler(join_channel_buffer) + .add_request_handler(leave_channel_buffer) + 
.add_message_handler(update_channel_buffer) + .add_request_handler(rejoin_channel_buffers) + .add_request_handler(get_channel_members) + .add_request_handler(respond_to_channel_invite) + .add_request_handler(join_channel) + .add_request_handler(join_channel_chat) + .add_message_handler(leave_channel_chat) + .add_request_handler(send_channel_message) + .add_request_handler(remove_channel_message) + .add_request_handler(update_channel_message) + .add_request_handler(get_channel_messages) + .add_request_handler(get_channel_messages_by_id) + .add_request_handler(get_notifications) + .add_request_handler(mark_notification_as_read) + .add_request_handler(move_channel) + .add_request_handler(follow) + .add_message_handler(unfollow) + .add_message_handler(update_followers) + .add_request_handler(get_private_user_info) + .add_request_handler(get_llm_api_token) + .add_request_handler(accept_terms_of_service) + .add_message_handler(acknowledge_channel_message) + .add_message_handler(acknowledge_buffer_version) + .add_request_handler(get_supermaven_api_key) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(update_context) .add_request_handler({ @@ -636,21 +394,17 @@ impl Server { } } }) - .add_request_handler({ - user_handler(move |request, response, session| { - get_cached_embeddings(request, response, session) - }) - }) + .add_request_handler(get_cached_embeddings) .add_request_handler({ let app_state = app_state.clone(); - user_handler(move |request, response, session| { + move |request, response, session| { compute_embeddings( request, response, session, app_state.config.openai_api_key.clone(), ) - }) + } }); Arc::new(server) @@ -936,7 +690,6 @@ impl Server { user_id=field::Empty, login=field::Empty, impersonator=field::Empty, - dev_server_id=field::Empty, geoip_country_code=field::Empty ); principal.update_span(&span); @@ -1031,7 +784,6 @@ impl Server { user_id=field::Empty, login=field::Empty, impersonator=field::Empty, - dev_server_id=field::Empty ); principal.update_span(&span); let span_enter = span.enter(); @@ -1100,11 +852,7 @@ impl Server { update_user_plan(user.id, session).await?; - let (contacts, dev_server_projects) = future::try_join( - self.app_state.db.get_contacts(user.id), - self.app_state.db.dev_server_projects_update(user.id), - ) - .await?; + let contacts = self.app_state.db.get_contacts(user.id).await?; { let mut pool = self.connection_pool.lock(); @@ -1119,8 +867,6 @@ impl Server { subscribe_user_to_channels(user.id, session).await?; } - send_dev_server_projects_update(user.id, dev_server_projects, session).await; - if let Some(incoming_call) = self.app_state.db.incoming_call_for_user(user.id).await? 
{ @@ -1129,39 +875,6 @@ impl Server { update_user_contacts(user.id, session).await?; } - Principal::DevServer(dev_server) => { - { - let mut pool = self.connection_pool.lock(); - if let Some(stale_connection_id) = pool.dev_server_connection_id(dev_server.id) - { - self.peer.send( - stale_connection_id, - proto::ShutdownDevServer { - reason: Some( - "another dev server connected with the same token".to_string(), - ), - }, - )?; - pool.remove_connection(stale_connection_id)?; - }; - pool.add_dev_server(connection_id, dev_server.id, zed_version); - } - - let projects = self - .app_state - .db - .get_projects_for_dev_server(dev_server.id) - .await?; - self.peer - .send(connection_id, proto::DevServerInstructions { projects })?; - - let status = self - .app_state - .db - .dev_server_projects_update(dev_server.user_id) - .await?; - send_dev_server_projects_update(dev_server.user_id, status, session).await; - } } Ok(()) @@ -1452,33 +1165,25 @@ async fn connection_lost( futures::select_biased! { _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => { - match &session.principal { - Principal::User(_) | Principal::Impersonated{ user: _, admin:_ } => { - let session = session.for_user().unwrap(); - - log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id(), session.connection_id); - leave_room_for_session(&session, session.connection_id).await.trace_err(); - leave_channel_buffers_for_session(&session) - .await - .trace_err(); - - if !session - .connection_pool() - .await - .is_user_online(session.user_id()) - { - let db = session.db().await; - if let Some(room) = db.decline_call(None, session.user_id()).await.trace_err().flatten() { - room_updated(&room, &session.peer); - } - } - update_user_contacts(session.user_id(), &session).await?; - }, - Principal::DevServer(_) => { - lost_dev_server_connection(&session.for_dev_server().unwrap()).await?; - }, - } + log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id(), session.connection_id); + leave_room_for_session(&session, session.connection_id).await.trace_err(); + leave_channel_buffers_for_session(&session) + .await + .trace_err(); + + if !session + .connection_pool() + .await + .is_user_online(session.user_id()) + { + let db = session.db().await; + if let Some(room) = db.decline_call(None, session.user_id()).await.trace_err().flatten() { + room_updated(&room, &session.peer); + } + } + + update_user_contacts(session.user_id(), &session).await?; }, _ = teardown.changed().fuse() => {} } @@ -1496,7 +1201,7 @@ async fn ping(_: proto::Ping, response: Response, _session: Session async fn create_room( _request: proto::CreateRoom, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let live_kit_room = nanoid::nanoid!(30); @@ -1536,7 +1241,7 @@ async fn create_room( async fn join_room( request: proto::JoinRoom, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let room_id = RoomId::from_proto(request.id); @@ -1603,7 +1308,7 @@ async fn join_room( async fn rejoin_room( request: proto::RejoinRoom, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let room; let channel; @@ -1693,7 +1398,7 @@ async fn rejoin_room( fn notify_rejoined_projects( rejoined_projects: &mut Vec, - session: &UserSession, + session: &Session, ) -> Result<()> { for project in rejoined_projects.iter() { for collaborator in &project.collaborators { @@ -1778,7 +1483,7 @@ fn notify_rejoined_projects( async fn leave_room( _: 
proto::LeaveRoom, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { leave_room_for_session(&session, session.connection_id).await?; response.send(proto::Ack {})?; @@ -1789,7 +1494,7 @@ async fn leave_room( async fn set_room_participant_role( request: proto::SetRoomParticipantRole, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let user_id = UserId::from_proto(request.user_id); let role = ChannelRole::from(request.role()); @@ -1837,7 +1542,7 @@ async fn set_room_participant_role( async fn call( request: proto::Call, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let room_id = RoomId::from_proto(request.room_id); let calling_user_id = session.user_id(); @@ -1906,7 +1611,7 @@ async fn call( async fn cancel_call( request: proto::CancelCall, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let called_user_id = UserId::from_proto(request.called_user_id); let room_id = RoomId::from_proto(request.room_id); @@ -1941,7 +1646,7 @@ async fn cancel_call( } /// Decline an incoming call. -async fn decline_call(message: proto::DeclineCall, session: UserSession) -> Result<()> { +async fn decline_call(message: proto::DeclineCall, session: Session) -> Result<()> { let room_id = RoomId::from_proto(message.room_id); { let room = session @@ -1976,7 +1681,7 @@ async fn decline_call(message: proto::DeclineCall, session: UserSession) -> Resu async fn update_participant_location( request: proto::UpdateParticipantLocation, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let room_id = RoomId::from_proto(request.room_id); let location = request @@ -1997,7 +1702,7 @@ async fn update_participant_location( async fn share_project( request: proto::ShareProject, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let (project_id, room) = &*session .db() @@ -2007,9 +1712,6 @@ async fn share_project( session.connection_id, &request.worktrees, request.is_ssh_project, - request - .dev_server_project_id - .map(DevServerProjectId::from_proto), ) .await?; response.send(proto::ShareProjectResponse { @@ -2023,26 +1725,19 @@ async fn share_project( /// Unshare a project from the room. 
async fn unshare_project(message: proto::UnshareProject, session: Session) -> Result<()> { let project_id = ProjectId::from_proto(message.project_id); - unshare_project_internal( - project_id, - session.connection_id, - session.user_id(), - &session, - ) - .await + unshare_project_internal(project_id, session.connection_id, &session).await } async fn unshare_project_internal( project_id: ProjectId, connection_id: ConnectionId, - user_id: Option, session: &Session, ) -> Result<()> { let delete = { let room_guard = session .db() .await - .unshare_project(project_id, connection_id, user_id) + .unshare_project(project_id, connection_id) .await?; let (delete, room, guest_connection_ids) = &*room_guard; @@ -2071,38 +1766,11 @@ async fn unshare_project_internal( Ok(()) } -/// DevServer makes a project available online -async fn share_dev_server_project( - request: proto::ShareDevServerProject, - response: Response, - session: DevServerSession, -) -> Result<()> { - let (dev_server_project, user_id, status) = session - .db() - .await - .share_dev_server_project( - DevServerProjectId::from_proto(request.dev_server_project_id), - session.dev_server_id(), - session.connection_id, - &request.worktrees, - ) - .await?; - let Some(project_id) = dev_server_project.project_id else { - return Err(anyhow!("failed to share remote project"))?; - }; - - send_dev_server_projects_update(user_id, status, &session).await; - - response.send(proto::ShareProjectResponse { project_id })?; - - Ok(()) -} - /// Join someone elses shared project. async fn join_project( request: proto::JoinProject, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let project_id = ProjectId::from_proto(request.project_id); @@ -2133,7 +1801,7 @@ impl JoinProjectInternalResponse for Response { fn join_project_internal( response: impl JoinProjectInternalResponse, - session: UserSession, + session: Session, project: &mut Project, replica_id: &ReplicaId, ) -> Result<()> { @@ -2184,9 +1852,6 @@ fn join_project_internal( collaborators: collaborators.clone(), language_servers: project.language_servers.clone(), role: project.role.into(), - dev_server_project_id: project - .dev_server_project_id - .map(|dev_server_project_id| dev_server_project_id.0 as u64), })?; for (worktree_id, worktree) in mem::take(&mut project.worktrees) { @@ -2252,7 +1917,7 @@ fn join_project_internal( } /// Leave someone elses shared project. 
-async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Result<()> { +async fn leave_project(request: proto::LeaveProject, session: Session) -> Result<()> { let sender_id = session.connection_id; let project_id = ProjectId::from_proto(request.project_id); let db = session.db().await; @@ -2279,7 +1944,7 @@ async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Re async fn join_hosted_project( request: proto::JoinHostedProject, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let (mut project, replica_id) = session .db() @@ -2294,481 +1959,6 @@ async fn join_hosted_project( join_project_internal(response, session, &mut project, &replica_id) } -async fn list_remote_directory( - request: proto::ListRemoteDirectory, - response: Response, - session: UserSession, -) -> Result<()> { - let dev_server_id = DevServerId(request.dev_server_id as i32); - let dev_server_connection_id = session - .connection_pool() - .await - .online_dev_server_connection_id(dev_server_id)?; - - session - .db() - .await - .get_dev_server_for_user(dev_server_id, session.user_id()) - .await?; - - response.send( - session - .peer - .forward_request(session.connection_id, dev_server_connection_id, request) - .await?, - )?; - Ok(()) -} - -async fn update_dev_server_project( - request: proto::UpdateDevServerProject, - response: Response, - session: UserSession, -) -> Result<()> { - let dev_server_project_id = DevServerProjectId(request.dev_server_project_id as i32); - - let (dev_server_project, update) = session - .db() - .await - .update_dev_server_project(dev_server_project_id, &request.paths, session.user_id()) - .await?; - - let projects = session - .db() - .await - .get_projects_for_dev_server(dev_server_project.dev_server_id) - .await?; - - let dev_server_connection_id = session - .connection_pool() - .await - .online_dev_server_connection_id(dev_server_project.dev_server_id)?; - - session.peer.send( - dev_server_connection_id, - proto::DevServerInstructions { projects }, - )?; - - send_dev_server_projects_update(session.user_id(), update, &session).await; - - response.send(proto::Ack {}) -} - -async fn create_dev_server_project( - request: proto::CreateDevServerProject, - response: Response, - session: UserSession, -) -> Result<()> { - let dev_server_id = DevServerId(request.dev_server_id as i32); - let dev_server_connection_id = session - .connection_pool() - .await - .dev_server_connection_id(dev_server_id); - let Some(dev_server_connection_id) = dev_server_connection_id else { - Err(ErrorCode::DevServerOffline - .message("Cannot create a remote project when the dev server is offline".to_string()) - .anyhow())? 
- }; - - let path = request.path.clone(); - //Check that the path exists on the dev server - session - .peer - .forward_request( - session.connection_id, - dev_server_connection_id, - proto::ValidateDevServerProjectRequest { path: path.clone() }, - ) - .await?; - - let (dev_server_project, update) = session - .db() - .await - .create_dev_server_project( - DevServerId(request.dev_server_id as i32), - &request.path, - session.user_id(), - ) - .await?; - - let projects = session - .db() - .await - .get_projects_for_dev_server(dev_server_project.dev_server_id) - .await?; - - session.peer.send( - dev_server_connection_id, - proto::DevServerInstructions { projects }, - )?; - - send_dev_server_projects_update(session.user_id(), update, &session).await; - - response.send(proto::CreateDevServerProjectResponse { - dev_server_project: Some(dev_server_project.to_proto(None)), - })?; - Ok(()) -} - -async fn create_dev_server( - request: proto::CreateDevServer, - response: Response, - session: UserSession, -) -> Result<()> { - let access_token = auth::random_token(); - let hashed_access_token = auth::hash_access_token(&access_token); - - if request.name.is_empty() { - return Err(proto::ErrorCode::Forbidden - .message("Dev server name cannot be empty".to_string()) - .anyhow())?; - } - - let (dev_server, status) = session - .db() - .await - .create_dev_server( - &request.name, - request.ssh_connection_string.as_deref(), - &hashed_access_token, - session.user_id(), - ) - .await?; - - send_dev_server_projects_update(session.user_id(), status, &session).await; - - response.send(proto::CreateDevServerResponse { - dev_server_id: dev_server.id.0 as u64, - access_token: auth::generate_dev_server_token(dev_server.id.0 as usize, access_token), - name: request.name, - })?; - Ok(()) -} - -async fn regenerate_dev_server_token( - request: proto::RegenerateDevServerToken, - response: Response, - session: UserSession, -) -> Result<()> { - let dev_server_id = DevServerId(request.dev_server_id as i32); - let access_token = auth::random_token(); - let hashed_access_token = auth::hash_access_token(&access_token); - - let connection_id = session - .connection_pool() - .await - .dev_server_connection_id(dev_server_id); - if let Some(connection_id) = connection_id { - shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?; - session.peer.send( - connection_id, - proto::ShutdownDevServer { - reason: Some("dev server token was regenerated".to_string()), - }, - )?; - let _ = remove_dev_server_connection(dev_server_id, &session).await; - } - - let status = session - .db() - .await - .update_dev_server_token(dev_server_id, &hashed_access_token, session.user_id()) - .await?; - - send_dev_server_projects_update(session.user_id(), status, &session).await; - - response.send(proto::RegenerateDevServerTokenResponse { - dev_server_id: dev_server_id.to_proto(), - access_token: auth::generate_dev_server_token(dev_server_id.0 as usize, access_token), - })?; - Ok(()) -} - -async fn rename_dev_server( - request: proto::RenameDevServer, - response: Response, - session: UserSession, -) -> Result<()> { - if request.name.trim().is_empty() { - return Err(proto::ErrorCode::Forbidden - .message("Dev server name cannot be empty".to_string()) - .anyhow())?; - } - - let dev_server_id = DevServerId(request.dev_server_id as i32); - let dev_server = session.db().await.get_dev_server(dev_server_id).await?; - if dev_server.user_id != session.user_id() { - return Err(anyhow!(ErrorCode::Forbidden))?; - } - - let status = session - .db() - 
.await - .rename_dev_server( - dev_server_id, - &request.name, - request.ssh_connection_string.as_deref(), - session.user_id(), - ) - .await?; - - send_dev_server_projects_update(session.user_id(), status, &session).await; - - response.send(proto::Ack {})?; - Ok(()) -} - -async fn delete_dev_server( - request: proto::DeleteDevServer, - response: Response, - session: UserSession, -) -> Result<()> { - let dev_server_id = DevServerId(request.dev_server_id as i32); - let dev_server = session.db().await.get_dev_server(dev_server_id).await?; - if dev_server.user_id != session.user_id() { - return Err(anyhow!(ErrorCode::Forbidden))?; - } - - let connection_id = session - .connection_pool() - .await - .dev_server_connection_id(dev_server_id); - if let Some(connection_id) = connection_id { - shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?; - session.peer.send( - connection_id, - proto::ShutdownDevServer { - reason: Some("dev server was deleted".to_string()), - }, - )?; - let _ = remove_dev_server_connection(dev_server_id, &session).await; - } - - let status = session - .db() - .await - .delete_dev_server(dev_server_id, session.user_id()) - .await?; - - send_dev_server_projects_update(session.user_id(), status, &session).await; - - response.send(proto::Ack {})?; - Ok(()) -} - -async fn delete_dev_server_project( - request: proto::DeleteDevServerProject, - response: Response, - session: UserSession, -) -> Result<()> { - let dev_server_project_id = DevServerProjectId(request.dev_server_project_id as i32); - let dev_server_project = session - .db() - .await - .get_dev_server_project(dev_server_project_id) - .await?; - - let dev_server = session - .db() - .await - .get_dev_server(dev_server_project.dev_server_id) - .await?; - if dev_server.user_id != session.user_id() { - return Err(anyhow!(ErrorCode::Forbidden))?; - } - - let dev_server_connection_id = session - .connection_pool() - .await - .dev_server_connection_id(dev_server.id); - - if let Some(dev_server_connection_id) = dev_server_connection_id { - let project = session - .db() - .await - .find_dev_server_project(dev_server_project_id) - .await; - if let Ok(project) = project { - unshare_project_internal( - project.id, - dev_server_connection_id, - Some(session.user_id()), - &session, - ) - .await?; - } - } - - let (projects, status) = session - .db() - .await - .delete_dev_server_project(dev_server_project_id, dev_server.id, session.user_id()) - .await?; - - if let Some(dev_server_connection_id) = dev_server_connection_id { - session.peer.send( - dev_server_connection_id, - proto::DevServerInstructions { projects }, - )?; - } - - send_dev_server_projects_update(session.user_id(), status, &session).await; - - response.send(proto::Ack {})?; - Ok(()) -} - -async fn rejoin_dev_server_projects( - request: proto::RejoinRemoteProjects, - response: Response, - session: UserSession, -) -> Result<()> { - let mut rejoined_projects = { - let db = session.db().await; - db.rejoin_dev_server_projects( - &request.rejoined_projects, - session.user_id(), - session.0.connection_id, - ) - .await? 
- }; - response.send(proto::RejoinRemoteProjectsResponse { - rejoined_projects: rejoined_projects - .iter() - .map(|project| project.to_proto()) - .collect(), - })?; - notify_rejoined_projects(&mut rejoined_projects, &session) -} - -async fn reconnect_dev_server( - request: proto::ReconnectDevServer, - response: Response, - session: DevServerSession, -) -> Result<()> { - let reshared_projects = { - let db = session.db().await; - db.reshare_dev_server_projects( - &request.reshared_projects, - session.dev_server_id(), - session.0.connection_id, - ) - .await? - }; - - for project in &reshared_projects { - for collaborator in &project.collaborators { - session - .peer - .send( - collaborator.connection_id, - proto::UpdateProjectCollaborator { - project_id: project.id.to_proto(), - old_peer_id: Some(project.old_connection_id.into()), - new_peer_id: Some(session.connection_id.into()), - }, - ) - .trace_err(); - } - - broadcast( - Some(session.connection_id), - project - .collaborators - .iter() - .map(|collaborator| collaborator.connection_id), - |connection_id| { - session.peer.forward_send( - session.connection_id, - connection_id, - proto::UpdateProject { - project_id: project.id.to_proto(), - worktrees: project.worktrees.clone(), - }, - ) - }, - ); - } - - response.send(proto::ReconnectDevServerResponse { - reshared_projects: reshared_projects - .iter() - .map(|project| proto::ResharedProject { - id: project.id.to_proto(), - collaborators: project - .collaborators - .iter() - .map(|collaborator| collaborator.to_proto()) - .collect(), - }) - .collect(), - })?; - - Ok(()) -} - -async fn shutdown_dev_server( - _: proto::ShutdownDevServer, - response: Response, - session: DevServerSession, -) -> Result<()> { - response.send(proto::Ack {})?; - shutdown_dev_server_internal(session.dev_server_id(), session.connection_id, &session).await?; - remove_dev_server_connection(session.dev_server_id(), &session).await -} - -async fn shutdown_dev_server_internal( - dev_server_id: DevServerId, - connection_id: ConnectionId, - session: &Session, -) -> Result<()> { - let (dev_server_projects, dev_server) = { - let db = session.db().await; - let dev_server_projects = db.get_projects_for_dev_server(dev_server_id).await?; - let dev_server = db.get_dev_server(dev_server_id).await?; - (dev_server_projects, dev_server) - }; - - for project_id in dev_server_projects.iter().filter_map(|p| p.project_id) { - unshare_project_internal( - ProjectId::from_proto(project_id), - connection_id, - None, - session, - ) - .await?; - } - - session - .connection_pool() - .await - .set_dev_server_offline(dev_server_id); - - let status = session - .db() - .await - .dev_server_projects_update(dev_server.user_id) - .await?; - send_dev_server_projects_update(dev_server.user_id, status, session).await; - - Ok(()) -} - -async fn remove_dev_server_connection(dev_server_id: DevServerId, session: &Session) -> Result<()> { - let dev_server_connection = session - .connection_pool() - .await - .dev_server_connection_id(dev_server_id); - - if let Some(dev_server_connection) = dev_server_connection { - session - .connection_pool() - .await - .remove_connection(dev_server_connection)?; - } - Ok(()) -} - /// Updates other participants with changes to the project async fn update_project( request: proto::UpdateProject, @@ -2922,7 +2112,7 @@ async fn update_language_server( async fn forward_read_only_project_request( request: T, response: Response, - session: UserSession, + session: Session, ) -> Result<()> where T: EntityMessage + RequestMessage, @@ 
-2931,7 +2121,7 @@ where let host_connection_id = session .db() .await - .host_for_read_only_project_request(project_id, session.connection_id, session.user_id()) + .host_for_read_only_project_request(project_id, session.connection_id) .await?; let payload = session .peer @@ -2944,38 +2134,13 @@ where async fn forward_find_search_candidates_request( request: proto::FindSearchCandidates, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let project_id = ProjectId::from_proto(request.remote_entity_id()); let host_connection_id = session .db() .await - .host_for_read_only_project_request(project_id, session.connection_id, session.user_id()) - .await?; - let payload = session - .peer - .forward_request(session.connection_id, host_connection_id, request) - .await?; - response.send(payload)?; - Ok(()) -} - -/// forward a project request to the dev server. Only allowed -/// if it's your dev server. -async fn forward_project_request_for_owner( - request: T, - response: Response, - session: UserSession, -) -> Result<()> -where - T: EntityMessage + RequestMessage, -{ - let project_id = ProjectId::from_proto(request.remote_entity_id()); - - let host_connection_id = session - .db() - .await - .host_for_owner_project_request(project_id, session.connection_id, session.user_id()) + .host_for_read_only_project_request(project_id, session.connection_id) .await?; let payload = session .peer @@ -2990,7 +2155,7 @@ where async fn forward_mutating_project_request( request: T, response: Response, - session: UserSession, + session: Session, ) -> Result<()> where T: EntityMessage + RequestMessage, @@ -3000,7 +2165,7 @@ where let host_connection_id = session .db() .await - .host_for_mutating_project_request(project_id, session.connection_id, session.user_id()) + .host_for_mutating_project_request(project_id, session.connection_id) .await?; let payload = session .peer @@ -3051,12 +2216,7 @@ async fn update_buffer( let guard = session .db() .await - .connections_for_buffer_update( - project_id, - session.principal_id(), - session.connection_id, - capability, - ) + .connections_for_buffer_update(project_id, session.connection_id, capability) .await?; let (host, guests) = &*guard; @@ -3109,12 +2269,7 @@ async fn update_context(message: proto::UpdateContext, session: Session) -> Resu let guard = session .db() .await - .connections_for_buffer_update( - project_id, - session.principal_id(), - session.connection_id, - capability, - ) + .connections_for_buffer_update(project_id, session.connection_id, capability) .await?; let (host, guests) = &*guard; @@ -3160,7 +2315,7 @@ async fn broadcast_project_message_from_host, - session: UserSession, + session: Session, ) -> Result<()> { let room_id = RoomId::from_proto(request.room_id); let project_id = request.project_id.map(ProjectId::from_proto); @@ -3195,7 +2350,7 @@ async fn follow( } /// Stop following another user in a call. -async fn unfollow(request: proto::Unfollow, session: UserSession) -> Result<()> { +async fn unfollow(request: proto::Unfollow, session: Session) -> Result<()> { let room_id = RoomId::from_proto(request.room_id); let project_id = request.project_id.map(ProjectId::from_proto); let leader_id = request @@ -3227,7 +2382,7 @@ async fn unfollow(request: proto::Unfollow, session: UserSession) -> Result<()> } /// Notify everyone following you of your current location. 
-async fn update_followers(request: proto::UpdateFollowers, session: UserSession) -> Result<()> { +async fn update_followers(request: proto::UpdateFollowers, session: Session) -> Result<()> { let room_id = RoomId::from_proto(request.room_id); let database = session.db.lock().await; @@ -3289,7 +2444,7 @@ async fn get_users( async fn fuzzy_search_users( request: proto::FuzzySearchUsers, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let query = request.query; let users = match query.len() { @@ -3320,7 +2475,7 @@ async fn fuzzy_search_users( async fn request_contact( request: proto::RequestContact, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let requester_id = session.user_id(); let responder_id = UserId::from_proto(request.responder_id); @@ -3367,7 +2522,7 @@ async fn request_contact( async fn respond_to_contact_request( request: proto::RespondToContactRequest, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let responder_id = session.user_id(); let requester_id = UserId::from_proto(request.requester_id); @@ -3425,7 +2580,7 @@ async fn respond_to_contact_request( async fn remove_contact( request: proto::RemoveContact, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let requester_id = session.user_id(); let responder_id = UserId::from_proto(request.user_id); @@ -3491,11 +2646,7 @@ async fn update_user_plan(_user_id: UserId, session: &Session) -> Result<()> { } async fn subscribe_to_channels(_: proto::SubscribeToChannels, session: Session) -> Result<()> { - subscribe_user_to_channels( - session.user_id().ok_or_else(|| anyhow!("must be a user"))?, - &session, - ) - .await?; + subscribe_user_to_channels(session.user_id(), &session).await?; Ok(()) } @@ -3520,7 +2671,7 @@ async fn subscribe_user_to_channels(user_id: UserId, session: &Session) -> Resul async fn create_channel( request: proto::CreateChannel, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; @@ -3575,7 +2726,7 @@ async fn create_channel( async fn delete_channel( request: proto::DeleteChannel, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; @@ -3603,7 +2754,7 @@ async fn delete_channel( async fn invite_channel_member( request: proto::InviteChannelMember, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -3640,7 +2791,7 @@ async fn invite_channel_member( async fn remove_channel_member( request: proto::RemoveChannelMember, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -3684,7 +2835,7 @@ async fn remove_channel_member( async fn set_channel_visibility( request: proto::SetChannelVisibility, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -3729,7 +2880,7 @@ async fn set_channel_visibility( async fn set_channel_member_role( request: proto::SetChannelMemberRole, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -3777,7 +2928,7 @@ async fn set_channel_member_role( async fn rename_channel( 
request: proto::RenameChannel, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -3809,7 +2960,7 @@ async fn rename_channel( async fn move_channel( request: proto::MoveChannel, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); let to = ChannelId::from_proto(request.to); @@ -3852,7 +3003,7 @@ async fn move_channel( async fn get_channel_members( request: proto::GetChannelMembers, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -3872,7 +3023,7 @@ async fn get_channel_members( async fn respond_to_channel_invite( request: proto::RespondToChannelInvite, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -3913,7 +3064,7 @@ async fn respond_to_channel_invite( async fn join_channel( request: proto::JoinChannel, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); join_channel_internal(channel_id, Box::new(response), session).await @@ -3936,7 +3087,7 @@ impl JoinChannelInternalResponse for Response { async fn join_channel_internal( channel_id: ChannelId, response: Box, - session: UserSession, + session: Session, ) -> Result<()> { let joined_room = { let mut db = session.db().await; @@ -4033,7 +3184,7 @@ async fn join_channel_internal( async fn join_channel_buffer( request: proto::JoinChannelBuffer, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -4064,7 +3215,7 @@ async fn join_channel_buffer( /// Edit the channel notes async fn update_channel_buffer( request: proto::UpdateChannelBuffer, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -4116,7 +3267,7 @@ async fn update_channel_buffer( async fn rejoin_channel_buffers( request: proto::RejoinChannelBuffers, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let buffers = db @@ -4151,7 +3302,7 @@ async fn rejoin_channel_buffers( async fn leave_channel_buffer( request: proto::LeaveChannelBuffer, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); @@ -4213,7 +3364,7 @@ fn send_notifications( async fn send_channel_message( request: proto::SendChannelMessage, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { // Validate the message body. 
let body = request.body.trim().to_string(); @@ -4308,7 +3459,7 @@ async fn send_channel_message( async fn remove_channel_message( request: proto::RemoveChannelMessage, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); let message_id = MessageId::from_proto(request.message_id); @@ -4343,7 +3494,7 @@ async fn remove_channel_message( async fn update_channel_message( request: proto::UpdateChannelMessage, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); let message_id = MessageId::from_proto(request.message_id); @@ -4430,7 +3581,7 @@ async fn update_channel_message( /// Mark a channel message as read async fn acknowledge_channel_message( request: proto::AckChannelMessage, - session: UserSession, + session: Session, ) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); let message_id = MessageId::from_proto(request.message_id); @@ -4450,7 +3601,7 @@ async fn acknowledge_channel_message( /// Mark a buffer version as synced async fn acknowledge_buffer_version( request: proto::AckBufferOperation, - session: UserSession, + session: Session, ) -> Result<()> { let buffer_id = BufferId::from_proto(request.buffer_id); session @@ -4472,9 +3623,6 @@ async fn count_language_model_tokens( session: Session, config: &Config, ) -> Result<()> { - let Some(session) = session.for_user() else { - return Err(anyhow!("user not found"))?; - }; authorize_access_to_legacy_llm_endpoints(&session).await?; let rate_limit: Box = match session.current_plan(&session.db().await).await? { @@ -4592,7 +3740,7 @@ impl RateLimit for FreeComputeEmbeddingsRateLimit { async fn compute_embeddings( request: proto::ComputeEmbeddings, response: Response, - session: UserSession, + session: Session, api_key: Option>, ) -> Result<()> { let api_key = api_key.context("no OpenAI API key configured on the server")?; @@ -4658,7 +3806,7 @@ async fn compute_embeddings( async fn get_cached_embeddings( request: proto::GetCachedEmbeddings, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { authorize_access_to_legacy_llm_endpoints(&session).await?; @@ -4677,7 +3825,7 @@ async fn get_cached_embeddings( /// This is leftover from before the LLM service. /// /// The endpoints protected by this check will be moved there eventually. 
-async fn authorize_access_to_legacy_llm_endpoints(session: &UserSession) -> Result<(), Error> { +async fn authorize_access_to_legacy_llm_endpoints(session: &Session) -> Result<(), Error> { if session.is_staff() { Ok(()) } else { @@ -4689,7 +3837,7 @@ async fn authorize_access_to_legacy_llm_endpoints(session: &UserSession) -> Resu async fn get_supermaven_api_key( _request: proto::GetSupermavenApiKey, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let user_id: String = session.user_id().to_string(); if !session.is_staff() { @@ -4720,7 +3868,7 @@ async fn get_supermaven_api_key( async fn join_channel_chat( request: proto::JoinChannelChat, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); @@ -4738,7 +3886,7 @@ async fn join_channel_chat( } /// Stop receiving chat updates for a channel -async fn leave_channel_chat(request: proto::LeaveChannelChat, session: UserSession) -> Result<()> { +async fn leave_channel_chat(request: proto::LeaveChannelChat, session: Session) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); session .db() @@ -4752,7 +3900,7 @@ async fn leave_channel_chat(request: proto::LeaveChannelChat, session: UserSessi async fn get_channel_messages( request: proto::GetChannelMessages, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let channel_id = ChannelId::from_proto(request.channel_id); let messages = session @@ -4776,7 +3924,7 @@ async fn get_channel_messages( async fn get_channel_messages_by_id( request: proto::GetChannelMessagesById, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let message_ids = request .message_ids @@ -4799,7 +3947,7 @@ async fn get_channel_messages_by_id( async fn get_notifications( request: proto::GetNotifications, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let notifications = session .db() @@ -4821,7 +3969,7 @@ async fn get_notifications( async fn mark_notification_as_read( request: proto::MarkNotificationRead, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let database = &session.db().await; let notifications = database @@ -4843,7 +3991,7 @@ async fn mark_notification_as_read( async fn get_private_user_info( _request: proto::GetPrivateUserInfo, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; @@ -4867,7 +4015,7 @@ async fn get_private_user_info( async fn accept_terms_of_service( _request: proto::AcceptTermsOfService, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; @@ -4887,7 +4035,7 @@ const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30) async fn get_llm_api_token( _request: proto::GetLlmToken, response: Response, - session: UserSession, + session: Session, ) -> Result<()> { let db = session.db().await; @@ -5140,22 +4288,6 @@ fn channel_updated( ); } -async fn send_dev_server_projects_update( - user_id: UserId, - mut status: proto::DevServerProjectsUpdate, - session: &Session, -) { - let pool = session.connection_pool().await; - for dev_server in &mut status.dev_servers { - dev_server.status = - pool.dev_server_status(DevServerId(dev_server.dev_server_id as i32)) as i32; - } - let connections = pool.user_connection_ids(user_id); - for connection_id in connections { - session.peer.send(connection_id, 
status.clone()).trace_err(); - } -} - async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()> { let db = session.db().await; @@ -5191,32 +4323,7 @@ async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()> Ok(()) } -async fn lost_dev_server_connection(session: &DevServerSession) -> Result<()> { - log::info!("lost dev server connection, unsharing projects"); - let project_ids = session - .db() - .await - .get_stale_dev_server_projects(session.connection_id) - .await?; - - for project_id in project_ids { - // not unshare re-checks the connection ids match, so we get away with no transaction - unshare_project_internal(project_id, session.connection_id, None, session).await?; - } - - let user_id = session.dev_server().user_id; - let update = session - .db() - .await - .dev_server_projects_update(user_id) - .await?; - - send_dev_server_projects_update(user_id, update, session).await; - - Ok(()) -} - -async fn leave_room_for_session(session: &UserSession, connection_id: ConnectionId) -> Result<()> { +async fn leave_room_for_session(session: &Session, connection_id: ConnectionId) -> Result<()> { let mut contacts_to_update = HashSet::default(); let room_id; @@ -5312,7 +4419,7 @@ async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> { Ok(()) } -fn project_left(project: &db::LeftProject, session: &UserSession) { +fn project_left(project: &db::LeftProject, session: &Session) { for connection_id in &project.connection_ids { if project.should_unshare { session diff --git a/crates/collab/src/rpc/connection_pool.rs b/crates/collab/src/rpc/connection_pool.rs index 96deefba7949c7607520c32a6b519557acdf28c0..6af90770dc615ce6befb1bce6edfc94813f00a30 100644 --- a/crates/collab/src/rpc/connection_pool.rs +++ b/crates/collab/src/rpc/connection_pool.rs @@ -1,7 +1,7 @@ -use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId}; +use crate::db::{ChannelId, ChannelRole, UserId}; use anyhow::{anyhow, Result}; use collections::{BTreeMap, HashMap, HashSet}; -use rpc::{proto, ConnectionId}; +use rpc::ConnectionId; use semantic_version::SemanticVersion; use serde::Serialize; use std::fmt; @@ -11,9 +11,7 @@ use tracing::instrument; pub struct ConnectionPool { connections: BTreeMap, connected_users: BTreeMap, - connected_dev_servers: BTreeMap, channels: ChannelPool, - offline_dev_servers: HashSet, } #[derive(Default, Serialize)] @@ -32,13 +30,13 @@ impl fmt::Display for ZedVersion { impl ZedVersion { pub fn can_collaborate(&self) -> bool { - self.0 >= SemanticVersion::new(0, 151, 0) + self.0 >= SemanticVersion::new(0, 157, 0) } } #[derive(Serialize)] pub struct Connection { - pub principal_id: PrincipalId, + pub user_id: UserId, pub admin: bool, pub zed_version: ZedVersion, } @@ -47,7 +45,6 @@ impl ConnectionPool { pub fn reset(&mut self) { self.connections.clear(); self.connected_users.clear(); - self.connected_dev_servers.clear(); self.channels.clear(); } @@ -66,7 +63,7 @@ impl ConnectionPool { self.connections.insert( connection_id, Connection { - principal_id: PrincipalId::UserId(user_id), + user_id, admin, zed_version, }, @@ -75,25 +72,6 @@ impl ConnectionPool { connected_user.connection_ids.insert(connection_id); } - pub fn add_dev_server( - &mut self, - connection_id: ConnectionId, - dev_server_id: DevServerId, - zed_version: ZedVersion, - ) { - self.connections.insert( - connection_id, - Connection { - principal_id: PrincipalId::DevServerId(dev_server_id), - admin: false, - zed_version, - }, - ); - - self.connected_dev_servers - 
.insert(dev_server_id, connection_id); - } - #[instrument(skip(self))] pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> { let connection = self @@ -101,28 +79,18 @@ impl ConnectionPool { .get_mut(&connection_id) .ok_or_else(|| anyhow!("no such connection"))?; - match connection.principal_id { - PrincipalId::UserId(user_id) => { - let connected_user = self.connected_users.get_mut(&user_id).unwrap(); - connected_user.connection_ids.remove(&connection_id); - if connected_user.connection_ids.is_empty() { - self.connected_users.remove(&user_id); - self.channels.remove_user(&user_id); - } - } - PrincipalId::DevServerId(dev_server_id) => { - self.connected_dev_servers.remove(&dev_server_id); - self.offline_dev_servers.remove(&dev_server_id); - } - } + let user_id = connection.user_id; + + let connected_user = self.connected_users.get_mut(&user_id).unwrap(); + connected_user.connection_ids.remove(&connection_id); + if connected_user.connection_ids.is_empty() { + self.connected_users.remove(&user_id); + self.channels.remove_user(&user_id); + }; self.connections.remove(&connection_id).unwrap(); Ok(()) } - pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) { - self.offline_dev_servers.insert(dev_server_id); - } - pub fn connections(&self) -> impl Iterator { self.connections.values() } @@ -147,42 +115,6 @@ impl ConnectionPool { .copied() } - pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus { - if self.dev_server_connection_id(dev_server_id).is_some() - && !self.offline_dev_servers.contains(&dev_server_id) - { - proto::DevServerStatus::Online - } else { - proto::DevServerStatus::Offline - } - } - - pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option { - self.connected_dev_servers.get(&dev_server_id).copied() - } - - pub fn online_dev_server_connection_id( - &self, - dev_server_id: DevServerId, - ) -> Result { - match self.connected_dev_servers.get(&dev_server_id) { - Some(cid) => Ok(*cid), - None => Err(anyhow!(proto::ErrorCode::DevServerOffline)), - } - } - - pub fn dev_server_connection_id_supporting( - &self, - dev_server_id: DevServerId, - required: ZedVersion, - ) -> Result { - match self.connected_dev_servers.get(&dev_server_id) { - Some(cid) if self.connections[cid].zed_version >= required => Ok(*cid), - Some(_) => Err(anyhow!(proto::ErrorCode::RemoteUpgradeRequired)), - None => Err(anyhow!(proto::ErrorCode::DevServerOffline)), - } - } - pub fn channel_user_ids( &self, channel_id: ChannelId, @@ -227,39 +159,22 @@ impl ConnectionPool { #[cfg(test)] pub fn check_invariants(&self) { for (connection_id, connection) in &self.connections { - match &connection.principal_id { - PrincipalId::UserId(user_id) => { - assert!(self - .connected_users - .get(user_id) - .unwrap() - .connection_ids - .contains(connection_id)); - } - PrincipalId::DevServerId(dev_server_id) => { - assert_eq!( - self.connected_dev_servers.get(dev_server_id).unwrap(), - connection_id - ); - } - } + assert!(self + .connected_users + .get(&connection.user_id) + .unwrap() + .connection_ids + .contains(connection_id)); } for (user_id, state) in &self.connected_users { for connection_id in &state.connection_ids { assert_eq!( - self.connections.get(connection_id).unwrap().principal_id, - PrincipalId::UserId(*user_id) + self.connections.get(connection_id).unwrap().user_id, + *user_id ); } } - - for (dev_server_id, connection_id) in &self.connected_dev_servers { - assert_eq!( - 
self.connections.get(connection_id).unwrap().principal_id, - PrincipalId::DevServerId(*dev_server_id) - ); - } } } diff --git a/crates/collab/src/tests.rs b/crates/collab/src/tests.rs index f6e0bc3036b1c5b518eed97304fe2f3ac4e3291c..29373bc6ea170d962e3f967f83448e5751174c73 100644 --- a/crates/collab/src/tests.rs +++ b/crates/collab/src/tests.rs @@ -8,7 +8,6 @@ mod channel_buffer_tests; mod channel_guest_tests; mod channel_message_tests; mod channel_tests; -mod dev_server_tests; mod editor_tests; mod following_tests; mod integration_tests; diff --git a/crates/collab/src/tests/dev_server_tests.rs b/crates/collab/src/tests/dev_server_tests.rs deleted file mode 100644 index cbeb2a85a0ae0e09f995d8a34681d85e91755923..0000000000000000000000000000000000000000 --- a/crates/collab/src/tests/dev_server_tests.rs +++ /dev/null @@ -1,643 +0,0 @@ -use std::{path::Path, sync::Arc}; - -use call::ActiveCall; -use editor::Editor; -use fs::Fs; -use gpui::{TestAppContext, VisualTestContext, WindowHandle}; -use rpc::{proto::DevServerStatus, ErrorCode, ErrorExt}; -use serde_json::json; -use workspace::{AppState, Workspace}; - -use crate::tests::{following_tests::join_channel, TestServer}; - -use super::TestClient; - -#[gpui::test] -async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) { - let (server, client) = TestServer::start1(cx).await; - - let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone()); - - let resp = store - .update(cx, |store, cx| { - store.create_dev_server("server-1".to_string(), None, cx) - }) - .await - .unwrap(); - - store.update(cx, |store, _| { - assert_eq!(store.dev_servers().len(), 1); - assert_eq!(store.dev_servers()[0].name, "server-1"); - assert_eq!(store.dev_servers()[0].status, DevServerStatus::Offline); - }); - - let dev_server = server.create_dev_server(resp.access_token, cx2).await; - cx.executor().run_until_parked(); - store.update(cx, |store, _| { - assert_eq!(store.dev_servers()[0].status, DevServerStatus::Online); - }); - - dev_server - .fs() - .insert_tree( - "/remote", - json!({ - "1.txt": "remote\nremote\nremote", - "2.js": "function two() { return 2; }", - "3.rs": "mod test", - }), - ) - .await; - - store - .update(cx, |store, cx| { - store.create_dev_server_project( - client::DevServerId(resp.dev_server_id), - "/remote".to_string(), - cx, - ) - }) - .await - .unwrap(); - - cx.executor().run_until_parked(); - - let remote_workspace = store - .update(cx, |store, cx| { - let projects = store.dev_server_projects(); - assert_eq!(projects.len(), 1); - assert_eq!(projects[0].paths, vec!["/remote"]); - workspace::join_dev_server_project( - projects[0].id, - projects[0].project_id.unwrap(), - client.app_state.clone(), - None, - cx, - ) - }) - .await - .unwrap(); - - cx.executor().run_until_parked(); - - let cx = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut(); - cx.simulate_keystrokes("cmd-p 1 enter"); - - let editor = remote_workspace - .update(cx, |ws, cx| { - ws.active_item_as::(cx).unwrap().clone() - }) - .unwrap(); - editor.update(cx, |ed, cx| { - assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote"); - }); - cx.simulate_input("wow!"); - cx.simulate_keystrokes("cmd-s"); - - let content = dev_server - .fs() - .load(Path::new("/remote/1.txt")) - .await - .unwrap(); - assert_eq!(content, "wow!remote\nremote\nremote\n"); -} - -#[gpui::test] -async fn test_dev_server_env_files( - cx1: &mut gpui::TestAppContext, - cx2: &mut gpui::TestAppContext, - cx3: &mut gpui::TestAppContext, -) { - let (server, 
client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; - - let (_dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; - - cx1.executor().run_until_parked(); - - let cx1 = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut(); - cx1.simulate_keystrokes("cmd-p . e enter"); - - let editor = remote_workspace - .update(cx1, |ws, cx| { - ws.active_item_as::(cx).unwrap().clone() - }) - .unwrap(); - editor.update(cx1, |ed, cx| { - assert_eq!(ed.text(cx).to_string(), "SECRET"); - }); - - cx1.update(|cx| { - workspace::join_channel( - channel_id, - client1.app_state.clone(), - Some(remote_workspace), - cx, - ) - }) - .await - .unwrap(); - cx1.executor().run_until_parked(); - - remote_workspace - .update(cx1, |ws, cx| { - assert!(ws.project().read(cx).is_shared()); - }) - .unwrap(); - - join_channel(channel_id, &client2, cx2).await.unwrap(); - cx2.executor().run_until_parked(); - - let (workspace2, cx2) = client2.active_workspace(cx2); - let editor = workspace2.update(cx2, |ws, cx| { - ws.active_item_as::(cx).unwrap().clone() - }); - // TODO: it'd be nice to hide .env files from other people - editor.update(cx2, |ed, cx| { - assert_eq!(ed.text(cx).to_string(), "SECRET"); - }); -} - -async fn create_dev_server_project( - server: &TestServer, - client_app_state: Arc, - cx: &mut TestAppContext, - cx_devserver: &mut TestAppContext, -) -> (TestClient, WindowHandle) { - let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone()); - - let resp = store - .update(cx, |store, cx| { - store.create_dev_server("server-1".to_string(), None, cx) - }) - .await - .unwrap(); - let dev_server = server - .create_dev_server(resp.access_token, cx_devserver) - .await; - - cx.executor().run_until_parked(); - - dev_server - .fs() - .insert_tree( - "/remote", - json!({ - "1.txt": "remote\nremote\nremote", - ".env": "SECRET", - }), - ) - .await; - - store - .update(cx, |store, cx| { - store.create_dev_server_project( - client::DevServerId(resp.dev_server_id), - "/remote".to_string(), - cx, - ) - }) - .await - .unwrap(); - - cx.executor().run_until_parked(); - - let workspace = store - .update(cx, |store, cx| { - let projects = store.dev_server_projects(); - assert_eq!(projects.len(), 1); - assert_eq!(projects[0].paths, vec!["/remote"]); - workspace::join_dev_server_project( - projects[0].id, - projects[0].project_id.unwrap(), - client_app_state, - None, - cx, - ) - }) - .await - .unwrap(); - - cx.executor().run_until_parked(); - - (dev_server, workspace) -} - -#[gpui::test] -async fn test_dev_server_leave_room( - cx1: &mut gpui::TestAppContext, - cx2: &mut gpui::TestAppContext, - cx3: &mut gpui::TestAppContext, -) { - let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; - - let (_dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; - - cx1.update(|cx| { - workspace::join_channel( - channel_id, - client1.app_state.clone(), - Some(remote_workspace), - cx, - ) - }) - .await - .unwrap(); - cx1.executor().run_until_parked(); - - remote_workspace - .update(cx1, |ws, cx| { - assert!(ws.project().read(cx).is_shared()); - }) - .unwrap(); - - join_channel(channel_id, &client2, cx2).await.unwrap(); - cx2.executor().run_until_parked(); - - cx1.update(|cx| ActiveCall::global(cx).update(cx, |active_call, cx| active_call.hang_up(cx))) - .await - .unwrap(); - - cx1.executor().run_until_parked(); - - let (workspace, cx2) = client2.active_workspace(cx2); - 
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx))); -} - -#[gpui::test] -async fn test_dev_server_delete( - cx1: &mut gpui::TestAppContext, - cx2: &mut gpui::TestAppContext, - cx3: &mut gpui::TestAppContext, -) { - let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; - - let (_dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; - - cx1.update(|cx| { - workspace::join_channel( - channel_id, - client1.app_state.clone(), - Some(remote_workspace), - cx, - ) - }) - .await - .unwrap(); - cx1.executor().run_until_parked(); - - remote_workspace - .update(cx1, |ws, cx| { - assert!(ws.project().read(cx).is_shared()); - }) - .unwrap(); - - join_channel(channel_id, &client2, cx2).await.unwrap(); - cx2.executor().run_until_parked(); - - cx1.update(|cx| { - dev_server_projects::Store::global(cx).update(cx, |store, cx| { - store.delete_dev_server_project(store.dev_server_projects().first().unwrap().id, cx) - }) - }) - .await - .unwrap(); - - cx1.executor().run_until_parked(); - - let (workspace, cx2) = client2.active_workspace(cx2); - cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx))); - - cx1.update(|cx| { - dev_server_projects::Store::global(cx).update(cx, |store, _| { - assert_eq!(store.dev_server_projects().len(), 0); - }) - }) -} - -#[gpui::test] -async fn test_dev_server_rename( - cx1: &mut gpui::TestAppContext, - cx2: &mut gpui::TestAppContext, - cx3: &mut gpui::TestAppContext, -) { - let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; - - let (_dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; - - cx1.update(|cx| { - workspace::join_channel( - channel_id, - client1.app_state.clone(), - Some(remote_workspace), - cx, - ) - }) - .await - .unwrap(); - cx1.executor().run_until_parked(); - - remote_workspace - .update(cx1, |ws, cx| { - assert!(ws.project().read(cx).is_shared()); - }) - .unwrap(); - - join_channel(channel_id, &client2, cx2).await.unwrap(); - cx2.executor().run_until_parked(); - - cx1.update(|cx| { - dev_server_projects::Store::global(cx).update(cx, |store, cx| { - store.rename_dev_server( - store.dev_servers().first().unwrap().id, - "name-edited".to_string(), - None, - cx, - ) - }) - }) - .await - .unwrap(); - - cx1.executor().run_until_parked(); - - cx1.update(|cx| { - dev_server_projects::Store::global(cx).update(cx, |store, _| { - assert_eq!(store.dev_servers().first().unwrap().name, "name-edited"); - }) - }) -} - -#[gpui::test] -async fn test_dev_server_refresh_access_token( - cx1: &mut gpui::TestAppContext, - cx2: &mut gpui::TestAppContext, - cx3: &mut gpui::TestAppContext, - cx4: &mut gpui::TestAppContext, -) { - let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await; - - let (_dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; - - cx1.update(|cx| { - workspace::join_channel( - channel_id, - client1.app_state.clone(), - Some(remote_workspace), - cx, - ) - }) - .await - .unwrap(); - cx1.executor().run_until_parked(); - - remote_workspace - .update(cx1, |ws, cx| { - assert!(ws.project().read(cx).is_shared()); - }) - .unwrap(); - - join_channel(channel_id, &client2, cx2).await.unwrap(); - cx2.executor().run_until_parked(); - - // Regenerate the access token - let new_token_response = cx1 - .update(|cx| { - dev_server_projects::Store::global(cx).update(cx, 
|store, cx| { - store.regenerate_dev_server_token(store.dev_servers().first().unwrap().id, cx) - }) - }) - .await - .unwrap(); - - cx1.executor().run_until_parked(); - - // Assert that the other client was disconnected - let (workspace, cx2) = client2.active_workspace(cx2); - cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx))); - - // Assert that the owner of the dev server does not see the dev server as online anymore - let (workspace, cx1) = client1.active_workspace(cx1); - cx1.update(|cx| { - assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)); - dev_server_projects::Store::global(cx).update(cx, |store, _| { - assert_eq!( - store.dev_servers().first().unwrap().status, - DevServerStatus::Offline - ); - }) - }); - - // Reconnect the dev server with the new token - let _dev_server = server - .create_dev_server(new_token_response.access_token, cx4) - .await; - - cx1.executor().run_until_parked(); - - // Assert that the dev server is online again - cx1.update(|cx| { - dev_server_projects::Store::global(cx).update(cx, |store, _| { - assert_eq!(store.dev_servers().len(), 1); - assert_eq!( - store.dev_servers().first().unwrap().status, - DevServerStatus::Online - ); - }) - }); -} - -#[gpui::test] -async fn test_dev_server_reconnect( - cx1: &mut gpui::TestAppContext, - cx2: &mut gpui::TestAppContext, - cx3: &mut gpui::TestAppContext, -) { - let (mut server, client1) = TestServer::start1(cx1).await; - let channel_id = server - .make_channel("test", None, (&client1, cx1), &mut []) - .await; - - let (_dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await; - - cx1.update(|cx| { - workspace::join_channel( - channel_id, - client1.app_state.clone(), - Some(remote_workspace), - cx, - ) - }) - .await - .unwrap(); - cx1.executor().run_until_parked(); - - remote_workspace - .update(cx1, |ws, cx| { - assert!(ws.project().read(cx).is_shared()); - }) - .unwrap(); - - drop(client1); - - let client2 = server.create_client(cx2, "user_a").await; - - let store = cx2.update(|cx| dev_server_projects::Store::global(cx).clone()); - - store - .update(cx2, |store, cx| { - let projects = store.dev_server_projects(); - workspace::join_dev_server_project( - projects[0].id, - projects[0].project_id.unwrap(), - client2.app_state.clone(), - None, - cx, - ) - }) - .await - .unwrap(); -} - -#[gpui::test] -async fn test_dev_server_restart(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) { - let (server, client1) = TestServer::start1(cx1).await; - - let (_dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await; - let cx = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut(); - - server.reset().await; - cx.run_until_parked(); - - cx.simulate_keystrokes("cmd-p 1 enter"); - remote_workspace - .update(cx, |ws, cx| { - ws.active_item_as::(cx) - .unwrap() - .update(cx, |ed, cx| { - assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote"); - }) - }) - .unwrap(); -} - -#[gpui::test] -async fn test_create_dev_server_project_path_validation( - cx1: &mut gpui::TestAppContext, - cx2: &mut gpui::TestAppContext, - cx3: &mut gpui::TestAppContext, -) { - let (server, client1) = TestServer::start1(cx1).await; - let _channel_id = server - .make_channel("test", None, (&client1, cx1), &mut []) - .await; - - // Creating a project with a path that does exist should not fail - let (_dev_server, _) = - create_dev_server_project(&server, 
client1.app_state.clone(), cx1, cx2).await; - - cx1.executor().run_until_parked(); - - let store = cx1.update(|cx| dev_server_projects::Store::global(cx).clone()); - - let resp = store - .update(cx1, |store, cx| { - store.create_dev_server("server-2".to_string(), None, cx) - }) - .await - .unwrap(); - - cx1.executor().run_until_parked(); - - let _dev_server = server.create_dev_server(resp.access_token, cx3).await; - - cx1.executor().run_until_parked(); - - // Creating a remote project with a path that does not exist should fail - let result = store - .update(cx1, |store, cx| { - store.create_dev_server_project( - client::DevServerId(resp.dev_server_id), - "/notfound".to_string(), - cx, - ) - }) - .await; - - cx1.executor().run_until_parked(); - - let error = result.unwrap_err(); - assert!(matches!( - error.error_code(), - ErrorCode::DevServerProjectPathDoesNotExist - )); -} - -#[gpui::test] -async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) { - let (server, client1) = TestServer::start1(cx1).await; - - // Creating a project with a path that does exist should not fail - let (dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await; - - let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1); - - cx.simulate_keystrokes("cmd-p 1 enter"); - cx.simulate_keystrokes("cmd-shift-s"); - cx.simulate_input("2.txt"); - cx.simulate_keystrokes("enter"); - - cx.executor().run_until_parked(); - - let title = remote_workspace - .update(&mut cx, |ws, cx| { - let active_item = ws.active_item(cx).unwrap(); - active_item.tab_description(0, cx).unwrap() - }) - .unwrap(); - - assert_eq!(title, "2.txt"); - - let path = Path::new("/remote/2.txt"); - assert_eq!( - dev_server.fs().load(path).await.unwrap(), - "remote\nremote\nremote" - ); -} - -#[gpui::test] -async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) { - let (server, client1) = TestServer::start1(cx1).await; - - // Creating a project with a path that does exist should not fail - let (dev_server, remote_workspace) = - create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await; - - let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1); - - cx.simulate_keystrokes("cmd-n"); - cx.simulate_input("new!"); - cx.simulate_keystrokes("cmd-shift-s"); - cx.simulate_input("2.txt"); - cx.simulate_keystrokes("enter"); - - cx.executor().run_until_parked(); - - let title = remote_workspace - .update(&mut cx, |ws, cx| { - ws.active_item(cx).unwrap().tab_description(0, cx).unwrap() - }) - .unwrap(); - - assert_eq!(title, "2.txt"); - - let path = Path::new("/remote/2.txt"); - assert_eq!(dev_server.fs().load(path).await.unwrap(), "new!"); -} diff --git a/crates/collab/src/tests/test_server.rs b/crates/collab/src/tests/test_server.rs index 210a049e0bcb30301ab89f813ab6be36e67999e9..0e8d0fd808cf769e9584aa4f96ac077456f964f4 100644 --- a/crates/collab/src/tests/test_server.rs +++ b/crates/collab/src/tests/test_server.rs @@ -1,5 +1,4 @@ use crate::{ - auth::split_dev_server_token, db::{tests::TestDb, NewUserParams, UserId}, executor::Executor, rpc::{Principal, Server, ZedVersion, CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}, @@ -204,7 +203,7 @@ impl TestServer { .override_authenticate(move |cx| { cx.spawn(|_| async move { let access_token = "the-token".to_string(); - Ok(Credentials::User { + Ok(Credentials { user_id: user_id.to_proto(), access_token, }) @@ -213,7 +212,7 @@ impl TestServer { 
.override_establish_connection(move |credentials, cx| { assert_eq!( credentials, - &Credentials::User { + &Credentials { user_id: user_id.0 as u64, access_token: "the-token".into() } @@ -297,7 +296,6 @@ impl TestServer { collab_ui::init(&app_state, cx); file_finder::init(cx); menu::init(); - dev_server_projects::init(client.clone(), cx); settings::KeymapFile::load_asset(os_keymap, cx).unwrap(); language_model::LanguageModelRegistry::test(cx); assistant::context_store::init(&client.clone().into()); @@ -319,135 +317,6 @@ impl TestServer { client } - pub async fn create_dev_server( - &self, - access_token: String, - cx: &mut TestAppContext, - ) -> TestClient { - cx.update(|cx| { - if cx.has_global::() { - panic!("Same cx used to create two test clients") - } - let settings = SettingsStore::test(cx); - cx.set_global(settings); - release_channel::init(SemanticVersion::default(), cx); - client::init_settings(cx); - }); - let (dev_server_id, _) = split_dev_server_token(&access_token).unwrap(); - - let clock = Arc::new(FakeSystemClock::default()); - let http = FakeHttpClient::with_404_response(); - let mut client = cx.update(|cx| Client::new(clock, http.clone(), cx)); - let server = self.server.clone(); - let db = self.app_state.db.clone(); - let connection_killers = self.connection_killers.clone(); - let forbid_connections = self.forbid_connections.clone(); - Arc::get_mut(&mut client) - .unwrap() - .set_id(1) - .set_dev_server_token(client::DevServerToken(access_token.clone())) - .override_establish_connection(move |credentials, cx| { - assert_eq!( - credentials, - &Credentials::DevServer { - token: client::DevServerToken(access_token.to_string()) - } - ); - - let server = server.clone(); - let db = db.clone(); - let connection_killers = connection_killers.clone(); - let forbid_connections = forbid_connections.clone(); - cx.spawn(move |cx| async move { - if forbid_connections.load(SeqCst) { - Err(EstablishConnectionError::other(anyhow!( - "server is forbidding connections" - ))) - } else { - let (client_conn, server_conn, killed) = - Connection::in_memory(cx.background_executor().clone()); - let (connection_id_tx, connection_id_rx) = oneshot::channel(); - let dev_server = db - .get_dev_server(dev_server_id) - .await - .expect("retrieving dev_server failed"); - cx.background_executor() - .spawn(server.handle_connection( - server_conn, - "dev-server".to_string(), - Principal::DevServer(dev_server), - ZedVersion(SemanticVersion::new(1, 0, 0)), - None, - Some(connection_id_tx), - Executor::Deterministic(cx.background_executor().clone()), - )) - .detach(); - let connection_id = connection_id_rx.await.map_err(|e| { - EstablishConnectionError::Other(anyhow!( - "{} (is server shutting down?)", - e - )) - })?; - connection_killers - .lock() - .insert(connection_id.into(), killed); - Ok(client_conn) - } - }) - }); - - let fs = FakeFs::new(cx.executor()); - let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); - let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx)); - let language_registry = Arc::new(LanguageRegistry::test(cx.executor())); - let session = cx.new_model(|cx| AppSession::new(Session::test(), cx)); - let app_state = Arc::new(workspace::AppState { - client: client.clone(), - user_store: user_store.clone(), - workspace_store, - languages: language_registry, - fs: fs.clone(), - build_window_options: |_, _| Default::default(), - node_runtime: NodeRuntime::unavailable(), - session, - }); - - cx.update(|cx| { - theme::init(theme::LoadThemes::JustBase, cx); 
- Project::init(&client, cx); - client::init(&client, cx); - language::init(cx); - editor::init(cx); - workspace::init(app_state.clone(), cx); - call::init(client.clone(), user_store.clone(), cx); - channel::init(&client, user_store.clone(), cx); - notifications::init(client.clone(), user_store, cx); - collab_ui::init(&app_state, cx); - file_finder::init(cx); - menu::init(); - headless::init( - client.clone(), - headless::AppState { - languages: app_state.languages.clone(), - user_store: app_state.user_store.clone(), - fs: fs.clone(), - node_runtime: app_state.node_runtime.clone(), - }, - cx, - ) - }) - .await - .unwrap(); - - TestClient { - app_state, - username: "dev-server".to_string(), - channel_store: cx.read(ChannelStore::global).clone(), - notification_store: cx.read(NotificationStore::global).clone(), - state: Default::default(), - } - } - pub fn disconnect_client(&self, peer_id: PeerId) { self.connection_killers .lock() diff --git a/crates/dev_server_projects/Cargo.toml b/crates/dev_server_projects/Cargo.toml deleted file mode 100644 index 81d50301bc495b2aaa230ffa279ad4871451f0f1..0000000000000000000000000000000000000000 --- a/crates/dev_server_projects/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "dev_server_projects" -version = "0.1.0" -edition = "2021" -publish = false -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/dev_server_projects.rs" -doctest = false - -[dependencies] -anyhow.workspace = true -gpui.workspace = true -serde.workspace = true -client.workspace = true -rpc.workspace = true - -[dev-dependencies] -serde_json.workspace = true diff --git a/crates/dev_server_projects/LICENSE-GPL b/crates/dev_server_projects/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/dev_server_projects/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/dev_server_projects/src/dev_server_projects.rs b/crates/dev_server_projects/src/dev_server_projects.rs index 0852b1d20f76ac82d12ddfc1b6866dfe43603c23..8b137891791fe96927ad78e64b0aad7bded08bdc 100644 --- a/crates/dev_server_projects/src/dev_server_projects.rs +++ b/crates/dev_server_projects/src/dev_server_projects.rs @@ -1,249 +1 @@ -use anyhow::Result; -use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ModelContext, SharedString, Task}; -use rpc::{ - proto::{self, DevServerStatus}, - TypedEnvelope, -}; -use std::{collections::HashMap, sync::Arc}; -use client::{Client, ProjectId}; -pub use client::{DevServerId, DevServerProjectId}; - -pub struct Store { - dev_server_projects: HashMap, - dev_servers: HashMap, - _subscriptions: Vec, - client: Arc, -} - -#[derive(Debug, Clone)] -pub struct DevServerProject { - pub id: DevServerProjectId, - pub project_id: Option, - pub paths: Vec, - pub dev_server_id: DevServerId, -} - -impl From for DevServerProject { - fn from(project: proto::DevServerProject) -> Self { - Self { - id: DevServerProjectId(project.id), - project_id: project.project_id.map(ProjectId), - paths: project.paths.into_iter().map(|path| path.into()).collect(), - dev_server_id: DevServerId(project.dev_server_id), - } - } -} - -#[derive(Debug, Clone)] -pub struct DevServer { - pub id: DevServerId, - pub name: SharedString, - pub ssh_connection_string: Option, - pub status: DevServerStatus, -} - -impl From for DevServer { - fn from(dev_server: proto::DevServer) -> Self { - Self { - id: DevServerId(dev_server.dev_server_id), - status: 
dev_server.status(), - name: dev_server.name.into(), - ssh_connection_string: dev_server.ssh_connection_string.map(|s| s.into()), - } - } -} - -struct GlobalStore(Model); - -impl Global for GlobalStore {} - -pub fn init(client: Arc, cx: &mut AppContext) { - let store = cx.new_model(|cx| Store::new(client, cx)); - cx.set_global(GlobalStore(store)); -} - -impl Store { - pub fn global(cx: &AppContext) -> Model { - cx.global::().0.clone() - } - - pub fn new(client: Arc, cx: &ModelContext) -> Self { - Self { - dev_server_projects: Default::default(), - dev_servers: Default::default(), - _subscriptions: vec![client - .add_message_handler(cx.weak_model(), Self::handle_dev_server_projects_update)], - client, - } - } - - pub fn projects_for_server(&self, id: DevServerId) -> Vec { - let mut projects: Vec = self - .dev_server_projects - .values() - .filter(|project| project.dev_server_id == id) - .cloned() - .collect(); - projects.sort_by_key(|p| (p.paths.clone(), p.id)); - projects - } - - pub fn dev_servers(&self) -> Vec { - let mut dev_servers: Vec = self.dev_servers.values().cloned().collect(); - dev_servers.sort_by_key(|d| (d.status == DevServerStatus::Offline, d.name.clone(), d.id)); - dev_servers - } - - pub fn dev_server(&self, id: DevServerId) -> Option<&DevServer> { - self.dev_servers.get(&id) - } - - pub fn dev_server_status(&self, id: DevServerId) -> DevServerStatus { - self.dev_server(id) - .map(|server| server.status) - .unwrap_or(DevServerStatus::Offline) - } - - pub fn dev_server_projects(&self) -> Vec { - let mut projects: Vec = - self.dev_server_projects.values().cloned().collect(); - projects.sort_by_key(|p| (p.paths.clone(), p.id)); - projects - } - - pub fn dev_server_project(&self, id: DevServerProjectId) -> Option<&DevServerProject> { - self.dev_server_projects.get(&id) - } - - pub fn dev_server_for_project(&self, id: DevServerProjectId) -> Option<&DevServer> { - self.dev_server_project(id) - .and_then(|project| self.dev_server(project.dev_server_id)) - } - - async fn handle_dev_server_projects_update( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result<()> { - this.update(&mut cx, |this, cx| { - this.dev_servers = envelope - .payload - .dev_servers - .into_iter() - .map(|dev_server| (DevServerId(dev_server.dev_server_id), dev_server.into())) - .collect(); - this.dev_server_projects = envelope - .payload - .dev_server_projects - .into_iter() - .map(|project| (DevServerProjectId(project.id), project.into())) - .collect(); - - cx.notify(); - })?; - Ok(()) - } - - pub fn create_dev_server_project( - &mut self, - dev_server_id: DevServerId, - path: String, - cx: &mut ModelContext, - ) -> Task> { - let client = self.client.clone(); - cx.background_executor().spawn(async move { - client - .request(proto::CreateDevServerProject { - dev_server_id: dev_server_id.0, - path, - }) - .await - }) - } - - pub fn create_dev_server( - &mut self, - name: String, - ssh_connection_string: Option, - cx: &mut ModelContext, - ) -> Task> { - let client = self.client.clone(); - cx.background_executor().spawn(async move { - let result = client - .request(proto::CreateDevServer { - name, - ssh_connection_string, - }) - .await?; - Ok(result) - }) - } - - pub fn rename_dev_server( - &mut self, - dev_server_id: DevServerId, - name: String, - ssh_connection_string: Option, - cx: &mut ModelContext, - ) -> Task> { - let client = self.client.clone(); - cx.background_executor().spawn(async move { - client - .request(proto::RenameDevServer { - dev_server_id: dev_server_id.0, - name, 
- ssh_connection_string, - }) - .await?; - Ok(()) - }) - } - - pub fn regenerate_dev_server_token( - &mut self, - dev_server_id: DevServerId, - cx: &mut ModelContext, - ) -> Task> { - let client = self.client.clone(); - cx.background_executor().spawn(async move { - client - .request(proto::RegenerateDevServerToken { - dev_server_id: dev_server_id.0, - }) - .await - }) - } - - pub fn delete_dev_server( - &mut self, - id: DevServerId, - cx: &mut ModelContext, - ) -> Task> { - let client = self.client.clone(); - cx.background_executor().spawn(async move { - client - .request(proto::DeleteDevServer { - dev_server_id: id.0, - }) - .await?; - Ok(()) - }) - } - - pub fn delete_dev_server_project( - &mut self, - id: DevServerProjectId, - cx: &mut ModelContext, - ) -> Task> { - let client = self.client.clone(); - cx.background_executor().spawn(async move { - client - .request(proto::DeleteDevServerProject { - dev_server_project_id: id.0, - }) - .await?; - Ok(()) - }) - } -} diff --git a/crates/headless/Cargo.toml b/crates/headless/Cargo.toml deleted file mode 100644 index 209e843c0410a796665451fc6849f6d194f99e79..0000000000000000000000000000000000000000 --- a/crates/headless/Cargo.toml +++ /dev/null @@ -1,37 +0,0 @@ -[package] -name = "headless" -version = "0.1.0" -edition = "2021" -publish = false -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/headless.rs" -doctest = false - -[dependencies] -anyhow.workspace = true -client.workspace = true -extension.workspace = true -signal-hook.workspace = true -gpui.workspace = true -log.workspace = true -util.workspace = true -node_runtime.workspace = true -language.workspace = true -project.workspace = true -proto.workspace = true -fs.workspace = true -futures.workspace = true -settings.workspace = true -shellexpand.workspace = true -postage.workspace = true - -[dev-dependencies] -client = { workspace = true, features = ["test-support"] } -fs = { workspace = true, features = ["test-support"] } -gpui = { workspace = true, features = ["test-support"] } -util = { workspace = true, features = ["test-support"] } diff --git a/crates/headless/LICENSE-GPL b/crates/headless/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/headless/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/headless/src/headless.rs b/crates/headless/src/headless.rs deleted file mode 100644 index 6f8f42fc0f77f09140a6129d6f332097e1b35c2c..0000000000000000000000000000000000000000 --- a/crates/headless/src/headless.rs +++ /dev/null @@ -1,397 +0,0 @@ -use anyhow::{anyhow, Result}; -use client::DevServerProjectId; -use client::{user::UserStore, Client, ClientSettings}; -use extension::ExtensionStore; -use fs::Fs; -use futures::{Future, StreamExt}; -use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ModelContext, Task, WeakModel}; -use language::LanguageRegistry; -use node_runtime::NodeRuntime; -use postage::stream::Stream; -use project::Project; -use proto::{self, ErrorCode, TypedEnvelope}; -use settings::{Settings, SettingsStore}; -use std::path::Path; -use std::{collections::HashMap, sync::Arc}; -use util::{ResultExt, TryFutureExt}; - -pub struct DevServer { - client: Arc, - app_state: AppState, - remote_shutdown: bool, - projects: HashMap>, - _subscriptions: Vec, - _maintain_connection: Task>, -} - -pub struct AppState { - pub node_runtime: NodeRuntime, - pub user_store: Model, - pub languages: Arc, - pub 
fs: Arc, -} - -struct GlobalDevServer(Model); - -impl Global for GlobalDevServer {} - -pub fn init(client: Arc, app_state: AppState, cx: &mut AppContext) -> Task> { - let dev_server = cx.new_model(|cx| DevServer::new(client.clone(), app_state, cx)); - cx.set_global(GlobalDevServer(dev_server.clone())); - - #[cfg(not(target_os = "windows"))] - { - use signal_hook::consts::{SIGINT, SIGTERM}; - use signal_hook::iterator::Signals; - // Set up a handler when the dev server is shut down - // with ctrl-c or kill - let (tx, rx) = futures::channel::oneshot::channel(); - let mut signals = Signals::new([SIGTERM, SIGINT]).unwrap(); - std::thread::spawn({ - move || { - if let Some(sig) = signals.forever().next() { - tx.send(sig).log_err(); - } - } - }); - cx.spawn(|cx| async move { - if let Ok(sig) = rx.await { - log::info!("received signal {sig:?}"); - cx.update(|cx| cx.quit()).log_err(); - } - }) - .detach(); - } - - let server_url = ClientSettings::get_global(cx).server_url.clone(); - cx.spawn(|cx| async move { - client - .authenticate_and_connect(false, &cx) - .await - .map_err(|e| anyhow!("Error connecting to '{}': {}", server_url, e)) - }) -} - -impl DevServer { - pub fn global(cx: &AppContext) -> Model { - cx.global::().0.clone() - } - - pub fn new(client: Arc, app_state: AppState, cx: &mut ModelContext) -> Self { - cx.on_app_quit(Self::app_will_quit).detach(); - - let maintain_connection = cx.spawn({ - let client = client.clone(); - move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err() - }); - - cx.observe_global::(|_, cx| { - ExtensionStore::global(cx).update(cx, |store, cx| store.auto_install_extensions(cx)) - }) - .detach(); - - DevServer { - _subscriptions: vec![ - client.add_message_handler(cx.weak_model(), Self::handle_dev_server_instructions), - client.add_request_handler( - cx.weak_model(), - Self::handle_validate_dev_server_project_request, - ), - client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory), - client.add_message_handler(cx.weak_model(), Self::handle_shutdown), - ], - _maintain_connection: maintain_connection, - projects: Default::default(), - remote_shutdown: false, - app_state, - client, - } - } - - fn app_will_quit(&mut self, _: &mut ModelContext) -> impl Future { - let request = if self.remote_shutdown { - None - } else { - Some( - self.client - .request(proto::ShutdownDevServer { reason: None }), - ) - }; - async move { - if let Some(request) = request { - request.await.log_err(); - } - } - } - - async fn handle_dev_server_instructions( - this: Model, - envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result<()> { - let (added_projects, retained_projects, removed_projects_ids) = - this.read_with(&mut cx, |this, _| { - let removed_projects = this - .projects - .keys() - .filter(|dev_server_project_id| { - !envelope - .payload - .projects - .iter() - .any(|p| p.id == dev_server_project_id.0) - }) - .cloned() - .collect::>(); - - let mut added_projects = vec![]; - let mut retained_projects = vec![]; - - for project in envelope.payload.projects.iter() { - if this.projects.contains_key(&DevServerProjectId(project.id)) { - retained_projects.push(project.clone()); - } else { - added_projects.push(project.clone()); - } - } - - (added_projects, retained_projects, removed_projects) - })?; - - for dev_server_project in added_projects { - DevServer::share_project(this.clone(), &dev_server_project, &mut cx).await?; - } - - for dev_server_project in retained_projects { - DevServer::update_project(this.clone(), 
&dev_server_project, &mut cx).await?; - } - - this.update(&mut cx, |this, cx| { - for old_project_id in &removed_projects_ids { - this.unshare_project(old_project_id, cx)?; - } - Ok::<(), anyhow::Error>(()) - })??; - Ok(()) - } - - async fn handle_validate_dev_server_project_request( - this: Model, - envelope: TypedEnvelope, - cx: AsyncAppContext, - ) -> Result { - let expanded = shellexpand::tilde(&envelope.payload.path).to_string(); - let path = std::path::Path::new(&expanded); - let fs = cx.read_model(&this, |this, _| this.app_state.fs.clone())?; - - let path_exists = fs.metadata(path).await.is_ok_and(|result| result.is_some()); - if !path_exists { - return Err(anyhow!(ErrorCode::DevServerProjectPathDoesNotExist))?; - } - - Ok(proto::Ack {}) - } - - async fn handle_list_remote_directory( - this: Model, - envelope: TypedEnvelope, - cx: AsyncAppContext, - ) -> Result { - let expanded = shellexpand::tilde(&envelope.payload.path).to_string(); - let fs = cx.read_model(&this, |this, _| this.app_state.fs.clone())?; - - let mut entries = Vec::new(); - let mut response = fs.read_dir(Path::new(&expanded)).await?; - while let Some(path) = response.next().await { - if let Some(file_name) = path?.file_name() { - entries.push(file_name.to_string_lossy().to_string()); - } - } - Ok(proto::ListRemoteDirectoryResponse { entries }) - } - - async fn handle_shutdown( - this: Model, - _envelope: TypedEnvelope, - mut cx: AsyncAppContext, - ) -> Result<()> { - this.update(&mut cx, |this, cx| { - this.remote_shutdown = true; - cx.quit(); - }) - } - - fn unshare_project( - &mut self, - dev_server_project_id: &DevServerProjectId, - cx: &mut ModelContext, - ) -> Result<()> { - if let Some(project) = self.projects.remove(dev_server_project_id) { - project.update(cx, |project, cx| project.unshare(cx))?; - } - Ok(()) - } - - async fn share_project( - this: Model, - dev_server_project: &proto::DevServerProject, - cx: &mut AsyncAppContext, - ) -> Result<()> { - let (client, project) = this.update(cx, |this, cx| { - let project = Project::local( - this.client.clone(), - this.app_state.node_runtime.clone(), - this.app_state.user_store.clone(), - this.app_state.languages.clone(), - this.app_state.fs.clone(), - None, - cx, - ); - - (this.client.clone(), project) - })?; - - for path in &dev_server_project.paths { - let path = shellexpand::tilde(path).to_string(); - - let (worktree, _) = project - .update(cx, |project, cx| { - project.find_or_create_worktree(&path, true, cx) - })? 
- .await?; - - worktree.update(cx, |worktree, cx| { - worktree.as_local_mut().unwrap().share_private_files(cx) - })?; - } - - let worktrees = - project.read_with(cx, |project, cx| project.worktree_metadata_protos(cx))?; - - let response = client - .request(proto::ShareDevServerProject { - dev_server_project_id: dev_server_project.id, - worktrees, - }) - .await?; - - let project_id = response.project_id; - project.update(cx, |project, cx| project.shared(project_id, cx))??; - this.update(cx, |this, _| { - this.projects - .insert(DevServerProjectId(dev_server_project.id), project); - })?; - Ok(()) - } - - async fn update_project( - this: Model, - dev_server_project: &proto::DevServerProject, - cx: &mut AsyncAppContext, - ) -> Result<()> { - let tasks = this.update(cx, |this, cx| { - let Some(project) = this - .projects - .get(&DevServerProjectId(dev_server_project.id)) - else { - return vec![]; - }; - - let mut to_delete = vec![]; - let mut tasks = vec![]; - - project.update(cx, |project, cx| { - for worktree in project.visible_worktrees(cx) { - let mut delete = true; - for config in dev_server_project.paths.iter() { - if worktree.read(cx).abs_path().to_string_lossy() - == shellexpand::tilde(config) - { - delete = false; - } - } - if delete { - to_delete.push(worktree.read(cx).id()) - } - } - - for worktree_id in to_delete { - project.remove_worktree(worktree_id, cx) - } - - for config in dev_server_project.paths.iter() { - tasks.push(project.find_or_create_worktree( - shellexpand::tilde(config).to_string(), - true, - cx, - )); - } - - tasks - }) - })?; - futures::future::join_all(tasks).await; - Ok(()) - } - - async fn maintain_connection( - this: WeakModel, - client: Arc, - mut cx: AsyncAppContext, - ) -> Result<()> { - let mut client_status = client.status(); - - let _ = client_status.try_recv(); - let current_status = *client_status.borrow(); - if current_status.is_connected() { - // wait for first disconnect - client_status.recv().await; - } - - loop { - let Some(current_status) = client_status.recv().await else { - return Ok(()); - }; - let Some(this) = this.upgrade() else { - return Ok(()); - }; - - if !current_status.is_connected() { - continue; - } - - this.update(&mut cx, |this, cx| this.rejoin(cx))?.await?; - } - } - - fn rejoin(&mut self, cx: &mut ModelContext) -> Task> { - let mut projects: HashMap> = HashMap::default(); - let request = self.client.request(proto::ReconnectDevServer { - reshared_projects: self - .projects - .iter() - .flat_map(|(_, handle)| { - let project = handle.read(cx); - let project_id = project.remote_id()?; - projects.insert(project_id, handle.clone()); - Some(proto::UpdateProject { - project_id, - worktrees: project.worktree_metadata_protos(cx), - }) - }) - .collect(), - }); - cx.spawn(|_, mut cx| async move { - let response = request.await?; - - for reshared_project in response.reshared_projects { - if let Some(project) = projects.get(&reshared_project.id) { - project.update(&mut cx, |project, cx| { - project.reshared(reshared_project, cx).log_err(); - })?; - } - } - Ok(()) - }) - } -} diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 2a9bb82a3536592645c2e2365d8f7d92e94a0701..c360617173f9cd3f96307d3607288f6f1293f673 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -30,7 +30,6 @@ async-trait.workspace = true client.workspace = true clock.workspace = true collections.workspace = true -dev_server_projects.workspace = true fs.workspace = true futures.workspace = true fuzzy.workspace = true diff --git 
a/crates/project/src/project.rs b/crates/project/src/project.rs index 4a3eaf98ba7ba9848291d48d8cae580ee33e3765..9459641f865d9377066bc6bb3fcec64ac1917b2c 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -25,8 +25,7 @@ mod yarn; use anyhow::{anyhow, Context as _, Result}; use buffer_store::{BufferStore, BufferStoreEvent}; use client::{ - proto, Client, Collaborator, DevServerProjectId, PendingEntitySubscription, ProjectId, - TypedEnvelope, UserStore, + proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore, }; use clock::ReplicaId; use collections::{BTreeSet, HashMap, HashSet}; @@ -156,7 +155,6 @@ pub struct Project { terminals: Terminals, node: Option, hosted_project_id: Option, - dev_server_project_id: Option, search_history: SearchHistory, search_included_history: SearchHistory, search_excluded_history: SearchHistory, @@ -217,7 +215,6 @@ enum ProjectClientState { capability: Capability, remote_id: u64, replica_id: ReplicaId, - in_room: bool, }, } @@ -675,7 +672,6 @@ impl Project { }, node: Some(node), hosted_project_id: None, - dev_server_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), @@ -705,7 +701,7 @@ impl Project { let ssh_proto = ssh.read(cx).proto_client(); let worktree_store = - cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), 0, None)); + cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), 0)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -794,7 +790,6 @@ impl Project { }, node: Some(node), hosted_project_id: None, - dev_server_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), @@ -898,15 +893,7 @@ impl Project { let role = response.payload.role(); let worktree_store = cx.new_model(|_| { - WorktreeStore::remote( - true, - client.clone().into(), - response.payload.project_id, - response - .payload - .dev_server_project_id - .map(DevServerProjectId), - ) + WorktreeStore::remote(true, client.clone().into(), response.payload.project_id) })?; let buffer_store = cx.new_model(|cx| { BufferStore::remote(worktree_store.clone(), client.clone().into(), remote_id, cx) @@ -992,7 +979,6 @@ impl Project { capability: Capability::ReadWrite, remote_id, replica_id, - in_room: response.payload.dev_server_project_id.is_none(), }, buffers_needing_diff: Default::default(), git_diff_debouncer: DebouncedDelay::new(), @@ -1001,10 +987,6 @@ impl Project { }, node: None, hosted_project_id: None, - dev_server_project_id: response - .payload - .dev_server_project_id - .map(DevServerProjectId), search_history: Self::new_search_history(), search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), @@ -1305,39 +1287,23 @@ impl Project { self.hosted_project_id } - pub fn dev_server_project_id(&self) -> Option { - self.dev_server_project_id - } - - pub fn supports_terminal(&self, cx: &AppContext) -> bool { + pub fn supports_terminal(&self, _cx: &AppContext) -> bool { if self.is_local() { return true; } if self.is_via_ssh() { return true; } - let Some(id) = self.dev_server_project_id else { - return false; - }; - let Some(server) = dev_server_projects::Store::global(cx) - .read(cx) - .dev_server_for_project(id) - else { - return false; - }; - server.ssh_connection_string.is_some() + + return false; } pub fn ssh_connection_string(&self, cx: &AppContext) -> Option { if let Some(ssh_state) = 
&self.ssh_client { return Some(ssh_state.read(cx).connection_string().into()); } - let dev_server_id = self.dev_server_project_id()?; - dev_server_projects::Store::global(cx) - .read(cx) - .dev_server_for_project(dev_server_id)? - .ssh_connection_string - .clone() + + return None; } pub fn ssh_connection_state(&self, cx: &AppContext) -> Option { @@ -1549,17 +1515,9 @@ impl Project { pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext) -> Result<()> { if !matches!(self.client_state, ProjectClientState::Local) { - if let ProjectClientState::Remote { in_room, .. } = &mut self.client_state { - if *in_room || self.dev_server_project_id.is_none() { - return Err(anyhow!("project was already shared")); - } else { - *in_room = true; - return Ok(()); - } - } else { - return Err(anyhow!("project was already shared")); - } + return Err(anyhow!("project was already shared")); } + self.client_subscriptions.extend([ self.client .subscribe_to_entity(project_id)? @@ -1657,14 +1615,7 @@ impl Project { fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> { if self.is_via_collab() { - if self.dev_server_project_id().is_some() { - if let ProjectClientState::Remote { in_room, .. } = &mut self.client_state { - *in_room = false - } - return Ok(()); - } else { - return Err(anyhow!("attempted to unshare a remote project")); - } + return Err(anyhow!("attempted to unshare a remote project")); } if let ProjectClientState::Shared { remote_id, .. } = self.client_state { @@ -2265,29 +2216,6 @@ impl Project { } fn on_worktree_released(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { - if let Some(dev_server_project_id) = self.dev_server_project_id { - let paths: Vec = self - .visible_worktrees(cx) - .filter_map(|worktree| { - if worktree.read(cx).id() == id_to_remove { - None - } else { - Some(worktree.read(cx).abs_path().to_string_lossy().to_string()) - } - }) - .collect(); - if !paths.is_empty() { - let request = self.client.request(proto::UpdateDevServerProject { - dev_server_project_id: dev_server_project_id.0, - paths, - }); - cx.background_executor() - .spawn(request) - .detach_and_log_err(cx); - } - return; - } - if let Some(ssh) = &self.ssh_client { ssh.read(cx) .proto_client() @@ -3152,7 +3080,7 @@ impl Project { match &self.client_state { ProjectClientState::Shared { .. } => true, ProjectClientState::Local => false, - ProjectClientState::Remote { in_room, .. } => *in_room, + ProjectClientState::Remote { .. 
} => true, } } @@ -3279,20 +3207,6 @@ impl Project { let response = response.await?; Ok(response.entries.into_iter().map(PathBuf::from).collect()) }) - } else if let Some(dev_server) = self.dev_server_project_id().and_then(|id| { - dev_server_projects::Store::global(cx) - .read(cx) - .dev_server_for_project(id) - }) { - let request = proto::ListRemoteDirectory { - dev_server_id: dev_server.id.0, - path: query, - }; - let response = self.client.request(request); - cx.background_executor().spawn(async move { - let response = response.await?; - Ok(response.entries.into_iter().map(PathBuf::from).collect()) - }) } else { Task::ready(Err(anyhow!("cannot list directory in remote project"))) } diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index d5166105900c0cf158cd23c6a3fb36df6014280a..0ccf730d9efaea13203dd7ce29f8ed104ff1e3d2 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -37,11 +37,8 @@ pub enum TerminalKind { /// SshCommand describes how to connect to a remote server #[derive(Debug, Clone, PartialEq, Eq)] -pub enum SshCommand { - /// DevServers give a string from the user - DevServer(String), - /// Direct ssh has a list of arguments to pass to ssh - Direct(Vec), +pub struct SshCommand { + arguments: Vec, } impl Project { @@ -73,19 +70,12 @@ impl Project { if let Some(args) = ssh_client.ssh_args() { return Some(( ssh_client.connection_options().host.clone(), - SshCommand::Direct(args), + SshCommand { arguments: args }, )); } } - let dev_server_project_id = self.dev_server_project_id()?; - let projects_store = dev_server_projects::Store::global(cx).read(cx); - let ssh_command = projects_store - .dev_server_for_project(dev_server_project_id)? - .ssh_connection_string - .as_ref()? - .to_string(); - Some(("".to_string(), SshCommand::DevServer(ssh_command))) + return None; } pub fn create_terminal( @@ -399,14 +389,8 @@ pub fn wrap_for_ssh( }; let shell_invocation = format!("sh -c {}", shlex::try_quote(&commands).unwrap()); - let (program, mut args) = match ssh_command { - SshCommand::DevServer(ssh_command) => { - let mut args = shlex::split(ssh_command).unwrap_or_default(); - let program = args.drain(0..1).next().unwrap_or("ssh".to_string()); - (program, args) - } - SshCommand::Direct(ssh_args) => ("ssh".to_string(), ssh_args.clone()), - }; + let program = "ssh".to_string(); + let mut args = ssh_command.arguments.clone(); args.push("-t".to_string()); args.push(shell_invocation); diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 7196ae199ffa600831f0f7157146adb0d502e13f..df190d03f39239e0a8908f5e105cd0449a8d310d 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -1,11 +1,9 @@ use std::{ - cell::RefCell, path::{Path, PathBuf}, sync::{atomic::AtomicUsize, Arc}, }; use anyhow::{anyhow, Context as _, Result}; -use client::DevServerProjectId; use collections::{HashMap, HashSet}; use fs::Fs; use futures::{ @@ -41,7 +39,6 @@ enum WorktreeStoreState { fs: Arc, }, Remote { - dev_server_project_id: Option, upstream_client: AnyProtoClient, upstream_project_id: u64, }, @@ -94,7 +91,6 @@ impl WorktreeStore { retain_worktrees: bool, upstream_client: AnyProtoClient, upstream_project_id: u64, - dev_server_project_id: Option, ) -> Self { Self { next_entry_id: Default::default(), @@ -106,7 +102,6 @@ impl WorktreeStore { state: WorktreeStoreState::Remote { upstream_client, upstream_project_id, - dev_server_project_id, }, } } @@ -196,18 +191,9 @@ impl WorktreeStore 
{ if !self.loading_worktrees.contains_key(&path) { let task = match &self.state { WorktreeStoreState::Remote { - upstream_client, - dev_server_project_id, - .. + upstream_client, .. } => { - if let Some(dev_server_project_id) = dev_server_project_id { - self.create_dev_server_worktree( - upstream_client.clone(), - *dev_server_project_id, - abs_path, - cx, - ) - } else if upstream_client.is_via_collab() { + if upstream_client.is_via_collab() { Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab")))) } else { self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx) @@ -322,51 +308,6 @@ impl WorktreeStore { }) } - fn create_dev_server_worktree( - &mut self, - client: AnyProtoClient, - dev_server_project_id: DevServerProjectId, - abs_path: impl AsRef, - cx: &mut ModelContext, - ) -> Task, Arc>> { - let path: Arc = abs_path.as_ref().into(); - let mut paths: Vec = self - .visible_worktrees(cx) - .map(|worktree| worktree.read(cx).abs_path().to_string_lossy().to_string()) - .collect(); - paths.push(path.to_string_lossy().to_string()); - let request = client.request(proto::UpdateDevServerProject { - dev_server_project_id: dev_server_project_id.0, - paths, - }); - - let abs_path = abs_path.as_ref().to_path_buf(); - cx.spawn(move |project, cx| async move { - let (tx, rx) = futures::channel::oneshot::channel(); - let tx = RefCell::new(Some(tx)); - let Some(project) = project.upgrade() else { - return Err(anyhow!("project dropped"))?; - }; - let observer = cx.update(|cx| { - cx.observe(&project, move |project, cx| { - let abs_path = abs_path.clone(); - project.update(cx, |project, cx| { - if let Some((worktree, _)) = project.find_worktree(&abs_path, cx) { - if let Some(tx) = tx.borrow_mut().take() { - tx.send(worktree).ok(); - } - } - }) - }) - })?; - - request.await?; - let worktree = rx.await.map_err(|e| anyhow!(e))?; - drop(observer); - Ok(worktree) - }) - } - pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { let worktree_id = worktree.read(cx).id(); debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id)); diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 0f503c696bb3632286236392734798792b80c055..cb62a7f04b06067430c6d2eda814adf74aa5f3a4 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -503,7 +503,7 @@ impl ProjectPanel { let is_unfoldable = auto_fold_dirs && self.is_unfoldable(entry, worktree); let worktree_id = worktree.id(); let is_read_only = project.is_read_only(cx); - let is_remote = project.is_via_collab() && project.dev_server_project_id().is_none(); + let is_remote = project.is_via_collab(); let is_local = project.is_local(); let context_menu = ContextMenu::build(cx, |menu, cx| { @@ -3334,12 +3334,11 @@ impl Panel for ProjectPanel { fn starts_open(&self, cx: &WindowContext) -> bool { let project = &self.project.read(cx); - project.dev_server_project_id().is_some() - || project.visible_worktrees(cx).any(|tree| { - tree.read(cx) - .root_entry() - .map_or(false, |entry| entry.is_dir()) - }) + project.visible_worktrees(cx).any(|tree| { + tree.read(cx) + .root_entry() + .map_or(false, |entry| entry.is_dir()) + }) } } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 6539604c86f70b59c83347f984cc2028a1285555..5635eb880022ea5abf6c4296c01ad9bb97f300ac 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -217,33 +217,14 @@ message Envelope { MultiLspQueryResponse 
multi_lsp_query_response = 176; RestartLanguageServers restart_language_servers = 208; - CreateDevServerProject create_dev_server_project = 177; - CreateDevServerProjectResponse create_dev_server_project_response = 188; - CreateDevServer create_dev_server = 178; - CreateDevServerResponse create_dev_server_response = 179; - ShutdownDevServer shutdown_dev_server = 180; - DevServerInstructions dev_server_instructions = 181; - ReconnectDevServer reconnect_dev_server = 182; - ReconnectDevServerResponse reconnect_dev_server_response = 183; - - ShareDevServerProject share_dev_server_project = 184; - JoinDevServerProject join_dev_server_project = 185; RejoinRemoteProjects rejoin_remote_projects = 186; RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187; - DevServerProjectsUpdate dev_server_projects_update = 193; - ValidateDevServerProjectRequest validate_dev_server_project_request = 194; - DeleteDevServer delete_dev_server = 195; OpenNewBuffer open_new_buffer = 196; - DeleteDevServerProject delete_dev_server_project = 197; GetSupermavenApiKey get_supermaven_api_key = 198; GetSupermavenApiKeyResponse get_supermaven_api_key_response = 199; - RegenerateDevServerToken regenerate_dev_server_token = 200; - RegenerateDevServerTokenResponse regenerate_dev_server_token_response = 201; - RenameDevServer rename_dev_server = 202; - TaskContextForLocation task_context_for_location = 203; TaskContext task_context = 204; @@ -264,7 +245,6 @@ message Envelope { ListRemoteDirectory list_remote_directory = 219; ListRemoteDirectoryResponse list_remote_directory_response = 220; - UpdateDevServerProject update_dev_server_project = 221; AddWorktree add_worktree = 222; AddWorktreeResponse add_worktree_response = 223; @@ -304,10 +284,17 @@ message Envelope { LanguageServerPromptResponse language_server_prompt_response = 269; // current max } + reserved 87 to 88; reserved 158 to 161; reserved 166 to 169; + reserved 177 to 185; + reserved 188; + reserved 193 to 195; + reserved 197; + reserved 200 to 202; reserved 205 to 206; + reserved 221; reserved 224 to 229; reserved 247 to 254; } @@ -342,12 +329,11 @@ enum ErrorCode { WrongMoveTarget = 11; UnsharedItem = 12; NoSuchProject = 13; - DevServerAlreadyOnline = 14; - DevServerOffline = 15; DevServerProjectPathDoesNotExist = 16; RemoteUpgradeRequired = 17; RateLimitExceeded = 18; reserved 6; + reserved 14 to 15; } message EndStream {} @@ -511,7 +497,7 @@ message LiveKitConnectionInfo { message ShareProject { uint64 room_id = 1; repeated WorktreeMetadata worktrees = 2; - optional uint64 dev_server_project_id = 3; + reserved 3; bool is_ssh_project = 4; } @@ -536,19 +522,6 @@ message JoinHostedProject { uint64 project_id = 1; } -message CreateDevServerProject { - reserved 1; - reserved 2; - uint64 dev_server_id = 3; - string path = 4; -} -message CreateDevServerProjectResponse { - DevServerProject dev_server_project = 1; -} - -message ValidateDevServerProjectRequest { - string path = 1; -} message ListRemoteDirectory { uint64 dev_server_id = 1; @@ -559,77 +532,6 @@ message ListRemoteDirectoryResponse { repeated string entries = 1; } -message UpdateDevServerProject { - uint64 dev_server_project_id = 1; - repeated string paths = 2; -} - -message CreateDevServer { - reserved 1; - string name = 2; - optional string ssh_connection_string = 3; -} - -message RegenerateDevServerToken { - uint64 dev_server_id = 1; -} - -message RegenerateDevServerTokenResponse { - uint64 dev_server_id = 1; - string access_token = 2; -} - -message CreateDevServerResponse { - uint64 
dev_server_id = 1; - reserved 2; - string access_token = 3; - string name = 4; -} - -message ShutdownDevServer { - optional string reason = 1; -} - -message RenameDevServer { - uint64 dev_server_id = 1; - string name = 2; - optional string ssh_connection_string = 3; -} - -message DeleteDevServer { - uint64 dev_server_id = 1; -} - -message DeleteDevServerProject { - uint64 dev_server_project_id = 1; -} - -message ReconnectDevServer { - repeated UpdateProject reshared_projects = 1; -} - -message ReconnectDevServerResponse { - repeated ResharedProject reshared_projects = 1; -} - -message DevServerInstructions { - repeated DevServerProject projects = 1; -} - -message DevServerProjectsUpdate { - repeated DevServer dev_servers = 1; - repeated DevServerProject dev_server_projects = 2; -} - -message ShareDevServerProject { - uint64 dev_server_project_id = 1; - repeated WorktreeMetadata worktrees = 2; -} - -message JoinDevServerProject { - uint64 dev_server_project_id = 1; -} - message JoinProjectResponse { uint64 project_id = 5; uint32 replica_id = 1; @@ -637,7 +539,7 @@ message JoinProjectResponse { repeated Collaborator collaborators = 3; repeated LanguageServer language_servers = 4; ChannelRole role = 6; - optional uint64 dev_server_project_id = 7; + reserved 7; } message LeaveProject { @@ -1429,29 +1331,6 @@ message HostedProject { ChannelVisibility visibility = 4; } -message DevServerProject { - uint64 id = 1; - optional uint64 project_id = 2; - reserved 3; - reserved 4; - uint64 dev_server_id = 5; - string path = 6; - repeated string paths = 7; -} - -message DevServer { - reserved 1; - uint64 dev_server_id = 2; - string name = 3; - DevServerStatus status = 4; - optional string ssh_connection_string = 5; -} - -enum DevServerStatus { - Offline = 0; - Online = 1; -} - message JoinChannel { uint64 channel_id = 1; } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 85d9f572ee5ad4646218b88cd552b8889cae3a59..7a31e7cc7a691a5926e621f2c2d0c42aa00a7985 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -318,30 +318,12 @@ messages!( (SetRoomParticipantRole, Foreground), (BlameBuffer, Foreground), (BlameBufferResponse, Foreground), - (CreateDevServerProject, Background), - (CreateDevServerProjectResponse, Foreground), - (CreateDevServer, Foreground), - (CreateDevServerResponse, Foreground), - (DevServerInstructions, Foreground), - (ShutdownDevServer, Foreground), - (ReconnectDevServer, Foreground), - (ReconnectDevServerResponse, Foreground), - (ShareDevServerProject, Foreground), - (JoinDevServerProject, Foreground), (RejoinRemoteProjects, Foreground), (RejoinRemoteProjectsResponse, Foreground), (MultiLspQuery, Background), (MultiLspQueryResponse, Background), - (DevServerProjectsUpdate, Foreground), - (ValidateDevServerProjectRequest, Background), (ListRemoteDirectory, Background), (ListRemoteDirectoryResponse, Background), - (UpdateDevServerProject, Background), - (DeleteDevServer, Foreground), - (DeleteDevServerProject, Foreground), - (RegenerateDevServerToken, Foreground), - (RegenerateDevServerTokenResponse, Foreground), - (RenameDevServer, Foreground), (OpenNewBuffer, Foreground), (RestartLanguageServers, Foreground), (LinkedEditingRange, Background), @@ -419,7 +401,6 @@ request_messages!( (GetTypeDefinition, GetTypeDefinitionResponse), (LinkedEditingRange, LinkedEditingRangeResponse), (ListRemoteDirectory, ListRemoteDirectoryResponse), - (UpdateDevServerProject, Ack), (GetUsers, UsersResponse), (IncomingCall, Ack), (InlayHints, InlayHintsResponse), @@ 
-477,19 +458,8 @@ request_messages!( (LspExtExpandMacro, LspExtExpandMacroResponse), (SetRoomParticipantRole, Ack), (BlameBuffer, BlameBufferResponse), - (CreateDevServerProject, CreateDevServerProjectResponse), - (CreateDevServer, CreateDevServerResponse), - (ShutdownDevServer, Ack), - (ShareDevServerProject, ShareProjectResponse), - (JoinDevServerProject, JoinProjectResponse), (RejoinRemoteProjects, RejoinRemoteProjectsResponse), - (ReconnectDevServer, ReconnectDevServerResponse), - (ValidateDevServerProjectRequest, Ack), (MultiLspQuery, MultiLspQueryResponse), - (DeleteDevServer, Ack), - (DeleteDevServerProject, Ack), - (RegenerateDevServerToken, RegenerateDevServerTokenResponse), - (RenameDevServer, Ack), (RestartLanguageServers, Ack), (OpenContext, OpenContextResponse), (CreateContext, CreateContextResponse), diff --git a/crates/recent_projects/Cargo.toml b/crates/recent_projects/Cargo.toml index f69c6c1c21b788470a078e2c36c6ba7abd41558e..b1759de7783b10992ddd778957b9f738a4b355fc 100644 --- a/crates/recent_projects/Cargo.toml +++ b/crates/recent_projects/Cargo.toml @@ -16,7 +16,6 @@ doctest = false anyhow.workspace = true auto_update.workspace = true release_channel.workspace = true -client.workspace = true editor.workspace = true file_finder.workspace = true futures.workspace = true @@ -30,15 +29,12 @@ menu.workspace = true ordered-float.workspace = true picker.workspace = true project.workspace = true -dev_server_projects.workspace = true remote.workspace = true -rpc.workspace = true schemars.workspace = true serde.workspace = true settings.workspace = true smol.workspace = true task.workspace = true -terminal_view.workspace = true theme.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/recent_projects/src/disconnected_overlay.rs b/crates/recent_projects/src/disconnected_overlay.rs index 09342c1d3c7ef512ddaac98346a96512bc499fcc..34a9b895a25de42cc3a5dc7c201cacf7af5ea400 100644 --- a/crates/recent_projects/src/disconnected_overlay.rs +++ b/crates/recent_projects/src/disconnected_overlay.rs @@ -1,6 +1,5 @@ use std::path::PathBuf; -use dev_server_projects::DevServer; use gpui::{ClickEvent, DismissEvent, EventEmitter, FocusHandle, FocusableView, Render, WeakView}; use project::project_settings::ProjectSettings; use remote::SshConnectionOptions; @@ -12,14 +11,10 @@ use ui::{ }; use workspace::{notifications::DetachAndPromptErr, ModalView, OpenOptions, Workspace}; -use crate::{ - open_dev_server_project, open_ssh_project, remote_servers::reconnect_to_dev_server_project, - RemoteServerProjects, SshSettings, -}; +use crate::{open_ssh_project, SshSettings}; enum Host { RemoteProject, - DevServerProject(DevServer), SshRemoteProject(SshConnectionOptions), } @@ -55,20 +50,9 @@ impl DisconnectedOverlay { return; } let handle = cx.view().downgrade(); - let dev_server = project - .read(cx) - .dev_server_project_id() - .and_then(|id| { - dev_server_projects::Store::global(cx) - .read(cx) - .dev_server_for_project(id) - }) - .cloned(); let ssh_connection_options = project.read(cx).ssh_connection_options(cx); - let host = if let Some(dev_server) = dev_server { - Host::DevServerProject(dev_server) - } else if let Some(ssh_connection_options) = ssh_connection_options { + let host = if let Some(ssh_connection_options) = ssh_connection_options { Host::SshRemoteProject(ssh_connection_options) } else { Host::RemoteProject @@ -89,9 +73,6 @@ impl DisconnectedOverlay { cx.emit(DismissEvent); match &self.host { - Host::DevServerProject(dev_server) => { - 
self.reconnect_to_dev_server(dev_server.clone(), cx); - } Host::SshRemoteProject(ssh_connection_options) => { self.reconnect_to_ssh_remote(ssh_connection_options.clone(), cx); } @@ -99,50 +80,6 @@ impl DisconnectedOverlay { } } - fn reconnect_to_dev_server(&self, dev_server: DevServer, cx: &mut ViewContext) { - let Some(workspace) = self.workspace.upgrade() else { - return; - }; - let Some(dev_server_project_id) = workspace - .read(cx) - .project() - .read(cx) - .dev_server_project_id() - else { - return; - }; - - if let Some(project_id) = dev_server_projects::Store::global(cx) - .read(cx) - .dev_server_project(dev_server_project_id) - .and_then(|project| project.project_id) - { - return workspace.update(cx, move |_, cx| { - open_dev_server_project(true, dev_server_project_id, project_id, cx) - .detach_and_prompt_err("Failed to reconnect", cx, |_, _| None) - }); - } - - if dev_server.ssh_connection_string.is_some() { - let task = workspace.update(cx, |_, cx| { - reconnect_to_dev_server_project( - cx.view().clone(), - dev_server, - dev_server_project_id, - true, - cx, - ) - }); - - task.detach_and_prompt_err("Failed to reconnect", cx, |_, _| None); - } else { - return workspace.update(cx, |workspace, cx| { - let handle = cx.view().downgrade(); - workspace.toggle_modal(cx, |cx| RemoteServerProjects::new(cx, handle)) - }); - } - } - fn reconnect_to_ssh_remote( &self, connection_options: SshConnectionOptions, @@ -200,13 +137,10 @@ impl DisconnectedOverlay { impl Render for DisconnectedOverlay { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { - let can_reconnect = matches!( - self.host, - Host::DevServerProject(_) | Host::SshRemoteProject(_) - ); + let can_reconnect = matches!(self.host, Host::SshRemoteProject(_)); let message = match &self.host { - Host::RemoteProject | Host::DevServerProject(_) => { + Host::RemoteProject => { "Your connection to the remote project has been lost.".to_string() } Host::SshRemoteProject(options) => { diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index b31bc1b5098101bccbdffc4fde43b3ee96311503..6032e7d9969eb5dda307ea9ed74825090b7ea885 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -4,7 +4,6 @@ mod ssh_connections; use remote::SshConnectionOptions; pub use ssh_connections::open_ssh_project; -use client::{DevServerProjectId, ProjectId}; use disconnected_overlay::DisconnectedOverlay; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ @@ -17,9 +16,7 @@ use picker::{ highlighted_match_with_paths::{HighlightedMatchWithPaths, HighlightedText}, Picker, PickerDelegate, }; -use remote_servers::reconnect_to_dev_server_project; pub use remote_servers::RemoteServerProjects; -use rpc::proto::DevServerStatus; use serde::Deserialize; use settings::Settings; pub use ssh_connections::SshSettings; @@ -28,13 +25,12 @@ use std::{ sync::Arc, }; use ui::{ - prelude::*, tooltip_container, ButtonLike, IconWithIndicator, Indicator, KeyBinding, ListItem, - ListItemSpacing, Tooltip, + prelude::*, tooltip_container, ButtonLike, KeyBinding, ListItem, ListItemSpacing, Tooltip, }; use util::{paths::PathExt, ResultExt}; use workspace::{ - AppState, CloseIntent, ModalView, OpenOptions, SerializedWorkspaceLocation, Workspace, - WorkspaceId, WORKSPACE_DB, + CloseIntent, ModalView, OpenOptions, SerializedWorkspaceLocation, Workspace, WorkspaceId, + WORKSPACE_DB, }; #[derive(PartialEq, Clone, Deserialize, Default)] @@ -101,7 +97,7 @@ impl 
RecentProjects { } } - fn register(workspace: &mut Workspace, cx: &mut ViewContext) { + fn register(workspace: &mut Workspace, _cx: &mut ViewContext) { workspace.register_action(|workspace, open_recent: &OpenRecent, cx| { let Some(recent_projects) = workspace.active_modal::(cx) else { Self::open(workspace, open_recent.create_new_window, cx); @@ -114,20 +110,6 @@ impl RecentProjects { .update(cx, |picker, cx| picker.cycle_selection(cx)) }); }); - if workspace - .project() - .read(cx) - .dev_server_project_id() - .is_some() - { - workspace.register_action(|workspace, _: &workspace::Open, cx| { - if workspace.active_modal::(cx).is_some() { - cx.propagate(); - } else { - Self::open(workspace, true, cx); - } - }); - } } pub fn open( @@ -254,13 +236,6 @@ impl PickerDelegate for RecentProjectsDelegate { .map(|(_, path)| path.compact().to_string_lossy().into_owned()) .collect::>() .join(""), - SerializedWorkspaceLocation::DevServer(dev_server_project) => { - format!( - "{}{}", - dev_server_project.dev_server_name, - dev_server_project.paths.join("") - ) - } SerializedWorkspaceLocation::Ssh(ssh_project) => ssh_project .ssh_urls() .iter() @@ -321,7 +296,10 @@ impl PickerDelegate for RecentProjectsDelegate { cx.spawn(move |workspace, mut cx| async move { let continue_replacing = workspace .update(&mut cx, |workspace, cx| { - workspace.prepare_to_close(CloseIntent::ReplaceWindow, cx) + workspace.prepare_to_close( + CloseIntent::ReplaceWindow, + cx, + ) })? .await?; if continue_replacing { @@ -339,74 +317,56 @@ impl PickerDelegate for RecentProjectsDelegate { workspace.open_workspace_for_paths(false, paths, cx) } } - SerializedWorkspaceLocation::DevServer(dev_server_project) => { - let store = dev_server_projects::Store::global(cx); - let Some(project_id) = store.read(cx) - .dev_server_project(dev_server_project.id) - .and_then(|p| p.project_id) - else { - let server = store.read(cx).dev_server_for_project(dev_server_project.id); - if server.is_some_and(|server| server.ssh_connection_string.is_some()) { - return reconnect_to_dev_server_project(cx.view().clone(), server.unwrap().clone(), dev_server_project.id, replace_current_window, cx); - } else { - let dev_server_name = dev_server_project.dev_server_name.clone(); - return cx.spawn(|workspace, mut cx| async move { - let response = - cx.prompt(gpui::PromptLevel::Warning, - "Dev Server is offline", - Some(format!("Cannot connect to {}. 
To debug open the remote project settings.", dev_server_name).as_str()), - &["Ok", "Open Settings"] - ).await?; - if response == 1 { - workspace.update(&mut cx, |workspace, cx| { - let handle = cx.view().downgrade(); - workspace.toggle_modal(cx, |cx| RemoteServerProjects::new(cx, handle)) - })?; - } else { - workspace.update(&mut cx, |workspace, cx| { - RecentProjects::open(workspace, true, cx); - })?; - } - Ok(()) - }) - } + SerializedWorkspaceLocation::Ssh(ssh_project) => { + let app_state = workspace.app_state().clone(); + + let replace_window = if replace_current_window { + cx.window_handle().downcast::() + } else { + None }; - open_dev_server_project(replace_current_window, dev_server_project.id, project_id, cx) - } - SerializedWorkspaceLocation::Ssh(ssh_project) => { - let app_state = workspace.app_state().clone(); - - let replace_window = if replace_current_window { - cx.window_handle().downcast::() - } else { - None - }; - - let open_options = OpenOptions { - replace_window, - ..Default::default() - }; - - let args = SshSettings::get_global(cx).args_for(&ssh_project.host, ssh_project.port, &ssh_project.user); - let nickname = SshSettings::get_global(cx).nickname_for(&ssh_project.host, ssh_project.port, &ssh_project.user); - let connection_options = SshConnectionOptions { - host: ssh_project.host.clone(), - username: ssh_project.user.clone(), - port: ssh_project.port, - password: None, - args, - }; - - let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); - - cx.spawn(|_, mut cx| async move { - open_ssh_project(connection_options, paths, app_state, open_options, nickname, &mut cx).await - }) + + let open_options = OpenOptions { + replace_window, + ..Default::default() + }; + + let args = SshSettings::get_global(cx).args_for( + &ssh_project.host, + ssh_project.port, + &ssh_project.user, + ); + let nickname = SshSettings::get_global(cx).nickname_for( + &ssh_project.host, + ssh_project.port, + &ssh_project.user, + ); + let connection_options = SshConnectionOptions { + host: ssh_project.host.clone(), + username: ssh_project.user.clone(), + port: ssh_project.port, + password: None, + args, + }; + + let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); + + cx.spawn(|_, mut cx| async move { + open_ssh_project( + connection_options, + paths, + app_state, + open_options, + nickname, + &mut cx, + ) + .await + }) + } } } - } }) - .detach_and_log_err(cx); + .detach_and_log_err(cx); cx.emit(DismissEvent); } } @@ -431,20 +391,6 @@ impl PickerDelegate for RecentProjectsDelegate { let (_, location) = self.workspaces.get(hit.candidate_id)?; - let dev_server_status = - if let SerializedWorkspaceLocation::DevServer(dev_server_project) = location { - let store = dev_server_projects::Store::global(cx).read(cx); - Some( - store - .dev_server_project(dev_server_project.id) - .and_then(|p| store.dev_server(p.dev_server_id)) - .map(|s| s.status) - .unwrap_or_default(), - ) - } else { - None - }; - let mut path_start_offset = 0; let paths = match location { SerializedWorkspaceLocation::Local(paths, order) => Arc::new( @@ -457,13 +403,6 @@ impl PickerDelegate for RecentProjectsDelegate { .collect(), ), SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), - SerializedWorkspaceLocation::DevServer(dev_server_project) => { - Arc::new(vec![PathBuf::from(format!( - "{}:{}", - dev_server_project.dev_server_name, - dev_server_project.paths.join(", ") - ))]) - } }; let (match_labels, paths): (Vec<_>, Vec<_>) = paths @@ -478,13 +417,7 @@ impl PickerDelegate for 
RecentProjectsDelegate { .unzip(); let highlighted_match = HighlightedMatchWithPaths { - match_label: HighlightedText::join(match_labels.into_iter().flatten(), ", ").color( - if matches!(dev_server_status, Some(DevServerStatus::Offline)) { - Color::Disabled - } else { - Color::Default - }, - ), + match_label: HighlightedText::join(match_labels.into_iter().flatten(), ", "), paths, }; @@ -507,24 +440,6 @@ impl PickerDelegate for RecentProjectsDelegate { SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server) .color(Color::Muted) .into_any_element(), - SerializedWorkspaceLocation::DevServer(_) => { - let indicator_color = match dev_server_status { - Some(DevServerStatus::Online) => Color::Created, - Some(DevServerStatus::Offline) => Color::Hidden, - _ => unreachable!(), - }; - IconWithIndicator::new( - Icon::new(IconName::Server).color(Color::Muted), - Some(Indicator::dot()), - ) - .indicator_color(indicator_color) - .indicator_border_color(if selected { - Some(cx.theme().colors().element_selected) - } else { - None - }) - .into_any_element() - } }) }) .child({ @@ -597,59 +512,6 @@ impl PickerDelegate for RecentProjectsDelegate { } } -fn open_dev_server_project( - replace_current_window: bool, - dev_server_project_id: DevServerProjectId, - project_id: ProjectId, - cx: &mut ViewContext, -) -> Task> { - if let Some(app_state) = AppState::global(cx).upgrade() { - let handle = if replace_current_window { - cx.window_handle().downcast::() - } else { - None - }; - - if let Some(handle) = handle { - cx.spawn(move |workspace, mut cx| async move { - let continue_replacing = workspace - .update(&mut cx, |workspace, cx| { - workspace.prepare_to_close(CloseIntent::ReplaceWindow, cx) - })? - .await?; - if continue_replacing { - workspace - .update(&mut cx, |_workspace, cx| { - workspace::join_dev_server_project( - dev_server_project_id, - project_id, - app_state, - Some(handle), - cx, - ) - })? 
- .await?; - } - Ok(()) - }) - } else { - let task = workspace::join_dev_server_project( - dev_server_project_id, - project_id, - app_state, - None, - cx, - ); - cx.spawn(|_, _| async move { - task.await?; - Ok(()) - }) - } - } else { - Task::ready(Err(anyhow::anyhow!("App state not found"))) - } -} - // Compute the highlighted text for the name and path fn highlights_for_path( path: &Path, diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 101f87f1ffc5b211605d0b22f36af68b58586311..7081afc903902d037c7768ce7fc90ea7f35bc2b4 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1,19 +1,12 @@ -use std::collections::HashMap; use std::path::PathBuf; use std::sync::Arc; -use std::time::Duration; -use anyhow::anyhow; -use anyhow::Context; -use anyhow::Result; -use dev_server_projects::{DevServer, DevServerId, DevServerProjectId}; use editor::Editor; use file_finder::OpenPathDelegate; use futures::channel::oneshot; use futures::future::Shared; use futures::FutureExt; use gpui::canvas; -use gpui::AsyncWindowContext; use gpui::ClipboardItem; use gpui::Task; use gpui::WeakView; @@ -22,17 +15,10 @@ use gpui::{ PromptLevel, ScrollHandle, View, ViewContext, }; use picker::Picker; -use project::terminals::wrap_for_ssh; -use project::terminals::SshCommand; use project::Project; use remote::SshConnectionOptions; -use rpc::proto::DevServerStatus; use settings::update_settings_file; use settings::Settings; -use task::HideStrategy; -use task::RevealStrategy; -use task::SpawnInTerminal; -use terminal_view::terminal_panel::TerminalPanel; use ui::{ prelude::*, IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Scrollbar, ScrollbarState, Section, Tooltip, @@ -43,7 +29,6 @@ use workspace::OpenOptions; use workspace::Toast; use workspace::{notifications::DetachAndPromptErr, ModalView, Workspace}; -use crate::open_dev_server_project; use crate::ssh_connections::connect_over_ssh; use crate::ssh_connections::open_ssh_project; use crate::ssh_connections::RemoteSettingsContent; @@ -1319,146 +1304,3 @@ impl Render for RemoteServerProjects { }) } } - -pub fn reconnect_to_dev_server_project( - workspace: View, - dev_server: DevServer, - dev_server_project_id: DevServerProjectId, - replace_current_window: bool, - cx: &mut WindowContext, -) -> Task> { - let store = dev_server_projects::Store::global(cx); - let reconnect = reconnect_to_dev_server(workspace.clone(), dev_server, cx); - cx.spawn(|mut cx| async move { - reconnect.await?; - - cx.background_executor() - .timer(Duration::from_millis(1000)) - .await; - - if let Some(project_id) = store.update(&mut cx, |store, _| { - store - .dev_server_project(dev_server_project_id) - .and_then(|p| p.project_id) - })? { - workspace - .update(&mut cx, move |_, cx| { - open_dev_server_project( - replace_current_window, - dev_server_project_id, - project_id, - cx, - ) - })? 
- .await?; - } - - Ok(()) - }) -} - -pub fn reconnect_to_dev_server( - workspace: View, - dev_server: DevServer, - cx: &mut WindowContext, -) -> Task> { - let Some(ssh_connection_string) = dev_server.ssh_connection_string else { - return Task::ready(Err(anyhow!("Can't reconnect, no ssh_connection_string"))); - }; - let dev_server_store = dev_server_projects::Store::global(cx); - let get_access_token = dev_server_store.update(cx, |store, cx| { - store.regenerate_dev_server_token(dev_server.id, cx) - }); - - cx.spawn(|mut cx| async move { - let access_token = get_access_token.await?.access_token; - - spawn_ssh_task( - workspace, - dev_server_store, - dev_server.id, - ssh_connection_string.to_string(), - access_token, - &mut cx, - ) - .await - }) -} - -pub async fn spawn_ssh_task( - workspace: View, - dev_server_store: Model, - dev_server_id: DevServerId, - ssh_connection_string: String, - access_token: String, - cx: &mut AsyncWindowContext, -) -> Result<()> { - let terminal_panel = workspace - .update(cx, |workspace, cx| workspace.panel::(cx)) - .ok() - .flatten() - .with_context(|| anyhow!("No terminal panel"))?; - - let command = "sh".to_string(); - let args = vec![ - "-x".to_string(), - "-c".to_string(), - format!( - r#"~/.local/bin/zed -v >/dev/stderr || (curl -f https://zed.dev/install.sh || wget -qO- https://zed.dev/install.sh) | sh && ZED_HEADLESS=1 ~/.local/bin/zed --dev-server-token {}"#, - access_token - ), - ]; - - let ssh_connection_string = ssh_connection_string.to_string(); - let (command, args) = wrap_for_ssh( - &SshCommand::DevServer(ssh_connection_string.clone()), - Some((&command, &args)), - None, - HashMap::default(), - None, - ); - - let terminal = terminal_panel - .update(cx, |terminal_panel, cx| { - terminal_panel.spawn_in_new_terminal( - SpawnInTerminal { - id: task::TaskId("ssh-remote".into()), - full_label: "Install zed over ssh".into(), - label: "Install zed over ssh".into(), - command, - args, - command_label: ssh_connection_string.clone(), - cwd: None, - use_new_terminal: true, - allow_concurrent_runs: false, - reveal: RevealStrategy::Always, - hide: HideStrategy::Never, - env: Default::default(), - shell: Default::default(), - }, - cx, - ) - })? - .await?; - - terminal - .update(cx, |terminal, cx| terminal.wait_for_completed_task(cx))? - .await; - - // There's a race-condition between the task completing successfully, and the server sending us the online status. Make it less likely we'll show the error state. - if dev_server_store.update(cx, |this, _| this.dev_server_status(dev_server_id))? - == DevServerStatus::Offline - { - cx.background_executor() - .timer(Duration::from_millis(200)) - .await - } - - if dev_server_store.update(cx, |this, _| this.dev_server_status(dev_server_id))? 
- == DevServerStatus::Offline - { - return Err(anyhow!("couldn't reconnect"))?; - } - - Ok(()) -} diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index dcfe289ca0a8fd66c0f7570e4c1e2f35d1c7718e..93a181e9263674ba11970616c3940471516f9527 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -33,7 +33,6 @@ auto_update.workspace = true call.workspace = true client.workspace = true command_palette.workspace = true -dev_server_projects.workspace = true extensions_ui.workspace = true feedback.workspace = true feature_flags.workspace = true diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 2f53458905035824badf3a68b3f0a6b658c06506..edbc14792675f9e4f40d8d37f08cf89a2d5501aa 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -285,8 +285,7 @@ impl TitleBar { let room = room.read(cx); let project = self.project.read(cx); let is_local = project.is_local() || project.is_via_ssh(); - let is_dev_server_project = project.dev_server_project_id().is_some(); - let is_shared = (is_local || is_dev_server_project) && project.is_shared(); + let is_shared = is_local && project.is_shared(); let is_muted = room.is_muted(); let is_deafened = room.is_deafened().unwrap_or(false); let is_screen_sharing = room.is_screen_sharing(); @@ -299,7 +298,7 @@ impl TitleBar { let mut children = Vec::new(); - if (is_local || is_dev_server_project) && can_share_projects { + if is_local && can_share_projects { children.push( Button::new( "toggle_sharing", diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index c279d2eb6bcef3d3493397ad76ed4ea10c6acfca..8b3fb5739f0733727e0e95a5a70e4f2fae116e68 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -20,7 +20,7 @@ use gpui::{ use project::{Project, RepositoryEntry}; use recent_projects::{OpenRemote, RecentProjects, SshSettings}; use remote::SshConnectionOptions; -use rpc::proto::{self, DevServerStatus}; +use rpc::proto; use settings::Settings; use smallvec::SmallVec; use std::sync::Arc; @@ -334,39 +334,6 @@ impl TitleBar { } pub fn render_project_host(&self, cx: &mut ViewContext) -> Option { - if let Some(dev_server) = - self.project - .read(cx) - .dev_server_project_id() - .and_then(|dev_server_project_id| { - dev_server_projects::Store::global(cx) - .read(cx) - .dev_server_for_project(dev_server_project_id) - }) - { - return Some( - ButtonLike::new("dev_server_trigger") - .child(Indicator::dot().color( - if dev_server.status == DevServerStatus::Online { - Color::Created - } else { - Color::Disabled - }, - )) - .child( - Label::new(dev_server.name.clone()) - .size(LabelSize::Small) - .line_height_style(LineHeightStyle::UiLabel), - ) - .tooltip(move |cx| Tooltip::text("Project is hosted on a dev server", cx)) - .on_click(cx.listener(|this, _, cx| { - if let Some(workspace) = this.workspace.upgrade() { - recent_projects::RemoteServerProjects::open(workspace, cx) - } - })) - .into_any_element(), - ); - } if self.project.read(cx).is_via_ssh() { return self.render_ssh_project_host(cx); } diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 47f6c138c823669bf7845c2a5ed9386b527a3432..6486302152387b6ef5827890d87873a37fe520f5 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -49,7 +49,6 @@ node_runtime.workspace = true parking_lot.workspace = true postage.workspace = true project.workspace = true -dev_server_projects.workspace = true task.workspace = true 
release_channel.workspace = true remote.workspace = true diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d0cb684b5274001467ea2d8d31d89afa00e09ff5..7c4fb93ba1a6702c1a5a6279934f1ccf11a1acd6 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -24,9 +24,7 @@ use model::{ SerializedSshProject, SerializedWorkspace, }; -use self::model::{ - DockStructure, LocalPathsOrder, SerializedDevServerProject, SerializedWorkspaceLocation, -}; +use self::model::{DockStructure, LocalPathsOrder, SerializedWorkspaceLocation}; #[derive(Copy, Clone, Debug, PartialEq)] pub(crate) struct SerializedAxis(pub(crate) gpui::Axis); @@ -460,89 +458,6 @@ impl WorkspaceDb { }) } - pub(crate) fn workspace_for_dev_server_project( - &self, - dev_server_project_id: DevServerProjectId, - ) -> Option { - // Note that we re-assign the workspace_id here in case it's empty - // and we've grabbed the most recent workspace - let ( - workspace_id, - dev_server_project_id, - window_bounds, - display, - centered_layout, - docks, - window_id, - ): ( - WorkspaceId, - Option, - Option, - Option, - Option, - DockStructure, - Option, - ) = self - .select_row_bound(sql! { - SELECT - workspace_id, - dev_server_project_id, - window_state, - window_x, - window_y, - window_width, - window_height, - display, - centered_layout, - left_dock_visible, - left_dock_active_panel, - left_dock_zoom, - right_dock_visible, - right_dock_active_panel, - right_dock_zoom, - bottom_dock_visible, - bottom_dock_active_panel, - bottom_dock_zoom, - window_id - FROM workspaces - WHERE dev_server_project_id = ? - }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id.0)) - .context("No workspaces found") - .warn_on_err() - .flatten()?; - - let dev_server_project_id = dev_server_project_id?; - - let dev_server_project: SerializedDevServerProject = self - .select_row_bound(sql! { - SELECT id, path, dev_server_name - FROM dev_server_projects - WHERE id = ? - }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) - .context("No remote project found") - .warn_on_err() - .flatten()?; - - let location = SerializedWorkspaceLocation::DevServer(dev_server_project); - - Some(SerializedWorkspace { - id: workspace_id, - location, - center_group: self - .get_center_pane_group(workspace_id) - .context("Getting center group") - .log_err()?, - window_bounds, - centered_layout: centered_layout.unwrap_or(false), - display, - docks, - session_id: None, - window_id, - }) - } - pub(crate) fn workspace_for_ssh_project( &self, ssh_project: &SerializedSshProject, @@ -659,61 +574,6 @@ impl WorkspaceDb { prepared_query(args).context("Updating workspace")?; } - SerializedWorkspaceLocation::DevServer(dev_server_project) => { - conn.exec_bound(sql!( - DELETE FROM workspaces WHERE dev_server_project_id = ? AND workspace_id != ? 
- ))?((dev_server_project.id.0, workspace.id)) - .context("clearing out old locations")?; - - conn.exec_bound(sql!( - INSERT INTO dev_server_projects( - id, - path, - dev_server_name - ) VALUES (?1, ?2, ?3) - ON CONFLICT DO - UPDATE SET - path = ?2, - dev_server_name = ?3 - ))?(&dev_server_project)?; - - // Upsert - conn.exec_bound(sql!( - INSERT INTO workspaces( - workspace_id, - dev_server_project_id, - left_dock_visible, - left_dock_active_panel, - left_dock_zoom, - right_dock_visible, - right_dock_active_panel, - right_dock_zoom, - bottom_dock_visible, - bottom_dock_active_panel, - bottom_dock_zoom, - timestamp - ) - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP) - ON CONFLICT DO - UPDATE SET - dev_server_project_id = ?2, - left_dock_visible = ?3, - left_dock_active_panel = ?4, - left_dock_zoom = ?5, - right_dock_visible = ?6, - right_dock_active_panel = ?7, - right_dock_zoom = ?8, - bottom_dock_visible = ?9, - bottom_dock_active_panel = ?10, - bottom_dock_zoom = ?11, - timestamp = CURRENT_TIMESTAMP - ))?(( - workspace.id, - dev_server_project.id.0, - workspace.docks, - )) - .context("Updating workspace")?; - }, SerializedWorkspaceLocation::Ssh(ssh_project) => { conn.exec_bound(sql!( DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? @@ -824,11 +684,10 @@ impl WorkspaceDb { } query! { - fn recent_workspaces() -> Result, Option)>> { - SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id, ssh_project_id + fn recent_workspaces() -> Result)>> { + SELECT workspace_id, local_paths, local_paths_order, ssh_project_id FROM workspaces WHERE local_paths IS NOT NULL - OR dev_server_project_id IS NOT NULL OR ssh_project_id IS NOT NULL ORDER BY timestamp DESC } @@ -843,13 +702,6 @@ impl WorkspaceDb { } } - query! { - fn dev_server_projects() -> Result> { - SELECT id, path, dev_server_name - FROM dev_server_projects - } - } - query! { fn ssh_projects() -> Result> { SELECT id, host, port, paths, user @@ -913,24 +765,9 @@ impl WorkspaceDb { ) -> Result> { let mut result = Vec::new(); let mut delete_tasks = Vec::new(); - let dev_server_projects = self.dev_server_projects()?; let ssh_projects = self.ssh_projects()?; - for (id, location, order, dev_server_project_id, ssh_project_id) in - self.recent_workspaces()? - { - if let Some(dev_server_project_id) = dev_server_project_id.map(DevServerProjectId) { - if let Some(dev_server_project) = dev_server_projects - .iter() - .find(|rp| rp.id == dev_server_project_id) - { - result.push((id, dev_server_project.clone().into())); - } else { - delete_tasks.push(self.delete_workspace_by_id(id)); - } - continue; - } - + for (id, location, order, ssh_project_id) in self.recent_workspaces()? 
{ if let Some(ssh_project_id) = ssh_project_id.map(SshProjectId) { if let Some(ssh_project) = ssh_projects.iter().find(|rp| rp.id == ssh_project_id) { result.push((id, SerializedWorkspaceLocation::Ssh(ssh_project.clone()))); diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 7528e4c3934c5752a01c4a2d0d40b82b7bcc7c56..a2510b8bec6bf52246f20234d61b645c12046659 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -4,7 +4,6 @@ use crate::{ }; use anyhow::{Context, Result}; use async_recursion::async_recursion; -use client::DevServerProjectId; use db::sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, @@ -17,7 +16,6 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use ui::SharedString; use util::ResultExt; use uuid::Uuid; @@ -92,13 +90,6 @@ impl Column for SerializedSshProject { } } -#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] -pub struct SerializedDevServerProject { - pub id: DevServerProjectId, - pub dev_server_name: String, - pub paths: Vec, -} - #[derive(Debug, PartialEq, Clone)] pub struct LocalPaths(Arc>); @@ -176,49 +167,10 @@ impl Column for LocalPathsOrder { } } -impl From for SerializedWorkspaceLocation { - fn from(dev_server_project: SerializedDevServerProject) -> Self { - Self::DevServer(dev_server_project) - } -} - -impl StaticColumnCount for SerializedDevServerProject {} -impl Bind for &SerializedDevServerProject { - fn bind(&self, statement: &Statement, start_index: i32) -> Result { - let next_index = statement.bind(&self.id.0, start_index)?; - let next_index = statement.bind(&self.dev_server_name, next_index)?; - let paths = serde_json::to_string(&self.paths)?; - statement.bind(&paths, next_index) - } -} - -impl Column for SerializedDevServerProject { - fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { - let id = statement.column_int64(start_index)?; - let dev_server_name = statement.column_text(start_index + 1)?.to_string(); - let paths = statement.column_text(start_index + 2)?.to_string(); - let paths: Vec = if paths.starts_with('[') { - serde_json::from_str(&paths).context("JSON deserialization of paths failed")? 
- } else { - vec![paths.into()] - }; - - Ok(( - Self { - id: DevServerProjectId(id as u64), - dev_server_name, - paths, - }, - start_index + 3, - )) - } -} - #[derive(Debug, PartialEq, Clone)] pub enum SerializedWorkspaceLocation { Local(LocalPaths, LocalPathsOrder), Ssh(SerializedSshProject), - DevServer(SerializedDevServerProject), } impl SerializedWorkspaceLocation { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index adc3e687413a51e8c4101102c18e6200b58da524..6338c6fcbd169222b37375f465ebbdad0775ada8 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -16,7 +16,7 @@ use anyhow::{anyhow, Context as _, Result}; use call::{call_settings::CallSettings, ActiveCall}; use client::{ proto::{self, ErrorCode, PanelId, PeerId}, - ChannelId, Client, DevServerProjectId, ErrorExt, ProjectId, Status, TypedEnvelope, UserStore, + ChannelId, Client, ErrorExt, ProjectId, Status, TypedEnvelope, UserStore, }; use collections::{hash_map, HashMap, HashSet}; use derive_more::{Deref, DerefMut}; @@ -52,7 +52,7 @@ use notifications::{ pub use pane::*; pub use pane_group::*; pub use persistence::{ - model::{ItemId, LocalPaths, SerializedDevServerProject, SerializedWorkspaceLocation}, + model::{ItemId, LocalPaths, SerializedWorkspaceLocation}, WorkspaceDb, DB as WORKSPACE_DB, }; use persistence::{ @@ -97,7 +97,7 @@ use ui::{ IntoElement, ParentElement as _, Pixels, SharedString, Styled as _, ViewContext, VisualContext as _, WindowContext, }; -use util::{maybe, ResultExt, TryFutureExt}; +use util::{ResultExt, TryFutureExt}; use uuid::Uuid; pub use workspace_settings::{ AutosaveSetting, RestoreOnStartupBehavior, TabBarSettings, WorkspaceSettings, @@ -2057,7 +2057,7 @@ impl Workspace { fn add_folder_to_project(&mut self, _: &AddFolderToProject, cx: &mut ViewContext) { let project = self.project.read(cx); - if project.is_via_collab() && project.dev_server_project_id().is_none() { + if project.is_via_collab() { self.show_error( &anyhow!("You cannot add folders to someone else's project"), cx, @@ -4133,20 +4133,6 @@ impl Workspace { } else { None } - } else if let Some(dev_server_project_id) = self.project().read(cx).dev_server_project_id() - { - let store = dev_server_projects::Store::global(cx).read(cx); - maybe!({ - let project = store.dev_server_project(dev_server_project_id)?; - let dev_server = store.dev_server(project.dev_server_id)?; - - let dev_server_project = SerializedDevServerProject { - id: dev_server_project_id, - dev_server_name: dev_server.name.to_string(), - paths: project.paths.to_vec(), - }; - Some(SerializedWorkspaceLocation::DevServer(dev_server_project)) - }) } else { None }; @@ -5180,13 +5166,12 @@ async fn join_channel_internal( if let Some(workspace) = requesting_window { let project = workspace.update(cx, |workspace, cx| { let project = workspace.project.read(cx); - let is_dev_server = project.dev_server_project_id().is_some(); - if !is_dev_server && !CallSettings::get_global(cx).share_on_join { + if !CallSettings::get_global(cx).share_on_join { return None; } - if (project.is_local() || project.is_via_ssh() || is_dev_server) + if (project.is_local() || project.is_via_ssh()) && project.visible_worktrees(cx).any(|tree| { tree.read(cx) .root_entry() @@ -5685,84 +5670,6 @@ fn serialize_ssh_project( }) } -pub fn join_dev_server_project( - dev_server_project_id: DevServerProjectId, - project_id: ProjectId, - app_state: Arc, - window_to_replace: Option>, - cx: &mut AppContext, -) -> Task>> { - let windows = cx.windows(); - 
cx.spawn(|mut cx| async move { - let existing_workspace = windows.into_iter().find_map(|window| { - window.downcast::().and_then(|window| { - window - .update(&mut cx, |workspace, cx| { - if workspace.project().read(cx).remote_id() == Some(project_id.0) { - Some(window) - } else { - None - } - }) - .unwrap_or(None) - }) - }); - - let serialized_workspace: Option = - persistence::DB.workspace_for_dev_server_project(dev_server_project_id); - - let workspace = if let Some(existing_workspace) = existing_workspace { - existing_workspace - } else { - let project = Project::remote( - project_id.0, - app_state.client.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - cx.clone(), - ) - .await?; - - let workspace_id = if let Some(ref serialized_workspace) = serialized_workspace { - serialized_workspace.id - } else { - persistence::DB.next_id().await? - }; - - if let Some(window_to_replace) = window_to_replace { - cx.update_window(window_to_replace.into(), |_, cx| { - cx.replace_root_view(|cx| { - Workspace::new(Some(workspace_id), project, app_state.clone(), cx) - }); - })?; - window_to_replace - } else { - let window_bounds_override = window_bounds_env_override(); - cx.update(|cx| { - let mut options = (app_state.build_window_options)(None, cx); - options.window_bounds = window_bounds_override.map(WindowBounds::Windowed); - cx.open_window(options, |cx| { - cx.new_view(|cx| { - Workspace::new(Some(workspace_id), project, app_state.clone(), cx) - }) - }) - })?? - } - }; - - workspace - .update(&mut cx, |_, cx| { - cx.activate(true); - cx.activate_window(); - open_items(serialized_workspace, vec![], app_state, cx) - })? - .await?; - - anyhow::Ok(workspace) - }) -} - pub fn join_in_room_project( project_id: u64, follow_user_id: u64, diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index d80db98393d95745df8f5572ee878f3a455d8446..58728d504b3ea5019f1d73ce31b83eea4e4f5ba8 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -36,7 +36,6 @@ command_palette.workspace = true command_palette_hooks.workspace = true copilot.workspace = true db.workspace = true -dev_server_projects.workspace = true diagnostics.workspace = true editor.workspace = true env_logger.workspace = true @@ -52,7 +51,6 @@ git.workspace = true git_hosting_providers.workspace = true go_to_line.workspace = true gpui = { workspace = true, features = ["wayland", "x11", "font-kit"] } -headless.workspace = true http_client.workspace = true image_viewer.workspace = true inline_completion_button.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 5608d8477618f10145f08c1ccb94b0c56c0124c3..01c9d86c60032c033a9c73ac8471942428eb9db8 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -11,7 +11,7 @@ use assistant::PromptBuilder; use chrono::Offset; use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; -use client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore}; +use client::{parse_zed_link, Client, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE}; use editor::Editor; @@ -20,8 +20,8 @@ use fs::{Fs, RealFs}; use futures::{future, StreamExt}; use git::GitHostingProviderRegistry; use gpui::{ - Action, App, AppContext, AsyncAppContext, Context, DismissEvent, Global, Task, - UpdateGlobal as _, VisualContext, + Action, App, AppContext, AsyncAppContext, Context, DismissEvent, UpdateGlobal as _, + VisualContext, }; use 
http_client::{read_proxy_from_env, Uri}; use language::LanguageRegistry; @@ -136,43 +136,6 @@ fn fail_to_open_window(e: anyhow::Error, _cx: &mut AppContext) { } } -enum AppMode { - Headless(DevServerToken), - Ui, -} -impl Global for AppMode {} - -fn init_headless( - dev_server_token: DevServerToken, - app_state: Arc, - cx: &mut AppContext, -) -> Task> { - match cx.try_global::() { - Some(AppMode::Headless(token)) if token == &dev_server_token => return Task::ready(Ok(())), - Some(_) => { - return Task::ready(Err(anyhow!( - "zed is already running. Use `kill {}` to stop it", - process::id() - ))) - } - None => { - cx.set_global(AppMode::Headless(dev_server_token.clone())); - } - }; - let client = app_state.client.clone(); - client.set_dev_server_token(dev_server_token); - headless::init( - client.clone(), - headless::AppState { - languages: app_state.languages.clone(), - user_store: app_state.user_store.clone(), - fs: app_state.fs.clone(), - node_runtime: app_state.node_runtime.clone(), - }, - cx, - ) -} - // init_common is called for both headless and normal mode. fn init_common(app_state: Arc, cx: &mut AppContext) -> Arc { SystemAppearance::init(cx); @@ -223,19 +186,6 @@ fn init_ui( prompt_builder: Arc, cx: &mut AppContext, ) -> Result<()> { - match cx.try_global::() { - Some(AppMode::Headless(_)) => { - return Err(anyhow!( - "zed is already running in headless mode. Use `kill {}` to stop it", - process::id() - )) - } - Some(AppMode::Ui) => return Ok(()), - None => { - cx.set_global(AppMode::Ui); - } - }; - load_embedded_fonts(cx); #[cfg(target_os = "linux")] @@ -252,7 +202,6 @@ fn init_ui( go_to_line::init(cx); file_finder::init(cx); tab_switcher::init(cx); - dev_server_projects::init(app_state.client.clone(), cx); outline::init(cx); project_symbols::init(cx); project_panel::init(Assets, cx); @@ -426,22 +375,15 @@ fn main() { app.on_reopen(move |cx| { if let Some(app_state) = AppState::try_global(cx).and_then(|app_state| app_state.upgrade()) { - let ui_has_launched = cx - .try_global::() - .map(|mode| matches!(mode, AppMode::Ui)) - .unwrap_or(false); - - if ui_has_launched { - cx.spawn({ - let app_state = app_state.clone(); - |mut cx| async move { - if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { - fail_to_open_window_async(e, &mut cx) - } + cx.spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { + fail_to_open_window_async(e, &mut cx) } - }) - .detach(); - } + } + }) + .detach(); } }); @@ -590,30 +532,16 @@ fn main() { handle_open_request(request, app_state.clone(), prompt_builder.clone(), cx); } None => { - if let Some(dev_server_token) = args.dev_server_token { - let task = - init_headless(DevServerToken(dev_server_token), app_state.clone(), cx); - cx.spawn(|cx| async move { - if let Err(e) = task.await { - log::error!("{}", e); - cx.update(|cx| cx.quit()).log_err(); - } else { - log::info!("connected!"); - } - }) - .detach(); - } else { - init_ui(app_state.clone(), prompt_builder.clone(), cx).unwrap(); - cx.spawn({ - let app_state = app_state.clone(); - |mut cx| async move { - if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { - fail_to_open_window_async(e, &mut cx) - } + init_ui(app_state.clone(), prompt_builder.clone(), cx).unwrap(); + cx.spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + if let Err(e) = restore_or_create_workspace(app_state, &mut cx).await { + fail_to_open_window_async(e, &mut cx) } - }) - .detach(); - } + } + 
}) + .detach(); } } @@ -927,7 +855,6 @@ async fn restore_or_create_workspace( }) .detach(); } - SerializedWorkspaceLocation::DevServer(_) => {} } } } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) { diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index bb217d6b16343daa625e4c53e1a23c4113956b39..32bfdd42ae8359fdffdf1deac54029d2c7e545f3 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -1,5 +1,5 @@ use crate::restorable_workspace_locations; -use crate::{handle_open_request, init_headless, init_ui}; +use crate::{handle_open_request, init_ui}; use anyhow::{anyhow, Context, Result}; use assistant::PromptBuilder; use cli::{ipc, IpcHandshake}; @@ -21,8 +21,8 @@ use remote::SshConnectionOptions; use settings::Settings; use std::path::{Path, PathBuf}; use std::sync::Arc; +use std::thread; use std::time::Duration; -use std::{process, thread}; use util::paths::PathWithPosition; use util::ResultExt; use welcome::{show_welcome_view, FIRST_OPEN}; @@ -262,38 +262,9 @@ pub async fn handle_cli_connection( paths, wait, open_new_workspace, - dev_server_token, + env, } => { - if let Some(dev_server_token) = dev_server_token { - match cx - .update(|cx| { - init_headless(client::DevServerToken(dev_server_token), app_state, cx) - }) - .unwrap() - .await - { - Ok(_) => { - responses - .send(CliResponse::Stdout { - message: format!("zed (pid {}) connected!", process::id()), - }) - .log_err(); - responses.send(CliResponse::Exit { status: 0 }).log_err(); - } - Err(error) => { - responses - .send(CliResponse::Stderr { - message: format!("{error}"), - }) - .log_err(); - responses.send(CliResponse::Exit { status: 1 }).log_err(); - cx.update(|cx| cx.quit()).log_err(); - } - } - return; - } - if !urls.is_empty() { cx.update(|cx| { match OpenRequest::parse(urls, cx) { @@ -459,7 +430,6 @@ async fn open_workspaces( // We don't set `errored` here, because for ssh projects, the // error is displayed in the window. } - SerializedWorkspaceLocation::DevServer(_) => {} } } From 3ec015b325b2b3cb834af2515ae932e00494a92a Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 24 Oct 2024 18:37:57 +0000 Subject: [PATCH 20/76] docs: Example theme_overrides for docstrings as italic (#19694) --- docs/src/themes.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/src/themes.md b/docs/src/themes.md index 5dbd4b0d7641d30f801c75a4ee61acec97cdfeb9..3150b0168076c470afe2ceb012c3ef092469d1f7 100644 --- a/docs/src/themes.md +++ b/docs/src/themes.md @@ -34,7 +34,7 @@ By default, Zed maintains two themes: one for light mode and one for dark mode. To override specific attributes of a theme, use the `experimental.theme_overrides` setting. -For example, to override the background color of the editor and the font style of comments, add the following to your `settings.json` file: +For example, add the following to your `settings.json` if you wish to to override the background color of the editor and display comments and doc comments as italics: ```json { @@ -43,13 +43,18 @@ For example, to override the background color of the editor and the font style o "syntax": { "comment": { "font_style": "italic" + }, + "comment.doc": { + "font_style": "italic" } } } } ``` -See which attributes are available to override by looking at the JSON format of your theme. For example, [here is the JSON format for the `One` themes](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json). 
+To see a comprehensive list of list of captures (like `comment` and `comment.doc`) see: [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting). + +To see a list of available theme attributes look at the JSON file for your theme. For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes. ## Local Themes From 454d3dd52b25d40e079b59f206a20317efa3c30e Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 24 Oct 2024 21:49:07 +0300 Subject: [PATCH 21/76] Fix ssh project history (#19683) Use `Fs` instead of `std::fs` and do entry existence checks better: * first, check the worktree entry existence without any FS checks * then, only for local cases, use `Fs` to check for abs_path existence of items, in case those came from single-filed worktrees that got closed and removed. Remote entries do not get file existence checks, so might try opening previously removed buffers for now. Release Notes: - N/A --- .../collab/src/tests/channel_guest_tests.rs | 4 +- crates/collab/src/tests/following_tests.rs | 19 +++-- crates/file_finder/src/file_finder.rs | 81 ++++++++++++------- crates/file_finder/src/file_finder_tests.rs | 11 ++- crates/project/src/project.rs | 19 +---- 5 files changed, 81 insertions(+), 53 deletions(-) diff --git a/crates/collab/src/tests/channel_guest_tests.rs b/crates/collab/src/tests/channel_guest_tests.rs index 06b14bee5e6df6786e4cf46669fc92b2b342ba49..5a091fe3083b1495ca89bc801cc188b1e8f33b1f 100644 --- a/crates/collab/src/tests/channel_guest_tests.rs +++ b/crates/collab/src/tests/channel_guest_tests.rs @@ -95,7 +95,9 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test let room_b = cx_b .read(ActiveCall::global) .update(cx_b, |call, _| call.room().unwrap().clone()); - cx_b.simulate_keystrokes("cmd-p 1 enter"); + cx_b.simulate_keystrokes("cmd-p"); + cx_a.run_until_parked(); + cx_b.simulate_keystrokes("1 enter"); let (project_b, editor_b) = workspace_b.update(cx_b, |workspace, cx| { ( diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index 5e9c001491c6ce1d00745ed664083e4e8bec8d00..1367bf49c008e17e6be1da3c6627eedebf110ff1 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -1589,8 +1589,9 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T .await; let (workspace_b, cx_b) = client_b.join_workspace(channel_id, cx_b).await; - cx_a.simulate_keystrokes("cmd-p 2 enter"); + cx_a.simulate_keystrokes("cmd-p"); cx_a.run_until_parked(); + cx_a.simulate_keystrokes("2 enter"); let editor_a = workspace_a.update(cx_a, |workspace, cx| { workspace.active_item_as::(cx).unwrap() @@ -2041,7 +2042,9 @@ async fn test_following_to_channel_notes_other_workspace( share_workspace(&workspace_a, cx_a).await.unwrap(); // a opens 1.txt - cx_a.simulate_keystrokes("cmd-p 1 enter"); + cx_a.simulate_keystrokes("cmd-p"); + cx_a.run_until_parked(); + cx_a.simulate_keystrokes("1 enter"); cx_a.run_until_parked(); workspace_a.update(cx_a, |workspace, cx| { let editor = workspace.active_item(cx).unwrap(); @@ -2098,7 +2101,9 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut share_workspace(&workspace_a, cx_a).await.unwrap(); // a opens 1.txt - cx_a.simulate_keystrokes("cmd-p 1 enter"); + cx_a.simulate_keystrokes("cmd-p"); + cx_a.run_until_parked(); + cx_a.simulate_keystrokes("1 enter"); 
cx_a.run_until_parked(); workspace_a.update(cx_a, |workspace, cx| { let editor = workspace.active_item(cx).unwrap(); @@ -2118,7 +2123,9 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut cx_b.simulate_keystrokes("down"); // a opens a different file while not followed - cx_a.simulate_keystrokes("cmd-p 2 enter"); + cx_a.simulate_keystrokes("cmd-p"); + cx_a.run_until_parked(); + cx_a.simulate_keystrokes("2 enter"); workspace_b.update(cx_b, |workspace, cx| { let editor = workspace.active_item_as::(cx).unwrap(); @@ -2128,7 +2135,9 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut // a opens a file in a new window let (_, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await; cx_a2.update(|cx| cx.activate_window()); - cx_a2.simulate_keystrokes("cmd-p 3 enter"); + cx_a2.simulate_keystrokes("cmd-p"); + cx_a2.run_until_parked(); + cx_a2.simulate_keystrokes("3 enter"); cx_a2.run_until_parked(); // b starts following a again diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 4202e6e2d095859a63a7b1cfad6f70e387918c6d..299b129d82a90dd52c4f8b015eda5932bf0fb783 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -5,6 +5,7 @@ mod file_finder_settings; mod new_path_prompt; mod open_path_prompt; +use futures::future::join_all; pub use open_path_prompt::OpenPathDelegate; use collections::HashMap; @@ -59,7 +60,7 @@ impl FileFinder { fn register(workspace: &mut Workspace, _: &mut ViewContext) { workspace.register_action(|workspace, action: &workspace::ToggleFileFinder, cx| { let Some(file_finder) = workspace.active_modal::(cx) else { - Self::open(workspace, action.separate_history, cx); + Self::open(workspace, action.separate_history, cx).detach(); return; }; @@ -72,8 +73,13 @@ impl FileFinder { }); } - fn open(workspace: &mut Workspace, separate_history: bool, cx: &mut ViewContext) { + fn open( + workspace: &mut Workspace, + separate_history: bool, + cx: &mut ViewContext, + ) -> Task<()> { let project = workspace.project().read(cx); + let fs = project.fs(); let currently_opened_path = workspace .active_item(cx) @@ -88,28 +94,51 @@ impl FileFinder { let history_items = workspace .recent_navigation_history(Some(MAX_RECENT_SELECTIONS), cx) .into_iter() - .filter(|(_, history_abs_path)| match history_abs_path { - Some(abs_path) => history_file_exists(abs_path), - None => true, + .filter_map(|(project_path, abs_path)| { + if project.entry_for_path(&project_path, cx).is_some() { + return Some(Task::ready(Some(FoundPath::new(project_path, abs_path)))); + } + let abs_path = abs_path?; + if project.is_local() { + let fs = fs.clone(); + Some(cx.background_executor().spawn(async move { + if fs.is_file(&abs_path).await { + Some(FoundPath::new(project_path, Some(abs_path))) + } else { + None + } + })) + } else { + Some(Task::ready(Some(FoundPath::new( + project_path, + Some(abs_path), + )))) + } }) - .map(|(history_path, abs_path)| FoundPath::new(history_path, abs_path)) .collect::>(); + cx.spawn(move |workspace, mut cx| async move { + let history_items = join_all(history_items).await.into_iter().flatten(); + + workspace + .update(&mut cx, |workspace, cx| { + let project = workspace.project().clone(); + let weak_workspace = cx.view().downgrade(); + workspace.toggle_modal(cx, |cx| { + let delegate = FileFinderDelegate::new( + cx.view().downgrade(), + weak_workspace, + project, + currently_opened_path, + history_items.collect(), + separate_history, + cx, + ); - let 
project = workspace.project().clone(); - let weak_workspace = cx.view().downgrade(); - workspace.toggle_modal(cx, |cx| { - let delegate = FileFinderDelegate::new( - cx.view().downgrade(), - weak_workspace, - project, - currently_opened_path, - history_items, - separate_history, - cx, - ); - - FileFinder::new(delegate, cx) - }); + FileFinder::new(delegate, cx) + }); + }) + .ok(); + }) } fn new(delegate: FileFinderDelegate, cx: &mut ViewContext) -> Self { @@ -456,16 +485,6 @@ impl FoundPath { const MAX_RECENT_SELECTIONS: usize = 20; -#[cfg(not(test))] -fn history_file_exists(abs_path: &PathBuf) -> bool { - abs_path.exists() -} - -#[cfg(test)] -fn history_file_exists(abs_path: &Path) -> bool { - !abs_path.ends_with("nonexistent.rs") -} - pub enum Event { Selected(ProjectPath), Dismissed, diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index 8e25d382d382a924c1eace6f42f8cac1d34d572b..6bccf79e3bb747490eb87b52c247ab8fe83c1093 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -4,7 +4,7 @@ use super::*; use editor::Editor; use gpui::{Entity, TestAppContext, VisualTestContext}; use menu::{Confirm, SelectNext, SelectPrev}; -use project::FS_WATCH_LATENCY; +use project::{RemoveOptions, FS_WATCH_LATENCY}; use serde_json::json; use workspace::{AppState, ToggleFileFinder, Workspace}; @@ -1450,6 +1450,15 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext) open_close_queried_buffer("non", 1, "nonexistent.rs", &workspace, cx).await; open_close_queried_buffer("thi", 1, "third.rs", &workspace, cx).await; open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await; + app_state + .fs + .remove_file( + Path::new("/src/test/nonexistent.rs"), + RemoveOptions::default(), + ) + .await + .unwrap(); + cx.run_until_parked(); let picker = open_file_picker(&workspace, cx); cx.simulate_input("rs"); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 9459641f865d9377066bc6bb3fcec64ac1917b2c..167d5c1d49b2da37a35ca69041683baf56d9ada7 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1875,11 +1875,7 @@ impl Project { }) } - pub fn get_open_buffer( - &mut self, - path: &ProjectPath, - cx: &mut ModelContext, - ) -> Option> { + pub fn get_open_buffer(&self, path: &ProjectPath, cx: &AppContext) -> Option> { self.buffer_store.read(cx).get_by_path(path, cx) } @@ -3295,17 +3291,10 @@ impl Project { } pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option { - let workspace_root = self - .worktree_for_id(project_path.worktree_id, cx)? + self.worktree_for_id(project_path.worktree_id, cx)? .read(cx) - .abs_path(); - let project_path = project_path.path.as_ref(); - - Some(if project_path == Path::new("") { - workspace_root.to_path_buf() - } else { - workspace_root.join(project_path) - }) + .absolutize(&project_path.path) + .ok() } /// Attempts to find a `ProjectPath` corresponding to the given path. If the path From ca861bb1bbd4676d56e5d58fd46547696a6651f2 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 24 Oct 2024 16:34:45 -0400 Subject: [PATCH 22/76] ui: Fix swapped element background colors (#19701) This PR fixes an issue introduced in https://github.com/zed-industries/zed/pull/18768 where the element backgrounds colors for `ElevationIndex::ElevatedSurface` and `ElevationIndex::Surface` were swapped. 
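
For reference, a condensed sketch of the corrected mapping (simplified from `element_bg_from_elevation` as it appears in the diff below; the surrounding theme and `WindowContext` plumbing is elided):

```rust
// Sketch only: each elevation now resolves to its matching theme color.
fn element_bg_from_elevation(elevation: Option<ElevationIndex>, cx: &mut WindowContext) -> Hsla {
    match elevation {
        Some(ElevationIndex::Background) => cx.theme().colors().element_background,
        // These two arms were previously swapped.
        Some(ElevationIndex::ElevatedSurface) => cx.theme().colors().elevated_surface_background,
        Some(ElevationIndex::Surface) => cx.theme().colors().surface_background,
        Some(ElevationIndex::ModalSurface) => cx.theme().colors().background,
        _ => cx.theme().colors().element_background,
    }
}
```

With this, filled buttons rendered on an elevated surface pick up the elevated-surface background again instead of the base surface color, and vice versa.
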
Release Notes: - N/A --- .../ui/src/components/button/button_like.rs | 30 +++++++++---------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/crates/ui/src/components/button/button_like.rs b/crates/ui/src/components/button/button_like.rs index 22e84213913188a51c08de8571ce15c7f67355e6..ae5730569eae10e0ecca677be82ea44a39f56d28 100644 --- a/crates/ui/src/components/button/button_like.rs +++ b/crates/ui/src/components/button/button_like.rs @@ -149,8 +149,8 @@ pub(crate) struct ButtonLikeStyles { fn element_bg_from_elevation(elevation: Option, cx: &mut WindowContext) -> Hsla { match elevation { Some(ElevationIndex::Background) => cx.theme().colors().element_background, - Some(ElevationIndex::ElevatedSurface) => cx.theme().colors().surface_background, - Some(ElevationIndex::Surface) => cx.theme().colors().elevated_surface_background, + Some(ElevationIndex::ElevatedSurface) => cx.theme().colors().elevated_surface_background, + Some(ElevationIndex::Surface) => cx.theme().colors().surface_background, Some(ElevationIndex::ModalSurface) => cx.theme().colors().background, _ => cx.theme().colors().element_background, } @@ -162,11 +162,9 @@ impl ButtonStyle { elevation: Option, cx: &mut WindowContext, ) -> ButtonLikeStyles { - let filled_background = element_bg_from_elevation(elevation, cx); - match self { ButtonStyle::Filled => ButtonLikeStyles { - background: filled_background, + background: element_bg_from_elevation(elevation, cx), border_color: transparent_black(), label_color: Color::Default.color(cx), icon_color: Color::Default.color(cx), @@ -192,16 +190,18 @@ impl ButtonStyle { elevation: Option, cx: &mut WindowContext, ) -> ButtonLikeStyles { - let mut filled_background = element_bg_from_elevation(elevation, cx); - filled_background.fade_out(0.92); - match self { - ButtonStyle::Filled => ButtonLikeStyles { - background: filled_background, - border_color: transparent_black(), - label_color: Color::Default.color(cx), - icon_color: Color::Default.color(cx), - }, + ButtonStyle::Filled => { + let mut filled_background = element_bg_from_elevation(elevation, cx); + filled_background.fade_out(0.92); + + ButtonLikeStyles { + background: filled_background, + border_color: transparent_black(), + label_color: Color::Default.color(cx), + icon_color: Color::Default.color(cx), + } + } ButtonStyle::Tinted(tint) => tint.button_like_style(cx), ButtonStyle::Subtle => ButtonLikeStyles { background: cx.theme().colors().ghost_element_hover, @@ -277,8 +277,6 @@ impl ButtonStyle { elevation: Option, cx: &mut WindowContext, ) -> ButtonLikeStyles { - element_bg_from_elevation(elevation, cx).fade_out(0.82); - match self { ButtonStyle::Filled => ButtonLikeStyles { background: cx.theme().colors().element_disabled, From 3a9c071e6e1cbdc24cd8b3d0a479d61a1b669ca9 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 24 Oct 2024 14:37:02 -0600 Subject: [PATCH 23/76] Fix partial downloads of ssh remote server (#19700) Release Notes: - SSH Remoting: fix a bug where inerrrupting ssh connecting could leave your local binary cached in an invalid state --- crates/auto_update/src/auto_update.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 57286e673c4a8defd89b953b5cf23d9e57eb6e20..a45eb3a05b1838b95886151f5f3801d9768ee8c3 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -11,6 +11,7 @@ use gpui::{ }; use 
markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView}; +use paths::remote_servers_dir; use schemars::JsonSchema; use serde::Deserialize; use serde_derive::Serialize; @@ -661,12 +662,15 @@ async fn download_remote_server_binary( client: Arc, cx: &AsyncAppContext, ) -> Result<()> { - let mut target_file = File::create(&target_path).await?; + let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?; + let mut temp_file = File::create(&temp).await?; let update_request_body = build_remote_server_update_request_body(cx)?; let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?); let mut response = client.get(&release.url, request_body, true).await?; - smol::io::copy(response.body_mut(), &mut target_file).await?; + smol::io::copy(response.body_mut(), &mut temp_file).await?; + smol::fs::rename(&temp, &target_path).await?; + Ok(()) } From d45b830412a9a3099c77a00bea1f9fc11de57580 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 24 Oct 2024 14:37:54 -0600 Subject: [PATCH 24/76] SSH connection pooling (#19692) Co-Authored-By: Max Closes #ISSUE Release Notes: - SSH Remoting: Reuse connections across hosts --------- Co-authored-by: Max --- .../remote_editing_collaboration_tests.rs | 4 +- crates/recent_projects/src/remote_servers.rs | 11 +- crates/remote/src/ssh_session.rs | 804 +++++++++++------- .../remote_server/src/remote_editing_tests.rs | 4 +- 4 files changed, 484 insertions(+), 339 deletions(-) diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 52086c856c2884c11403b74a333d177bdd20556f..0e13c88d9464ea53b2d9dc5a0d16067a05611108 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -26,7 +26,7 @@ async fn test_sharing_an_ssh_remote_project( .await; // Set up project on remote FS - let (port, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); + let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); let remote_fs = FakeFs::new(server_cx.executor()); remote_fs .insert_tree( @@ -67,7 +67,7 @@ async fn test_sharing_an_ssh_remote_project( ) }); - let client_ssh = SshRemoteClient::fake_client(port, cx_a).await; + let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await; let (project_a, worktree_id) = client_a .build_ssh_project("/code/project1", client_ssh, cx_a) .await; diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 7081afc903902d037c7768ce7fc90ea7f35bc2b4..d7f3beccb21388c2f5ef6c96181a2217ff21fbb3 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -17,6 +17,7 @@ use gpui::{ use picker::Picker; use project::Project; use remote::SshConnectionOptions; +use remote::SshRemoteClient; use settings::update_settings_file; use settings::Settings; use ui::{ @@ -46,6 +47,7 @@ pub struct RemoteServerProjects { scroll_handle: ScrollHandle, workspace: WeakView, selectable_items: SelectableItemList, + retained_connections: Vec>, } struct CreateRemoteServer { @@ -355,6 +357,7 @@ impl RemoteServerProjects { scroll_handle: ScrollHandle::new(), workspace, selectable_items: Default::default(), + retained_connections: Vec::new(), } } @@ -424,7 +427,7 @@ impl RemoteServerProjects { let address_editor = editor.clone(); let creating = cx.spawn(move |this, mut cx| async move { match connection.await { - Some(_) => this + 
Some(Some(client)) => this .update(&mut cx, |this, cx| { let _ = this.workspace.update(cx, |workspace, _| { workspace @@ -432,14 +435,14 @@ impl RemoteServerProjects { .telemetry() .report_app_event("create ssh server".to_string()) }); - + this.retained_connections.push(client); this.add_ssh_server(connection_options, cx); this.mode = Mode::default_mode(); this.selectable_items.reset_selection(); cx.notify() }) .log_err(), - None => this + _ => this .update(&mut cx, |this, cx| { address_editor.update(cx, |this, _| { this.set_read_only(false); @@ -1056,7 +1059,7 @@ impl RemoteServerProjects { ); cx.spawn(|mut cx| async move { - if confirmation.await.ok() == Some(1) { + if confirmation.await.ok() == Some(0) { remote_servers .update(&mut cx, |this, cx| { this.delete_ssh_server(index, cx); diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index f3baa5a286816a7247cbfe7a35ea759a8a2e31de..d47e0375ea75f5b359a42f9779f548e195b2d96b 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -13,17 +13,18 @@ use futures::{ mpsc::{self, Sender, UnboundedReceiver, UnboundedSender}, oneshot, }, - future::BoxFuture, + future::{BoxFuture, Shared}, select, select_biased, AsyncReadExt as _, Future, FutureExt as _, StreamExt as _, }; use gpui::{ - AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, SemanticVersion, Task, - WeakModel, + AppContext, AsyncAppContext, BorrowAppContext, Context, EventEmitter, Global, Model, + ModelContext, SemanticVersion, Task, WeakModel, }; use parking_lot::Mutex; use rpc::{ proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage}, - AnyProtoClient, EntityMessageSubscriber, ProtoClient, ProtoMessageHandlerSet, RpcError, + AnyProtoClient, EntityMessageSubscriber, ErrorExt, ProtoClient, ProtoMessageHandlerSet, + RpcError, }; use smol::{ fs, @@ -56,7 +57,7 @@ pub struct SshSocket { socket_path: PathBuf, } -#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] pub struct SshConnectionOptions { pub host: String, pub username: Option, @@ -290,7 +291,7 @@ const MAX_RECONNECT_ATTEMPTS: usize = 3; enum State { Connecting, Connected { - ssh_connection: Box, + ssh_connection: Arc, delegate: Arc, multiplex_task: Task>, @@ -299,7 +300,7 @@ enum State { HeartbeatMissed { missed_heartbeats: usize, - ssh_connection: Box, + ssh_connection: Arc, delegate: Arc, multiplex_task: Task>, @@ -307,7 +308,7 @@ enum State { }, Reconnecting, ReconnectFailed { - ssh_connection: Box, + ssh_connection: Arc, delegate: Arc, error: anyhow::Error, @@ -332,7 +333,7 @@ impl fmt::Display for State { } impl State { - fn ssh_connection(&self) -> Option<&dyn SshRemoteProcess> { + fn ssh_connection(&self) -> Option<&dyn RemoteConnection> { match self { Self::Connected { ssh_connection, .. } => Some(ssh_connection.as_ref()), Self::HeartbeatMissed { ssh_connection, .. 
} => Some(ssh_connection.as_ref()), @@ -462,7 +463,7 @@ impl SshRemoteClient { connection_options: SshConnectionOptions, cancellation: oneshot::Receiver<()>, delegate: Arc, - cx: &AppContext, + cx: &mut AppContext, ) -> Task>>> { cx.spawn(|mut cx| async move { let success = Box::pin(async move { @@ -479,17 +480,28 @@ impl SshRemoteClient { state: Arc::new(Mutex::new(Some(State::Connecting))), })?; - let (ssh_connection, io_task) = Self::establish_connection( + let ssh_connection = cx + .update(|cx| { + cx.update_default_global(|pool: &mut ConnectionPool, cx| { + pool.connect(connection_options, &delegate, cx) + }) + })? + .await + .map_err(|e| e.cloned())?; + let remote_binary_path = ssh_connection + .get_remote_binary_path(&delegate, false, &mut cx) + .await?; + + let io_task = ssh_connection.start_proxy( + remote_binary_path, unique_identifier, false, - connection_options, incoming_tx, outgoing_rx, connection_activity_tx, delegate.clone(), &mut cx, - ) - .await?; + ); let multiplex_task = Self::monitor(this.downgrade(), io_task, &cx); @@ -578,7 +590,7 @@ impl SshRemoteClient { } let state = lock.take().unwrap(); - let (attempts, mut ssh_connection, delegate) = match state { + let (attempts, ssh_connection, delegate) = match state { State::Connected { ssh_connection, delegate, @@ -624,7 +636,7 @@ impl SshRemoteClient { log::info!("Trying to reconnect to ssh server... Attempt {}", attempts); - let identifier = self.unique_identifier.clone(); + let unique_identifier = self.unique_identifier.clone(); let client = self.client.clone(); let reconnect_task = cx.spawn(|this, mut cx| async move { macro_rules! failed { @@ -652,19 +664,33 @@ impl SshRemoteClient { let (incoming_tx, incoming_rx) = mpsc::unbounded::(); let (connection_activity_tx, connection_activity_rx) = mpsc::channel::<()>(1); - let (ssh_connection, io_task) = match Self::establish_connection( - identifier, - true, - connection_options, - incoming_tx, - outgoing_rx, - connection_activity_tx, - delegate.clone(), - &mut cx, - ) + let (ssh_connection, io_task) = match async { + let ssh_connection = cx + .update_global(|pool: &mut ConnectionPool, cx| { + pool.connect(connection_options, &delegate, cx) + })? 
+ .await + .map_err(|error| error.cloned())?; + + let remote_binary_path = ssh_connection + .get_remote_binary_path(&delegate, true, &mut cx) + .await?; + + let io_task = ssh_connection.start_proxy( + remote_binary_path, + unique_identifier, + true, + incoming_tx, + outgoing_rx, + connection_activity_tx, + delegate.clone(), + &mut cx, + ); + anyhow::Ok((ssh_connection, io_task)) + } .await { - Ok((ssh_connection, ssh_process)) => (ssh_connection, ssh_process), + Ok((ssh_connection, io_task)) => (ssh_connection, io_task), Err(error) => { failed!(error, attempts, ssh_connection, delegate); } @@ -834,108 +860,6 @@ impl SshRemoteClient { } } - fn multiplex( - mut ssh_proxy_process: Child, - incoming_tx: UnboundedSender, - mut outgoing_rx: UnboundedReceiver, - mut connection_activity_tx: Sender<()>, - cx: &AsyncAppContext, - ) -> Task> { - let mut child_stderr = ssh_proxy_process.stderr.take().unwrap(); - let mut child_stdout = ssh_proxy_process.stdout.take().unwrap(); - let mut child_stdin = ssh_proxy_process.stdin.take().unwrap(); - - let mut stdin_buffer = Vec::new(); - let mut stdout_buffer = Vec::new(); - let mut stderr_buffer = Vec::new(); - let mut stderr_offset = 0; - - let stdin_task = cx.background_executor().spawn(async move { - while let Some(outgoing) = outgoing_rx.next().await { - write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?; - } - anyhow::Ok(()) - }); - - let stdout_task = cx.background_executor().spawn({ - let mut connection_activity_tx = connection_activity_tx.clone(); - async move { - loop { - stdout_buffer.resize(MESSAGE_LEN_SIZE, 0); - let len = child_stdout.read(&mut stdout_buffer).await?; - - if len == 0 { - return anyhow::Ok(()); - } - - if len < MESSAGE_LEN_SIZE { - child_stdout.read_exact(&mut stdout_buffer[len..]).await?; - } - - let message_len = message_len_from_buffer(&stdout_buffer); - let envelope = - read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len) - .await?; - connection_activity_tx.try_send(()).ok(); - incoming_tx.unbounded_send(envelope).ok(); - } - } - }); - - let stderr_task: Task> = cx.background_executor().spawn(async move { - loop { - stderr_buffer.resize(stderr_offset + 1024, 0); - - let len = child_stderr - .read(&mut stderr_buffer[stderr_offset..]) - .await?; - if len == 0 { - return anyhow::Ok(()); - } - - stderr_offset += len; - let mut start_ix = 0; - while let Some(ix) = stderr_buffer[start_ix..stderr_offset] - .iter() - .position(|b| b == &b'\n') - { - let line_ix = start_ix + ix; - let content = &stderr_buffer[start_ix..line_ix]; - start_ix = line_ix + 1; - if let Ok(record) = serde_json::from_slice::(content) { - record.log(log::logger()) - } else { - eprintln!("(remote) {}", String::from_utf8_lossy(content)); - } - } - stderr_buffer.drain(0..start_ix); - stderr_offset -= start_ix; - - connection_activity_tx.try_send(()).ok(); - } - }); - - cx.spawn(|_| async move { - let result = futures::select! 
{ - result = stdin_task.fuse() => { - result.context("stdin") - } - result = stdout_task.fuse() => { - result.context("stdout") - } - result = stderr_task.fuse() => { - result.context("stderr") - } - }; - - let status = ssh_proxy_process.status().await?.code().unwrap_or(1); - match result { - Ok(_) => Ok(status), - Err(error) => Err(error), - } - }) - } - fn monitor( this: WeakModel, io_task: Task>, @@ -1005,75 +929,6 @@ impl SshRemoteClient { cx.notify(); } - #[allow(clippy::too_many_arguments)] - async fn establish_connection( - unique_identifier: String, - reconnect: bool, - connection_options: SshConnectionOptions, - incoming_tx: UnboundedSender, - outgoing_rx: UnboundedReceiver, - connection_activity_tx: Sender<()>, - delegate: Arc, - cx: &mut AsyncAppContext, - ) -> Result<(Box, Task>)> { - #[cfg(any(test, feature = "test-support"))] - if let Some(fake) = fake::SshRemoteConnection::new(&connection_options) { - let io_task = fake::SshRemoteConnection::multiplex( - fake.connection_options(), - incoming_tx, - outgoing_rx, - connection_activity_tx, - cx, - ) - .await; - return Ok((fake, io_task)); - } - - let ssh_connection = - SshRemoteConnection::new(connection_options, delegate.clone(), cx).await?; - - let platform = ssh_connection.query_platform().await?; - let remote_binary_path = delegate.remote_server_binary_path(platform, cx)?; - if !reconnect { - ssh_connection - .ensure_server_binary(&delegate, &remote_binary_path, platform, cx) - .await?; - } - - let socket = ssh_connection.socket.clone(); - run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?; - - delegate.set_status(Some("Starting proxy"), cx); - - let mut start_proxy_command = format!( - "RUST_LOG={} RUST_BACKTRACE={} {:?} proxy --identifier {}", - std::env::var("RUST_LOG").unwrap_or_default(), - std::env::var("RUST_BACKTRACE").unwrap_or_default(), - remote_binary_path, - unique_identifier, - ); - if reconnect { - start_proxy_command.push_str(" --reconnect"); - } - - let ssh_proxy_process = socket - .ssh_command(start_proxy_command) - // IMPORTANT: we kill this process when we drop the task that uses it. 
- .kill_on_drop(true) - .spawn() - .context("failed to spawn remote server")?; - - let io_task = Self::multiplex( - ssh_proxy_process, - incoming_tx, - outgoing_rx, - connection_activity_tx, - &cx, - ); - - Ok((Box::new(ssh_connection), io_task)) - } - pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Model) { self.client.subscribe_to_entity(remote_id, entity); } @@ -1112,15 +967,21 @@ impl SshRemoteClient { #[cfg(any(test, feature = "test-support"))] pub fn simulate_disconnect(&self, client_cx: &mut AppContext) -> Task<()> { - let port = self.connection_options().port.unwrap(); + let opts = self.connection_options(); client_cx.spawn(|cx| async move { - let (channel, server_cx) = cx - .update_global(|c: &mut fake::ServerConnections, _| c.get(port)) + let connection = cx + .update_global(|c: &mut ConnectionPool, _| { + if let Some(ConnectionPoolEntry::Connecting(c)) = c.connections.get(&opts) { + c.clone() + } else { + panic!("missing test connection") + } + }) + .unwrap() + .await .unwrap(); - let (outgoing_tx, _) = mpsc::unbounded::(); - let (_, incoming_rx) = mpsc::unbounded::(); - channel.reconnect(incoming_rx, outgoing_tx, &server_cx); + connection.simulate_disconnect(&cx); }) } @@ -1128,78 +989,190 @@ impl SshRemoteClient { pub fn fake_server( client_cx: &mut gpui::TestAppContext, server_cx: &mut gpui::TestAppContext, - ) -> (u16, Arc) { - use gpui::BorrowAppContext; + ) -> (SshConnectionOptions, Arc) { + let port = client_cx + .update(|cx| cx.default_global::().connections.len() as u16 + 1); + let opts = SshConnectionOptions { + host: "".to_string(), + port: Some(port), + ..Default::default() + }; let (outgoing_tx, _) = mpsc::unbounded::(); let (_, incoming_rx) = mpsc::unbounded::(); let server_client = server_cx.update(|cx| ChannelClient::new(incoming_rx, outgoing_tx, cx, "fake-server")); - let port = client_cx.update(|cx| { - cx.update_default_global(|c: &mut fake::ServerConnections, _| { - c.push(server_client.clone(), server_cx.to_async()) + let connection: Arc = Arc::new(fake::FakeRemoteConnection { + connection_options: opts.clone(), + server_cx: fake::SendableCx::new(server_cx.to_async()), + server_channel: server_client.clone(), + }); + + client_cx.update(|cx| { + cx.update_default_global(|c: &mut ConnectionPool, cx| { + c.connections.insert( + opts.clone(), + ConnectionPoolEntry::Connecting( + cx.foreground_executor() + .spawn({ + let connection = connection.clone(); + async move { Ok(connection.clone()) } + }) + .shared(), + ), + ); }) }); - (port, server_client) + + (opts, server_client) } #[cfg(any(test, feature = "test-support"))] - pub async fn fake_client(port: u16, client_cx: &mut gpui::TestAppContext) -> Model { + pub async fn fake_client( + opts: SshConnectionOptions, + client_cx: &mut gpui::TestAppContext, + ) -> Model { let (_tx, rx) = oneshot::channel(); client_cx - .update(|cx| { - Self::new( - "fake".to_string(), - SshConnectionOptions { - host: "".to_string(), - port: Some(port), - ..Default::default() - }, - rx, - Arc::new(fake::Delegate), - cx, - ) - }) + .update(|cx| Self::new("fake".to_string(), opts, rx, Arc::new(fake::Delegate), cx)) .await .unwrap() .unwrap() } } +enum ConnectionPoolEntry { + Connecting(Shared, Arc>>>), + Connected(Weak), +} + +#[derive(Default)] +struct ConnectionPool { + connections: HashMap, +} + +impl Global for ConnectionPool {} + +impl ConnectionPool { + pub fn connect( + &mut self, + opts: SshConnectionOptions, + delegate: &Arc, + cx: &mut AppContext, + ) -> Shared, Arc>>> { + let connection = 
self.connections.get(&opts); + match connection { + Some(ConnectionPoolEntry::Connecting(task)) => { + let delegate = delegate.clone(); + cx.spawn(|mut cx| async move { + delegate.set_status(Some("Waiting for existing connection attempt"), &mut cx); + }) + .detach(); + return task.clone(); + } + Some(ConnectionPoolEntry::Connected(ssh)) => { + if let Some(ssh) = ssh.upgrade() { + if !ssh.has_been_killed() { + return Task::ready(Ok(ssh)).shared(); + } + } + self.connections.remove(&opts); + } + None => {} + } + + let task = cx + .spawn({ + let opts = opts.clone(); + let delegate = delegate.clone(); + |mut cx| async move { + let connection = SshRemoteConnection::new(opts.clone(), delegate, &mut cx) + .await + .map(|connection| Arc::new(connection) as Arc); + + cx.update_global(|pool: &mut Self, _| { + debug_assert!(matches!( + pool.connections.get(&opts), + Some(ConnectionPoolEntry::Connecting(_)) + )); + match connection { + Ok(connection) => { + pool.connections.insert( + opts.clone(), + ConnectionPoolEntry::Connected(Arc::downgrade(&connection)), + ); + Ok(connection) + } + Err(error) => { + pool.connections.remove(&opts); + Err(Arc::new(error)) + } + } + })? + } + }) + .shared(); + + self.connections + .insert(opts.clone(), ConnectionPoolEntry::Connecting(task.clone())); + task + } +} + impl From for AnyProtoClient { fn from(client: SshRemoteClient) -> Self { AnyProtoClient::new(client.client.clone()) } } -#[async_trait] -trait SshRemoteProcess: Send + Sync { - async fn kill(&mut self) -> Result<()>; +#[async_trait(?Send)] +trait RemoteConnection: Send + Sync { + #[allow(clippy::too_many_arguments)] + fn start_proxy( + &self, + remote_binary_path: PathBuf, + unique_identifier: String, + reconnect: bool, + incoming_tx: UnboundedSender, + outgoing_rx: UnboundedReceiver, + connection_activity_tx: Sender<()>, + delegate: Arc, + cx: &mut AsyncAppContext, + ) -> Task>; + async fn get_remote_binary_path( + &self, + delegate: &Arc, + reconnect: bool, + cx: &mut AsyncAppContext, + ) -> Result; + async fn kill(&self) -> Result<()>; + fn has_been_killed(&self) -> bool; fn ssh_args(&self) -> Vec; fn connection_options(&self) -> SshConnectionOptions; + + #[cfg(any(test, feature = "test-support"))] + fn simulate_disconnect(&self, _: &AsyncAppContext) {} } struct SshRemoteConnection { socket: SshSocket, - master_process: process::Child, + master_process: Mutex>, + platform: SshPlatform, _temp_dir: TempDir, } -impl Drop for SshRemoteConnection { - fn drop(&mut self) { - if let Err(error) = self.master_process.kill() { - log::error!("failed to kill SSH master process: {}", error); - } +#[async_trait(?Send)] +impl RemoteConnection for SshRemoteConnection { + async fn kill(&self) -> Result<()> { + let Some(mut process) = self.master_process.lock().take() else { + return Ok(()); + }; + process.kill().ok(); + process.status().await?; + Ok(()) } -} -#[async_trait] -impl SshRemoteProcess for SshRemoteConnection { - async fn kill(&mut self) -> Result<()> { - self.master_process.kill()?; - - self.master_process.status().await?; - - Ok(()) + fn has_been_killed(&self) -> bool { + self.master_process.lock().is_none() } fn ssh_args(&self) -> Vec { @@ -1209,6 +1182,70 @@ impl SshRemoteProcess for SshRemoteConnection { fn connection_options(&self) -> SshConnectionOptions { self.socket.connection_options.clone() } + + async fn get_remote_binary_path( + &self, + delegate: &Arc, + reconnect: bool, + cx: &mut AsyncAppContext, + ) -> Result { + let platform = self.platform; + let remote_binary_path = 
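The `ConnectionPool::connect` method above deduplicates concurrent connection attempts: the first caller stores a `Shared` future keyed by the `SshConnectionOptions`, later callers clone that same future, finished connections are downgraded to `Weak` entries, and failed attempts are evicted from the map. Below is a minimal standalone sketch of just the sharing step, assuming the `futures` and `anyhow` crates; the type names are stand-ins, not Zed's API.

```rust
use std::{collections::HashMap, sync::Arc};

use futures::{
    future::{BoxFuture, Shared},
    FutureExt,
};

// Hypothetical stand-ins for the real connection types.
#[derive(Clone, Hash, PartialEq, Eq)]
struct ConnectionOptions(String);
struct Connection;

type SharedConnect = Shared<BoxFuture<'static, Result<Arc<Connection>, Arc<anyhow::Error>>>>;

#[derive(Default)]
struct Pool {
    connecting: HashMap<ConnectionOptions, SharedConnect>,
}

impl Pool {
    /// Return the in-flight attempt for `opts` if one exists; otherwise start
    /// one and remember it so concurrent callers await the same future.
    fn connect(&mut self, opts: ConnectionOptions) -> SharedConnect {
        if let Some(task) = self.connecting.get(&opts) {
            return task.clone();
        }
        let task: SharedConnect = async move {
            // ...establish the SSH connection here...
            Ok(Arc::new(Connection))
        }
        .boxed()
        .shared();
        self.connecting.insert(opts, task.clone());
        task
    }
}
```

Because `Shared` hands every awaiter a clone of the same output, the error type must be cloneable as well, which is why both this sketch and the patch wrap errors in `Arc` (see the `Err(Arc::new(error))` branch above).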
delegate.remote_server_binary_path(platform, cx)?; + if !reconnect { + self.ensure_server_binary(&delegate, &remote_binary_path, platform, cx) + .await?; + } + + let socket = self.socket.clone(); + run_cmd(socket.ssh_command(&remote_binary_path).arg("version")).await?; + Ok(remote_binary_path) + } + + fn start_proxy( + &self, + remote_binary_path: PathBuf, + unique_identifier: String, + reconnect: bool, + incoming_tx: UnboundedSender, + outgoing_rx: UnboundedReceiver, + connection_activity_tx: Sender<()>, + delegate: Arc, + cx: &mut AsyncAppContext, + ) -> Task> { + delegate.set_status(Some("Starting proxy"), cx); + + let mut start_proxy_command = format!( + "RUST_LOG={} RUST_BACKTRACE={} {:?} proxy --identifier {}", + std::env::var("RUST_LOG").unwrap_or_default(), + std::env::var("RUST_BACKTRACE").unwrap_or_default(), + remote_binary_path, + unique_identifier, + ); + if reconnect { + start_proxy_command.push_str(" --reconnect"); + } + + let ssh_proxy_process = match self + .socket + .ssh_command(start_proxy_command) + // IMPORTANT: we kill this process when we drop the task that uses it. + .kill_on_drop(true) + .spawn() + { + Ok(process) => process, + Err(error) => { + return Task::ready(Err(anyhow!("failed to spawn remote server: {}", error))) + } + }; + + Self::multiplex( + ssh_proxy_process, + incoming_tx, + outgoing_rx, + connection_activity_tx, + &cx, + ) + } } impl SshRemoteConnection { @@ -1305,6 +1342,7 @@ impl SshRemoteConnection { ]) .arg(format!("ControlPath={}", socket_path.display())) .arg(&url) + .kill_on_drop(true) .spawn()?; // Wait for this ssh process to close its stdout, indicating that authentication @@ -1348,16 +1386,139 @@ impl SshRemoteConnection { Err(anyhow!(error_message))?; } + let socket = SshSocket { + connection_options, + socket_path, + }; + + let os = run_cmd(socket.ssh_command("uname").arg("-s")).await?; + let arch = run_cmd(socket.ssh_command("uname").arg("-m")).await?; + + let os = match os.trim() { + "Darwin" => "macos", + "Linux" => "linux", + _ => Err(anyhow!("unknown uname os {os:?}"))?, + }; + let arch = if arch.starts_with("arm") || arch.starts_with("aarch64") { + "aarch64" + } else if arch.starts_with("x86") || arch.starts_with("i686") { + "x86_64" + } else { + Err(anyhow!("unknown uname architecture {arch:?}"))? 
+ }; + + let platform = SshPlatform { os, arch }; + Ok(Self { - socket: SshSocket { - connection_options, - socket_path, - }, - master_process, + socket, + master_process: Mutex::new(Some(master_process)), + platform, _temp_dir: temp_dir, }) } + fn multiplex( + mut ssh_proxy_process: Child, + incoming_tx: UnboundedSender, + mut outgoing_rx: UnboundedReceiver, + mut connection_activity_tx: Sender<()>, + cx: &AsyncAppContext, + ) -> Task> { + let mut child_stderr = ssh_proxy_process.stderr.take().unwrap(); + let mut child_stdout = ssh_proxy_process.stdout.take().unwrap(); + let mut child_stdin = ssh_proxy_process.stdin.take().unwrap(); + + let mut stdin_buffer = Vec::new(); + let mut stdout_buffer = Vec::new(); + let mut stderr_buffer = Vec::new(); + let mut stderr_offset = 0; + + let stdin_task = cx.background_executor().spawn(async move { + while let Some(outgoing) = outgoing_rx.next().await { + write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?; + } + anyhow::Ok(()) + }); + + let stdout_task = cx.background_executor().spawn({ + let mut connection_activity_tx = connection_activity_tx.clone(); + async move { + loop { + stdout_buffer.resize(MESSAGE_LEN_SIZE, 0); + let len = child_stdout.read(&mut stdout_buffer).await?; + + if len == 0 { + return anyhow::Ok(()); + } + + if len < MESSAGE_LEN_SIZE { + child_stdout.read_exact(&mut stdout_buffer[len..]).await?; + } + + let message_len = message_len_from_buffer(&stdout_buffer); + let envelope = + read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len) + .await?; + connection_activity_tx.try_send(()).ok(); + incoming_tx.unbounded_send(envelope).ok(); + } + } + }); + + let stderr_task: Task> = cx.background_executor().spawn(async move { + loop { + stderr_buffer.resize(stderr_offset + 1024, 0); + + let len = child_stderr + .read(&mut stderr_buffer[stderr_offset..]) + .await?; + if len == 0 { + return anyhow::Ok(()); + } + + stderr_offset += len; + let mut start_ix = 0; + while let Some(ix) = stderr_buffer[start_ix..stderr_offset] + .iter() + .position(|b| b == &b'\n') + { + let line_ix = start_ix + ix; + let content = &stderr_buffer[start_ix..line_ix]; + start_ix = line_ix + 1; + if let Ok(record) = serde_json::from_slice::(content) { + record.log(log::logger()) + } else { + eprintln!("(remote) {}", String::from_utf8_lossy(content)); + } + } + stderr_buffer.drain(0..start_ix); + stderr_offset -= start_ix; + + connection_activity_tx.try_send(()).ok(); + } + }); + + cx.spawn(|_| async move { + let result = futures::select! 
{ + result = stdin_task.fuse() => { + result.context("stdin") + } + result = stdout_task.fuse() => { + result.context("stdout") + } + result = stderr_task.fuse() => { + result.context("stderr") + } + }; + + let status = ssh_proxy_process.status().await?.code().unwrap_or(1); + match result { + Ok(_) => Ok(status), + Err(error) => Err(error), + } + }) + } + async fn ensure_server_binary( &self, delegate: &Arc, @@ -1621,26 +1782,6 @@ impl SshRemoteConnection { Ok(()) } - async fn query_platform(&self) -> Result { - let os = run_cmd(self.socket.ssh_command("uname").arg("-s")).await?; - let arch = run_cmd(self.socket.ssh_command("uname").arg("-m")).await?; - - let os = match os.trim() { - "Darwin" => "macos", - "Linux" => "linux", - _ => Err(anyhow!("unknown uname os {os:?}"))?, - }; - let arch = if arch.starts_with("arm") || arch.starts_with("aarch64") { - "aarch64" - } else if arch.starts_with("x86") || arch.starts_with("i686") { - "x86_64" - } else { - Err(anyhow!("unknown uname architecture {arch:?}"))? - }; - - Ok(SshPlatform { os, arch }) - } - async fn upload_file(&self, src_path: &Path, dest_path: &Path) -> Result<()> { let mut command = process::Command::new("scp"); let output = self @@ -1974,50 +2115,86 @@ mod fake { }, select_biased, FutureExt, SinkExt, StreamExt, }; - use gpui::{AsyncAppContext, BorrowAppContext, Global, SemanticVersion, Task}; + use gpui::{AsyncAppContext, SemanticVersion, Task}; use rpc::proto::Envelope; use super::{ - ChannelClient, ServerBinary, SshClientDelegate, SshConnectionOptions, SshPlatform, - SshRemoteProcess, + ChannelClient, RemoteConnection, ServerBinary, SshClientDelegate, SshConnectionOptions, + SshPlatform, }; - pub(super) struct SshRemoteConnection { - connection_options: SshConnectionOptions, + pub(super) struct FakeRemoteConnection { + pub(super) connection_options: SshConnectionOptions, + pub(super) server_channel: Arc, + pub(super) server_cx: SendableCx, } - impl SshRemoteConnection { - pub(super) fn new( - connection_options: &SshConnectionOptions, - ) -> Option> { - if connection_options.host == "" { - return Some(Box::new(Self { - connection_options: connection_options.clone(), - })); - } - return None; + pub(super) struct SendableCx(AsyncAppContext); + // safety: you can only get the other cx on the main thread. 
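The stdout half of the `multiplex` function above reads a fixed-size length header (`MESSAGE_LEN_SIZE` bytes), then a payload of exactly that length, before forwarding the envelope on `incoming_tx`. The loop below is a standalone sketch of that framing pattern only; the 4-byte little-endian header, the helper name, and the EOF handling are assumptions made for illustration, not the signatures of Zed's `message_len_from_buffer` / `read_message_with_len`.

```rust
use futures::{AsyncRead, AsyncReadExt};

// Assumed header size; the real constant lives in Zed's remote protocol code.
const MESSAGE_LEN_SIZE: usize = 4;

/// Read one length-prefixed frame, returning `None` on a clean EOF.
async fn read_frame<R: AsyncRead + Unpin>(reader: &mut R) -> std::io::Result<Option<Vec<u8>>> {
    let mut len_buf = [0u8; MESSAGE_LEN_SIZE];
    if let Err(err) = reader.read_exact(&mut len_buf).await {
        return if err.kind() == std::io::ErrorKind::UnexpectedEof {
            Ok(None)
        } else {
            Err(err)
        };
    }
    let len = u32::from_le_bytes(len_buf) as usize;
    let mut payload = vec![0u8; len];
    reader.read_exact(&mut payload).await?;
    Ok(Some(payload))
}
```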
+ impl SendableCx { + pub(super) fn new(cx: AsyncAppContext) -> Self { + Self(cx) + } + fn get(&self, _: &AsyncAppContext) -> AsyncAppContext { + self.0.clone() + } + } + unsafe impl Send for SendableCx {} + unsafe impl Sync for SendableCx {} + + #[async_trait(?Send)] + impl RemoteConnection for FakeRemoteConnection { + async fn kill(&self) -> Result<()> { + Ok(()) + } + + fn has_been_killed(&self) -> bool { + false + } + + fn ssh_args(&self) -> Vec { + Vec::new() + } + + fn connection_options(&self) -> SshConnectionOptions { + self.connection_options.clone() + } + + fn simulate_disconnect(&self, cx: &AsyncAppContext) { + let (outgoing_tx, _) = mpsc::unbounded::(); + let (_, incoming_rx) = mpsc::unbounded::(); + self.server_channel + .reconnect(incoming_rx, outgoing_tx, &self.server_cx.get(&cx)); + } + + async fn get_remote_binary_path( + &self, + _delegate: &Arc, + _reconnect: bool, + _cx: &mut AsyncAppContext, + ) -> Result { + Ok(PathBuf::new()) } - pub(super) async fn multiplex( - connection_options: SshConnectionOptions, + + fn start_proxy( + &self, + _remote_binary_path: PathBuf, + _unique_identifier: String, + _reconnect: bool, mut client_incoming_tx: mpsc::UnboundedSender, mut client_outgoing_rx: mpsc::UnboundedReceiver, mut connection_activity_tx: Sender<()>, + _delegate: Arc, cx: &mut AsyncAppContext, ) -> Task> { let (mut server_incoming_tx, server_incoming_rx) = mpsc::unbounded::(); let (server_outgoing_tx, mut server_outgoing_rx) = mpsc::unbounded::(); - let (channel, server_cx) = cx - .update(|cx| { - cx.update_global(|conns: &mut ServerConnections, _| { - conns.get(connection_options.port.unwrap()) - }) - }) - .unwrap(); - channel.reconnect(server_incoming_rx, server_outgoing_tx, &server_cx); - - // send to proxy_tx to get to the server. 
- // receive from + self.server_channel.reconnect( + server_incoming_rx, + server_outgoing_tx, + &self.server_cx.get(cx), + ); cx.background_executor().spawn(async move { loop { @@ -2041,39 +2218,6 @@ mod fake { } } - #[async_trait] - impl SshRemoteProcess for SshRemoteConnection { - async fn kill(&mut self) -> Result<()> { - Ok(()) - } - - fn ssh_args(&self) -> Vec { - Vec::new() - } - - fn connection_options(&self) -> SshConnectionOptions { - self.connection_options.clone() - } - } - - #[derive(Default)] - pub(super) struct ServerConnections(Vec<(Arc, AsyncAppContext)>); - impl Global for ServerConnections {} - - impl ServerConnections { - pub(super) fn push(&mut self, server: Arc, cx: AsyncAppContext) -> u16 { - self.0.push((server.clone(), cx)); - self.0.len() as u16 - 1 - } - - pub(super) fn get(&mut self, port: u16) -> (Arc, AsyncAppContext) { - self.0 - .get(port as usize) - .expect("no fake server for port") - .clone() - } - } - pub(super) struct Delegate; impl SshClientDelegate for Delegate { @@ -2099,8 +2243,6 @@ mod fake { unreachable!() } - fn set_status(&self, _: Option<&str>, _: &mut AsyncAppContext) { - unreachable!() - } + fn set_status(&self, _: Option<&str>, _: &mut AsyncAppContext) {} } } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 32333def7fb7f10abce8902797c40ec70eba9506..f7420ef5b091b70c8036a77650eb81208edfa2d3 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -702,7 +702,7 @@ async fn init_test( ) -> (Model, Model, Arc) { init_logger(); - let (forwarder, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); + let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( "/code", @@ -744,7 +744,7 @@ async fn init_test( ) }); - let ssh = SshRemoteClient::fake_client(forwarder, cx).await; + let ssh = SshRemoteClient::fake_client(opts, cx).await; let project = build_project(ssh, cx); project .update(cx, { From 6cd5c9e32f5191dbe26e6db78d9d911cdc549f9f Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 24 Oct 2024 19:37:09 -0300 Subject: [PATCH 25/76] assistant: Tweak the model selector design (#19704) Exploring using the UI font for it, as it is more common for dropdowns and popovers throughout the app. Feeling like it makes it lighter and also shorter in width! 
| Before | After | |--------|--------| | Screenshot 2024-10-24 at 16 39 04 | Screenshot 2024-10-24 at 16 38 26 | Release Notes: - N/A --- crates/assistant/src/model_selector.rs | 51 ++++++++++++-------------- 1 file changed, 23 insertions(+), 28 deletions(-) diff --git a/crates/assistant/src/model_selector.rs b/crates/assistant/src/model_selector.rs index 8a7946c028ca93bbdaf2f9716bdbbbabe9d97b6e..c9fbdd36c339e34dd021efa414e3d7923109a636 100644 --- a/crates/assistant/src/model_selector.rs +++ b/crates/assistant/src/model_selector.rs @@ -158,39 +158,34 @@ impl PickerDelegate for ModelPickerDelegate { .spacing(ListItemSpacing::Sparse) .selected(selected) .start_slot( - div().pr_1().child( + div().pr_0p5().child( Icon::new(model_info.icon) .color(Color::Muted) .size(IconSize::Medium), ), ) .child( - h_flex() - .w_full() - .justify_between() - .font_buffer(cx) - .min_w(px(240.)) - .child( - h_flex() - .gap_2() - .child(Label::new(model_info.model.name().0.clone())) - .child( - Label::new(provider_name) - .size(LabelSize::XSmall) - .color(Color::Muted), - ) - .children(match model_info.availability { - LanguageModelAvailability::Public => None, - LanguageModelAvailability::RequiresPlan(Plan::Free) => None, - LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => { - show_badges.then(|| { - Label::new("Pro") - .size(LabelSize::XSmall) - .color(Color::Muted) - }) - } - }), - ), + h_flex().w_full().justify_between().min_w(px(200.)).child( + h_flex() + .gap_1p5() + .child(Label::new(model_info.model.name().0.clone())) + .child( + Label::new(provider_name) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + .children(match model_info.availability { + LanguageModelAvailability::Public => None, + LanguageModelAvailability::RequiresPlan(Plan::Free) => None, + LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => { + show_badges.then(|| { + Label::new("Pro") + .size(LabelSize::XSmall) + .color(Color::Muted) + }) + } + }), + ), ) .end_slot(div().when(model_info.is_selected, |this| { this.child( @@ -212,7 +207,7 @@ impl PickerDelegate for ModelPickerDelegate { h_flex() .w_full() .border_t_1() - .border_color(cx.theme().colors().border) + .border_color(cx.theme().colors().border_variant) .p_1() .gap_4() .justify_between() From 42a7402cc5354fa6a3182d972fa7f551dc9760ba Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 24 Oct 2024 19:37:42 -0300 Subject: [PATCH 26/76] assistant: Use a labeled button for the slash command menu (#19703) This should help a bit more the discoverability of the slash commands. 
Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 10 +++++----- crates/assistant/src/slash_command_picker.rs | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index d4da36adedeca2cd5ae9f57cbc4158e8ce8ac486..6d525a1ff08754fee6c317d90b41e47f4d776c62 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3949,7 +3949,7 @@ impl Render for ContextEditor { .bg(cx.theme().colors().editor_background) .child( h_flex() - .gap_2() + .gap_1() .child(render_inject_context_menu(cx.view().downgrade(), cx)) .child( IconButton::new("quote-button", IconName::Quote) @@ -4249,11 +4249,11 @@ fn render_inject_context_menu( slash_command_picker::SlashCommandSelector::new( commands.clone(), active_context_editor, - IconButton::new("trigger", IconName::SlashSquare) + Button::new("trigger", "Add Context") + .icon(IconName::Plus) .icon_size(IconSize::Small) - .tooltip(|cx| { - Tooltip::with_meta("Insert Context", None, "Type / to insert via keyboard", cx) - }), + .icon_position(IconPosition::Start) + .tooltip(|cx| Tooltip::text("Type / to insert via keyboard", cx)), ) } diff --git a/crates/assistant/src/slash_command_picker.rs b/crates/assistant/src/slash_command_picker.rs index 58023848b0e508eb63ccbe626cddad34cebe5c1f..35ae90d412cfa0aaa956566060fbc71d27922c25 100644 --- a/crates/assistant/src/slash_command_picker.rs +++ b/crates/assistant/src/slash_command_picker.rs @@ -178,7 +178,7 @@ impl PickerDelegate for SlashCommandDelegate { SlashCommandEntry::Info(info) => Some( ListItem::new(ix) .inset(true) - .spacing(ListItemSpacing::Sparse) + .spacing(ListItemSpacing::Dense) .selected(selected) .child( h_flex() @@ -224,7 +224,7 @@ impl PickerDelegate for SlashCommandDelegate { SlashCommandEntry::Advert { renderer, .. } => Some( ListItem::new(ix) .inset(true) - .spacing(ListItemSpacing::Sparse) + .spacing(ListItemSpacing::Dense) .selected(selected) .child(renderer(cx)), ), From ebc3031fd9d01f872f68f3ef9f56bad52a885caa Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 24 Oct 2024 20:43:40 -0600 Subject: [PATCH 27/76] Inline initialization (#19711) This restores all the init behaviour into main again. This means we never need to call init_ui (and so we can't call it more than once). Release Notes: - (Nightly only) fixes a panic when using the cli to open another file in a running zed. --- crates/zed/src/main.rs | 285 +++++++++++++--------------- crates/zed/src/zed/open_listener.rs | 25 +-- 2 files changed, 133 insertions(+), 177 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 01c9d86c60032c033a9c73ac8471942428eb9db8..017deca5682d32ebdad747fe886a8d2f3d99ad10 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -7,7 +7,6 @@ mod reliability; mod zed; use anyhow::{anyhow, Context as _, Result}; -use assistant::PromptBuilder; use chrono::Offset; use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; @@ -136,142 +135,6 @@ fn fail_to_open_window(e: anyhow::Error, _cx: &mut AppContext) { } } -// init_common is called for both headless and normal mode. 
-fn init_common(app_state: Arc, cx: &mut AppContext) -> Arc { - SystemAppearance::init(cx); - theme::init(theme::LoadThemes::All(Box::new(Assets)), cx); - command_palette::init(cx); - let copilot_language_server_id = app_state.languages.next_language_server_id(); - copilot::init( - copilot_language_server_id, - app_state.fs.clone(), - app_state.client.http_client(), - app_state.node_runtime.clone(), - cx, - ); - supermaven::init(app_state.client.clone(), cx); - language_model::init( - app_state.user_store.clone(), - app_state.client.clone(), - app_state.fs.clone(), - cx, - ); - snippet_provider::init(cx); - inline_completion_registry::init(app_state.client.telemetry().clone(), cx); - let prompt_builder = assistant::init( - app_state.fs.clone(), - app_state.client.clone(), - stdout_is_a_pty(), - cx, - ); - repl::init( - app_state.fs.clone(), - app_state.client.telemetry().clone(), - cx, - ); - extension::init( - app_state.fs.clone(), - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.languages.clone(), - ThemeRegistry::global(cx), - cx, - ); - recent_projects::init(cx); - prompt_builder -} - -fn init_ui( - app_state: Arc, - prompt_builder: Arc, - cx: &mut AppContext, -) -> Result<()> { - load_embedded_fonts(cx); - - #[cfg(target_os = "linux")] - crate::zed::linux_prompts::init(cx); - - app_state.languages.set_theme(cx.theme().clone()); - editor::init(cx); - image_viewer::init(cx); - diagnostics::init(cx); - - audio::init(Assets, cx); - workspace::init(app_state.clone(), cx); - - go_to_line::init(cx); - file_finder::init(cx); - tab_switcher::init(cx); - outline::init(cx); - project_symbols::init(cx); - project_panel::init(Assets, cx); - outline_panel::init(Assets, cx); - tasks_ui::init(cx); - snippets_ui::init(cx); - channel::init(&app_state.client.clone(), app_state.user_store.clone(), cx); - search::init(cx); - vim::init(cx); - terminal_view::init(cx); - journal::init(app_state.clone(), cx); - language_selector::init(cx); - theme_selector::init(cx); - language_tools::init(cx); - call::init(app_state.client.clone(), app_state.user_store.clone(), cx); - notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx); - collab_ui::init(&app_state, cx); - feedback::init(cx); - markdown_preview::init(cx); - welcome::init(cx); - settings_ui::init(cx); - extensions_ui::init(cx); - - cx.observe_global::({ - let languages = app_state.languages.clone(); - let http = app_state.client.http_client(); - let client = app_state.client.clone(); - - move |cx| { - for &mut window in cx.windows().iter_mut() { - let background_appearance = cx.theme().window_background_appearance(); - window - .update(cx, |_, cx| { - cx.set_background_appearance(background_appearance) - }) - .ok(); - } - languages.set_theme(cx.theme().clone()); - let new_host = &client::ClientSettings::get_global(cx).server_url; - if &http.base_url() != new_host { - http.set_base_url(new_host); - if client.status().borrow().is_connected() { - client.reconnect(&cx.to_async()); - } - } - } - }) - .detach(); - let telemetry = app_state.client.telemetry(); - telemetry.report_setting_event("theme", cx.theme().name.to_string()); - telemetry.report_setting_event("keymap", BaseKeymap::get_global(cx).to_string()); - telemetry.flush_events(); - - let fs = app_state.fs.clone(); - load_user_themes_in_background(fs.clone(), cx); - watch_themes(fs.clone(), cx); - watch_languages(fs.clone(), app_state.languages.clone(), cx); - watch_file_types(fs.clone(), cx); - - cx.set_menus(app_menus()); - 
initialize_workspace(app_state.clone(), prompt_builder, cx); - - cx.activate(true); - - cx.spawn(|cx| async move { authenticate(app_state.client.clone(), &cx).await }) - .detach_and_log_err(cx); - - Ok(()) -} - fn main() { menu::init(); zed_actions::init(); @@ -509,7 +372,133 @@ fn main() { installation_id.clone().map(|id| id.to_string()), cx, ); - let prompt_builder = init_common(app_state.clone(), cx); + + SystemAppearance::init(cx); + theme::init(theme::LoadThemes::All(Box::new(Assets)), cx); + command_palette::init(cx); + let copilot_language_server_id = app_state.languages.next_language_server_id(); + copilot::init( + copilot_language_server_id, + app_state.fs.clone(), + app_state.client.http_client(), + app_state.node_runtime.clone(), + cx, + ); + supermaven::init(app_state.client.clone(), cx); + language_model::init( + app_state.user_store.clone(), + app_state.client.clone(), + app_state.fs.clone(), + cx, + ); + snippet_provider::init(cx); + inline_completion_registry::init(app_state.client.telemetry().clone(), cx); + let prompt_builder = assistant::init( + app_state.fs.clone(), + app_state.client.clone(), + stdout_is_a_pty(), + cx, + ); + repl::init( + app_state.fs.clone(), + app_state.client.telemetry().clone(), + cx, + ); + extension::init( + app_state.fs.clone(), + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.languages.clone(), + ThemeRegistry::global(cx), + cx, + ); + recent_projects::init(cx); + + load_embedded_fonts(cx); + + #[cfg(target_os = "linux")] + crate::zed::linux_prompts::init(cx); + + app_state.languages.set_theme(cx.theme().clone()); + editor::init(cx); + image_viewer::init(cx); + diagnostics::init(cx); + + audio::init(Assets, cx); + workspace::init(app_state.clone(), cx); + + go_to_line::init(cx); + file_finder::init(cx); + tab_switcher::init(cx); + outline::init(cx); + project_symbols::init(cx); + project_panel::init(Assets, cx); + outline_panel::init(Assets, cx); + tasks_ui::init(cx); + snippets_ui::init(cx); + channel::init(&app_state.client.clone(), app_state.user_store.clone(), cx); + search::init(cx); + vim::init(cx); + terminal_view::init(cx); + journal::init(app_state.clone(), cx); + language_selector::init(cx); + theme_selector::init(cx); + language_tools::init(cx); + call::init(app_state.client.clone(), app_state.user_store.clone(), cx); + notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx); + collab_ui::init(&app_state, cx); + feedback::init(cx); + markdown_preview::init(cx); + welcome::init(cx); + settings_ui::init(cx); + extensions_ui::init(cx); + + cx.observe_global::({ + let languages = app_state.languages.clone(); + let http = app_state.client.http_client(); + let client = app_state.client.clone(); + + move |cx| { + for &mut window in cx.windows().iter_mut() { + let background_appearance = cx.theme().window_background_appearance(); + window + .update(cx, |_, cx| { + cx.set_background_appearance(background_appearance) + }) + .ok(); + } + languages.set_theme(cx.theme().clone()); + let new_host = &client::ClientSettings::get_global(cx).server_url; + if &http.base_url() != new_host { + http.set_base_url(new_host); + if client.status().borrow().is_connected() { + client.reconnect(&cx.to_async()); + } + } + } + }) + .detach(); + let telemetry = app_state.client.telemetry(); + telemetry.report_setting_event("theme", cx.theme().name.to_string()); + telemetry.report_setting_event("keymap", BaseKeymap::get_global(cx).to_string()); + telemetry.flush_events(); + + let fs = app_state.fs.clone(); + 
load_user_themes_in_background(fs.clone(), cx); + watch_themes(fs.clone(), cx); + watch_languages(fs.clone(), app_state.languages.clone(), cx); + watch_file_types(fs.clone(), cx); + + cx.set_menus(app_menus()); + initialize_workspace(app_state.clone(), prompt_builder, cx); + + cx.activate(true); + + cx.spawn({ + let client = app_state.client.clone(); + |cx| async move { authenticate(client, &cx).await } + }) + .detach_and_log_err(cx); let args = Args::parse(); let urls: Vec<_> = args @@ -529,10 +518,9 @@ fn main() { .and_then(|urls| OpenRequest::parse(urls, cx).log_err()) { Some(request) => { - handle_open_request(request, app_state.clone(), prompt_builder.clone(), cx); + handle_open_request(request, app_state.clone(), cx); } None => { - init_ui(app_state.clone(), prompt_builder.clone(), cx).unwrap(); cx.spawn({ let app_state = app_state.clone(); |mut cx| async move { @@ -546,12 +534,11 @@ fn main() { } let app_state = app_state.clone(); - let prompt_builder = prompt_builder.clone(); cx.spawn(move |cx| async move { while let Some(urls) = open_rx.next().await { cx.update(|cx| { if let Some(request) = OpenRequest::parse(urls, cx).log_err() { - handle_open_request(request, app_state.clone(), prompt_builder.clone(), cx); + handle_open_request(request, app_state.clone(), cx); } }) .ok(); @@ -621,24 +608,14 @@ fn handle_settings_changed(error: Option, cx: &mut AppContext) { } } -fn handle_open_request( - request: OpenRequest, - app_state: Arc, - prompt_builder: Arc, - cx: &mut AppContext, -) { +fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut AppContext) { if let Some(connection) = request.cli_connection { let app_state = app_state.clone(); - cx.spawn(move |cx| handle_cli_connection(connection, app_state, prompt_builder, cx)) + cx.spawn(move |cx| handle_cli_connection(connection, app_state, cx)) .detach(); return; } - if let Err(e) = init_ui(app_state.clone(), prompt_builder, cx) { - fail_to_open_window(e, cx); - return; - }; - if let Some(connection_info) = request.ssh_connection { cx.spawn(|mut cx| async move { let nickname = cx diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 32bfdd42ae8359fdffdf1deac54029d2c7e545f3..5656ffa86c7ea491f3f202a4d68acb7f2fe91075 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -1,7 +1,6 @@ +use crate::handle_open_request; use crate::restorable_workspace_locations; -use crate::{handle_open_request, init_ui}; use anyhow::{anyhow, Context, Result}; -use assistant::PromptBuilder; use cli::{ipc, IpcHandshake}; use cli::{ipc::IpcSender, CliRequest, CliResponse}; use client::parse_zed_link; @@ -252,7 +251,6 @@ pub async fn open_paths_with_positions( pub async fn handle_cli_connection( (mut requests, responses): (mpsc::Receiver, IpcSender), app_state: Arc, - prompt_builder: Arc, mut cx: AsyncAppContext, ) { if let Some(request) = requests.next().await { @@ -262,19 +260,13 @@ pub async fn handle_cli_connection( paths, wait, open_new_workspace, - env, } => { if !urls.is_empty() { cx.update(|cx| { match OpenRequest::parse(urls, cx) { Ok(open_request) => { - handle_open_request( - open_request, - app_state.clone(), - prompt_builder.clone(), - cx, - ); + handle_open_request(open_request, app_state.clone(), cx); responses.send(CliResponse::Exit { status: 0 }).log_err(); } Err(e) => { @@ -291,19 +283,6 @@ pub async fn handle_cli_connection( return; } - if let Err(e) = cx - .update(|cx| init_ui(app_state.clone(), prompt_builder.clone(), cx)) - .and_then(|r| r) - { - 
responses - .send(CliResponse::Stderr { - message: format!("{e}"), - }) - .log_err(); - responses.send(CliResponse::Exit { status: 1 }).log_err(); - return; - } - let open_workspace_result = open_workspaces( paths, open_new_workspace, From 6eb67882019a5171c336d5dbcc8ec1313e8a456f Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 25 Oct 2024 09:34:50 +0200 Subject: [PATCH 28/76] image viewer: Reuse existing tabs (#19717) Co-authored-by: Kirill Co-authored-by: Mikayla Fixes #9896 Release Notes: - Fixed an issue where clicking on an image inside the project panel would not re-use an existing image tab Co-authored-by: Kirill Co-authored-by: Mikayla --- crates/image_viewer/src/image_viewer.rs | 82 ++++++++++++++++++++----- 1 file changed, 66 insertions(+), 16 deletions(-) diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 5691119dea9726003c4aaa82487744bc0650f7f8..607a4a4abe2faf9ee2d7af017f5ec374c867bc5d 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -1,3 +1,4 @@ +use anyhow::Context as _; use gpui::{ canvas, div, fill, img, opaque_grey, point, size, AnyElement, AppContext, Bounds, Context, EventEmitter, FocusHandle, FocusableView, Img, InteractiveElement, IntoElement, Model, @@ -19,6 +20,7 @@ use workspace::{ const IMAGE_VIEWER_KIND: &str = "ImageView"; pub struct ImageItem { + id: ProjectEntryId, path: PathBuf, project_path: ProjectPath, } @@ -48,9 +50,15 @@ impl project::Item for ImageItem { .read_with(&cx, |project, cx| project.absolute_path(&path, cx))? .ok_or_else(|| anyhow::anyhow!("Failed to find the absolute path"))?; + let id = project + .update(&mut cx, |project, cx| project.entry_for_path(&path, cx))? + .context("Entry not found")? 
+ .id; + cx.new_model(|_| ImageItem { path: abs_path, project_path: path, + id, }) })) } else { @@ -59,7 +67,7 @@ impl project::Item for ImageItem { } fn entry_id(&self, _: &AppContext) -> Option { - None + Some(self.id) } fn project_path(&self, _: &AppContext) -> Option { @@ -68,18 +76,30 @@ impl project::Item for ImageItem { } pub struct ImageView { - path: PathBuf, + image: Model, focus_handle: FocusHandle, } impl Item for ImageView { type Event = (); - fn tab_content(&self, params: TabContentParams, _cx: &WindowContext) -> AnyElement { - let title = self - .path + fn for_each_project_item( + &self, + cx: &AppContext, + f: &mut dyn FnMut(gpui::EntityId, &dyn project::Item), + ) { + f(self.image.entity_id(), self.image.read(cx)) + } + + fn is_singleton(&self, _cx: &AppContext) -> bool { + true + } + + fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement { + let path = &self.image.read(cx).path; + let title = path .file_name() - .unwrap_or_else(|| self.path.as_os_str()) + .unwrap_or_else(|| path.as_os_str()) .to_string_lossy() .to_string(); Label::new(title) @@ -90,9 +110,10 @@ impl Item for ImageView { } fn tab_icon(&self, cx: &WindowContext) -> Option { + let path = &self.image.read(cx).path; ItemSettings::get_global(cx) .file_icons - .then(|| FileIcons::get_icon(self.path.as_path(), cx)) + .then(|| FileIcons::get_icon(path.as_path(), cx)) .flatten() .map(Icon::from_path) } @@ -106,7 +127,7 @@ impl Item for ImageView { Self: Sized, { Some(cx.new_view(|cx| Self { - path: self.path.clone(), + image: self.image.clone(), focus_handle: cx.focus_handle(), })) } @@ -118,7 +139,7 @@ impl SerializableItem for ImageView { } fn deserialize( - _project: Model, + project: Model, _workspace: WeakView, workspace_id: WorkspaceId, item_id: ItemId, @@ -129,10 +150,38 @@ impl SerializableItem for ImageView { .get_image_path(item_id, workspace_id)? .ok_or_else(|| anyhow::anyhow!("No image path found"))?; - cx.new_view(|cx| ImageView { - path: image_path, - focus_handle: cx.focus_handle(), - }) + let (worktree, relative_path) = project + .update(&mut cx, |project, cx| { + project.find_or_create_worktree(image_path.clone(), false, cx) + })? + .await + .context("Path not found")?; + let worktree_id = worktree.update(&mut cx, |worktree, _cx| worktree.id())?; + + let project_path = ProjectPath { + worktree_id, + path: relative_path.into(), + }; + + let id = project + .update(&mut cx, |project, cx| { + project.entry_for_path(&project_path, cx) + })? + .context("No entry found")? + .id; + + cx.update(|cx| { + let image = cx.new_model(|_| ImageItem { + id, + path: image_path, + project_path, + }); + + Ok(cx.new_view(|cx| ImageView { + image, + focus_handle: cx.focus_handle(), + })) + })? 
}) } @@ -154,7 +203,7 @@ impl SerializableItem for ImageView { let workspace_id = workspace.database_id()?; Some(cx.background_executor().spawn({ - let image_path = self.path.clone(); + let image_path = self.image.read(cx).path.clone(); async move { IMAGE_VIEWER .save_image_path(item_id, workspace_id, image_path) @@ -177,6 +226,7 @@ impl FocusableView for ImageView { impl Render for ImageView { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let image_path = self.image.read(cx).path.clone(); let checkered_background = |bounds: Bounds, _, cx: &mut WindowContext| { let square_size = 32.0; @@ -233,7 +283,7 @@ impl Render for ImageView { // TODO: In browser based Tailwind & Flex this would be h-screen and we'd use w-full .h_full() .child( - img(self.path.clone()) + img(image_path) .object_fit(ObjectFit::ScaleDown) .max_w_full() .max_h_full(), @@ -254,7 +304,7 @@ impl ProjectItem for ImageView { Self: Sized, { Self { - path: item.read(cx).path.clone(), + image: item, focus_handle: cx.focus_handle(), } } From 3617873431fea0de4681c3891f4b8b44db28ec83 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Fri, 25 Oct 2024 12:13:21 +0200 Subject: [PATCH 29/76] project panel: Fix interactions with auto-folded directories (#19723) Closes https://github.com/zed-industries/zed/issues/19566 Release Notes: - N/A --------- Co-authored-by: Peter Tripp --- crates/project_panel/src/project_panel.rs | 45 +++++++++++++++-------- 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index cb62a7f04b06067430c6d2eda814adf74aa5f3a4..62d5da762ad0987ef79bb9904dfb541463158f2d 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -497,6 +497,7 @@ impl ProjectPanel { if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let auto_fold_dirs = ProjectPanelSettings::get_global(cx).auto_fold_dirs; + let worktree = worktree.read(cx); let is_root = Some(entry) == worktree.root_entry(); let is_dir = entry.is_dir(); let is_foldable = auto_fold_dirs && self.is_foldable(entry, worktree); @@ -976,6 +977,7 @@ impl ProjectPanel { }) = self.selection { let directory_id; + let new_entry_id = self.resolve_entry(entry_id); if let Some((worktree, expanded_dir_ids)) = self .project .read(cx) @@ -983,7 +985,7 @@ impl ProjectPanel { .zip(self.expanded_dir_ids.get_mut(&worktree_id)) { let worktree = worktree.read(cx); - if let Some(mut entry) = worktree.entry_for_id(entry_id) { + if let Some(mut entry) = worktree.entry_for_id(new_entry_id) { loop { if entry.is_dir() { if let Err(ix) = expanded_dir_ids.binary_search(&entry.id) { @@ -1273,6 +1275,7 @@ impl ProjectPanel { fn select_parent(&mut self, _: &SelectParent, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_sub_entry(cx) { if let Some(parent) = entry.path.parent() { + let worktree = worktree.read(cx); if let Some(parent_entry) = worktree.entry_for_path(parent) { self.selection = Some(SelectedEntry { worktree_id: worktree.id(), @@ -1406,7 +1409,6 @@ impl ProjectPanel { .clipboard .as_ref() .filter(|clipboard| !clipboard.items().is_empty())?; - enum PasteTask { Rename(Task>), Copy(Task>>), @@ -1416,7 +1418,7 @@ impl ProjectPanel { let clip_is_cut = clipboard_entries.is_cut(); for clipboard_entry in clipboard_entries.items() { let new_path = - self.create_paste_path(clipboard_entry, self.selected_entry_handle(cx)?, cx)?; + 
self.create_paste_path(clipboard_entry, self.selected_sub_entry(cx)?, cx)?; let clip_entry_id = clipboard_entry.entry_id; let is_same_worktree = clipboard_entry.worktree_id == worktree_id; let relative_worktree_source_path = if !is_same_worktree { @@ -1558,7 +1560,7 @@ impl ProjectPanel { fn reveal_in_finder(&mut self, _: &RevealInFileManager, cx: &mut ViewContext) { if let Some((worktree, entry)) = self.selected_sub_entry(cx) { - cx.reveal_path(&worktree.abs_path().join(&entry.path)); + cx.reveal_path(&worktree.read(cx).abs_path().join(&entry.path)); } } @@ -1573,7 +1575,7 @@ impl ProjectPanel { if let Some((worktree, entry)) = self.selected_sub_entry(cx) { let abs_path = match &entry.canonical_path { Some(canonical_path) => Some(canonical_path.to_path_buf()), - None => worktree.absolutize(&entry.path).ok(), + None => worktree.read(cx).absolutize(&entry.path).ok(), }; let working_directory = if entry.is_dir() { @@ -1596,7 +1598,7 @@ impl ProjectPanel { if entry.is_dir() { let include_root = self.project.read(cx).visible_worktrees(cx).count() > 1; let dir_path = if include_root { - let mut full_path = PathBuf::from(worktree.root_name()); + let mut full_path = PathBuf::from(worktree.read(cx).root_name()); full_path.push(&entry.path); Arc::from(full_path) } else { @@ -1730,6 +1732,8 @@ impl ProjectPanel { } } + /// Finds the currently selected subentry for a given leaf entry id. If a given entry + /// has no ancestors, the project entry ID that's passed in is returned as-is. fn resolve_entry(&self, id: ProjectEntryId) -> ProjectEntryId { self.ancestors .get(&id) @@ -1756,12 +1760,12 @@ impl ProjectPanel { fn selected_sub_entry<'a>( &self, cx: &'a AppContext, - ) -> Option<(&'a Worktree, &'a project::Entry)> { + ) -> Option<(Model, &'a project::Entry)> { let (worktree, mut entry) = self.selected_entry_handle(cx)?; - let worktree = worktree.read(cx); let resolved_id = self.resolve_entry(entry.id); if resolved_id != entry.id { + let worktree = worktree.read(cx); entry = worktree.entry_for_id(resolved_id)?; } Some((worktree, entry)) @@ -1885,7 +1889,19 @@ impl ProjectPanel { } auto_folded_ancestors.clear(); visible_worktree_entries.push(entry.clone()); - if Some(entry.id) == new_entry_parent_id { + let precedes_new_entry = if let Some(new_entry_id) = new_entry_parent_id { + entry.id == new_entry_id || { + self.ancestors.get(&entry.id).map_or(false, |entries| { + entries + .ancestors + .iter() + .any(|entry_id| *entry_id == new_entry_id) + }) + } + } else { + false + }; + if precedes_new_entry { visible_worktree_entries.push(Entry { id: NEW_ENTRY_ID, kind: new_entry_kind, @@ -2546,9 +2562,7 @@ impl ProjectPanel { h_flex().h_6().w_full().child(editor.clone()) } else { h_flex().h_6().map(|mut this| { - if let Some(folded_ancestors) = - is_active.then(|| self.ancestors.get(&entry_id)).flatten() - { + if let Some(folded_ancestors) = self.ancestors.get(&entry_id) { let components = Path::new(&file_name) .components() .map(|comp| { @@ -2592,9 +2606,10 @@ impl ProjectPanel { Label::new(component) .single_line() .color(filename_text_color) - .when(index == active_index, |this| { - this.underline(true) - }), + .when( + is_active && index == active_index, + |this| this.underline(true), + ), ); this = this.child(label); From 08a3c54bacc270395e7f1ac53d0963d9d71b8aea Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 25 Oct 2024 03:29:25 -0700 Subject: [PATCH 30/76] Allow editor blocks to replace ranges of text (#19531) This PR adds the ability for editor blocks to replace lines of text, but does not yet 
use that feature anywhere. We'll update assistant patches to use replace blocks on another branch: https://github.com/zed-industries/zed/tree/assistant-patch-replace-blocks Release Notes: - N/A --------- Co-authored-by: Antonio Scandurra Co-authored-by: Richard Feldman Co-authored-by: Marshall Bowers Co-authored-by: Nathan Sobo --- crates/assistant/src/assistant_panel.rs | 27 +- crates/assistant/src/inline_assistant.rs | 11 +- crates/diagnostics/src/diagnostics.rs | 28 +- crates/editor/src/display_map.rs | 19 +- crates/editor/src/display_map/block_map.rs | 1403 ++++++++++++-------- crates/editor/src/display_map/char_map.rs | 33 + crates/editor/src/display_map/fold_map.rs | 43 + crates/editor/src/display_map/wrap_map.rs | 126 +- crates/editor/src/editor.rs | 8 +- crates/editor/src/editor_tests.rs | 3 +- crates/editor/src/element.rs | 7 +- crates/editor/src/hunk_diff.rs | 8 +- crates/repl/src/session.rs | 5 +- 13 files changed, 1120 insertions(+), 601 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 6d525a1ff08754fee6c317d90b41e47f4d776c62..479d63a76edaf6b43ce2be00bb4aedbed6151bb2 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -26,8 +26,8 @@ use collections::{BTreeSet, HashMap, HashSet}; use editor::{ actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt}, display_map::{ - BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease, - CreaseMetadata, CustomBlockId, FoldId, RenderBlock, ToDisplayPoint, + BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata, + CustomBlockId, FoldId, RenderBlock, ToDisplayPoint, }, scroll::{Autoscroll, AutoscrollStrategy}, Anchor, Editor, EditorEvent, ProposedChangeLocation, ProposedChangesEditor, RowExt, @@ -2009,13 +2009,12 @@ impl ContextEditor { }) .map(|(command, error_message)| BlockProperties { style: BlockStyle::Fixed, - position: Anchor { + height: 1, + placement: BlockPlacement::Below(Anchor { buffer_id: Some(buffer_id), excerpt_id, text_anchor: command.source_range.start, - }, - height: 1, - disposition: BlockDisposition::Below, + }), render: slash_command_error_block_renderer(error_message), priority: 0, }), @@ -2242,11 +2241,10 @@ impl ContextEditor { } else { let block_ids = editor.insert_blocks( [BlockProperties { - position: patch_start, height: path_count as u32 + 1, style: BlockStyle::Flex, render: render_block, - disposition: BlockDisposition::Below, + placement: BlockPlacement::Below(patch_start), priority: 0, }], None, @@ -2731,12 +2729,13 @@ impl ContextEditor { }) }; let create_block_properties = |message: &Message| BlockProperties { - position: buffer - .anchor_in_excerpt(excerpt_id, message.anchor_range.start) - .unwrap(), height: 2, style: BlockStyle::Sticky, - disposition: BlockDisposition::Above, + placement: BlockPlacement::Above( + buffer + .anchor_in_excerpt(excerpt_id, message.anchor_range.start) + .unwrap(), + ), priority: usize::MAX, render: render_block(MessageMetadata::from(message)), }; @@ -3372,7 +3371,7 @@ impl ContextEditor { let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap(); let image = render_image.clone(); anchor.is_valid(&buffer).then(|| BlockProperties { - position: anchor, + placement: BlockPlacement::Above(anchor), height: MAX_HEIGHT_IN_LINES, style: BlockStyle::Sticky, render: Box::new(move |cx| { @@ -3393,8 +3392,6 @@ impl ContextEditor { ) .into_any_element() }), - - disposition: BlockDisposition::Above, 
priority: 0, }) }) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 0b9ee0eae2156b10dd420808dc7bcbc4104d3008..1134747d558041c5a238522d7082ecdc0d86727d 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -9,7 +9,7 @@ use collections::{hash_map, HashMap, HashSet, VecDeque}; use editor::{ actions::{MoveDown, MoveUp, SelectAll}, display_map::{ - BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, + BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, ToDisplayPoint, }, Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode, @@ -446,15 +446,14 @@ impl InlineAssistant { let assist_blocks = vec![ BlockProperties { style: BlockStyle::Sticky, - position: range.start, + placement: BlockPlacement::Above(range.start), height: prompt_editor_height, render: build_assist_editor_renderer(prompt_editor), - disposition: BlockDisposition::Above, priority: 0, }, BlockProperties { style: BlockStyle::Sticky, - position: range.end, + placement: BlockPlacement::Below(range.end), height: 0, render: Box::new(|cx| { v_flex() @@ -464,7 +463,6 @@ impl InlineAssistant { .border_color(cx.theme().status().info_border) .into_any_element() }), - disposition: BlockDisposition::Below, priority: 0, }, ]; @@ -1179,7 +1177,7 @@ impl InlineAssistant { let height = deleted_lines_editor.update(cx, |editor, cx| editor.max_point(cx).row().0 + 1); new_blocks.push(BlockProperties { - position: new_row, + placement: BlockPlacement::Above(new_row), height, style: BlockStyle::Flex, render: Box::new(move |cx| { @@ -1191,7 +1189,6 @@ impl InlineAssistant { .child(deleted_lines_editor.clone()) .into_any_element() }), - disposition: BlockDisposition::Above, priority: 0, }); } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 687638854209ba56f8b60912db2f8b3684355bd7..cb6d07e906461007261a3ab1fd3e7995cad249f1 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -9,7 +9,7 @@ use anyhow::Result; use collections::{BTreeSet, HashSet}; use editor::{ diagnostic_block_renderer, - display_map::{BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock}, + display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock}, highlight_diagnostic_message, scroll::Autoscroll, Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset, @@ -439,11 +439,10 @@ impl ProjectDiagnosticsEditor { primary.message.split('\n').next().unwrap().to_string(); group_state.block_count += 1; blocks_to_add.push(BlockProperties { - position: header_position, + placement: BlockPlacement::Above(header_position), height: 2, style: BlockStyle::Sticky, render: diagnostic_header_renderer(primary), - disposition: BlockDisposition::Above, priority: 0, }); } @@ -459,13 +458,15 @@ impl ProjectDiagnosticsEditor { if !diagnostic.message.is_empty() { group_state.block_count += 1; blocks_to_add.push(BlockProperties { - position: (excerpt_id, entry.range.start), + placement: BlockPlacement::Below(( + excerpt_id, + entry.range.start, + )), height: diagnostic.message.matches('\n').count() as u32 + 1, style: BlockStyle::Fixed, render: diagnostic_block_renderer( diagnostic, None, true, true, ), - disposition: BlockDisposition::Below, priority: 0, }); } @@ -498,13 +499,24 @@ impl ProjectDiagnosticsEditor { editor.remove_blocks(blocks_to_remove, None, 
cx); let block_ids = editor.insert_blocks( blocks_to_add.into_iter().flat_map(|block| { - let (excerpt_id, text_anchor) = block.position; + let placement = match block.placement { + BlockPlacement::Above((excerpt_id, text_anchor)) => BlockPlacement::Above( + excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?, + ), + BlockPlacement::Below((excerpt_id, text_anchor)) => BlockPlacement::Below( + excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?, + ), + BlockPlacement::Replace(_) => { + unreachable!( + "no Replace block should have been pushed to blocks_to_add" + ) + } + }; Some(BlockProperties { - position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor)?, + placement, height: block.height, style: block.style, render: block.render, - disposition: block.disposition, priority: 0, }) }), diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 528385ebab22ff63a5e529c2808cb27275deccda..e24336d1e9979422005d7781fdcbf44c07be44af 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -29,8 +29,8 @@ use crate::{ hover_links::InlayHighlight, movement::TextLayoutDetails, EditorStyle, InlayId, RowExt, }; pub use block_map::{ - Block, BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockDisposition, BlockId, - BlockMap, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, + Block, BlockBufferRows, BlockChunks as DisplayChunks, BlockContext, BlockId, BlockMap, + BlockPlacement, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, }; use block_map::{BlockRow, BlockSnapshot}; use char_map::{CharMap, CharSnapshot}; @@ -1180,6 +1180,7 @@ impl ToDisplayPoint for Anchor { pub mod tests { use super::*; use crate::{movement, test::marked_display_snapshot}; + use block_map::BlockPlacement; use gpui::{div, font, observe, px, AppContext, BorrowAppContext, Context, Element, Hsla}; use language::{ language_settings::{AllLanguageSettings, AllLanguageSettingsContent}, @@ -1293,24 +1294,22 @@ pub mod tests { Bias::Left, )); - let disposition = if rng.gen() { - BlockDisposition::Above + let placement = if rng.gen() { + BlockPlacement::Above(position) } else { - BlockDisposition::Below + BlockPlacement::Below(position) }; let height = rng.gen_range(1..5); log::info!( - "inserting block {:?} {:?} with height {}", - disposition, - position.to_point(&buffer), + "inserting block {:?} with height {}", + placement.as_ref().map(|p| p.to_point(&buffer)), height ); let priority = rng.gen_range(1..100); BlockProperties { + placement, style: BlockStyle::Fixed, - position, height, - disposition, render: Box::new(|_| div().into_any()), priority, } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 6b0d45fc765f1d3e0e82ddc4ac20a2ff40393201..44a540bc9510a1a9254c674661952d9d31c6e708 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -6,7 +6,9 @@ use crate::{EditorStyle, GutterDimensions}; use collections::{Bound, HashMap, HashSet}; use gpui::{AnyElement, EntityId, Pixels, WindowContext}; use language::{Chunk, Patch, Point}; -use multi_buffer::{Anchor, ExcerptId, ExcerptInfo, MultiBufferRow, ToPoint as _}; +use multi_buffer::{ + Anchor, ExcerptId, ExcerptInfo, MultiBufferRow, MultiBufferSnapshot, ToPoint as _, +}; use parking_lot::Mutex; use std::{ cell::RefCell, @@ -18,7 +20,7 @@ use std::{ Arc, }, }; -use sum_tree::{Bias, SumTree, TreeMap}; +use sum_tree::{Bias, SumTree, Summary, 
TreeMap}; use text::Edit; use ui::ElementId; @@ -77,32 +79,173 @@ struct WrapRow(u32); pub type RenderBlock = Box AnyElement>; +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum BlockPlacement { + Above(T), + Below(T), + Replace(Range), +} + +impl BlockPlacement { + fn start(&self) -> &T { + match self { + BlockPlacement::Above(position) => position, + BlockPlacement::Below(position) => position, + BlockPlacement::Replace(range) => &range.start, + } + } + + fn end(&self) -> &T { + match self { + BlockPlacement::Above(position) => position, + BlockPlacement::Below(position) => position, + BlockPlacement::Replace(range) => &range.end, + } + } + + pub fn as_ref(&self) -> BlockPlacement<&T> { + match self { + BlockPlacement::Above(position) => BlockPlacement::Above(position), + BlockPlacement::Below(position) => BlockPlacement::Below(position), + BlockPlacement::Replace(range) => BlockPlacement::Replace(&range.start..&range.end), + } + } + + pub fn map(self, mut f: impl FnMut(T) -> R) -> BlockPlacement { + match self { + BlockPlacement::Above(position) => BlockPlacement::Above(f(position)), + BlockPlacement::Below(position) => BlockPlacement::Below(f(position)), + BlockPlacement::Replace(range) => BlockPlacement::Replace(f(range.start)..f(range.end)), + } + } +} + +impl BlockPlacement { + fn cmp(&self, other: &Self, buffer: &MultiBufferSnapshot) -> Ordering { + match (self, other) { + (BlockPlacement::Above(anchor_a), BlockPlacement::Above(anchor_b)) + | (BlockPlacement::Below(anchor_a), BlockPlacement::Below(anchor_b)) => { + anchor_a.cmp(anchor_b, buffer) + } + (BlockPlacement::Above(anchor_a), BlockPlacement::Below(anchor_b)) => { + anchor_a.cmp(anchor_b, buffer).then(Ordering::Less) + } + (BlockPlacement::Below(anchor_a), BlockPlacement::Above(anchor_b)) => { + anchor_a.cmp(anchor_b, buffer).then(Ordering::Greater) + } + (BlockPlacement::Above(anchor), BlockPlacement::Replace(range)) => { + anchor.cmp(&range.start, buffer).then(Ordering::Less) + } + (BlockPlacement::Replace(range), BlockPlacement::Above(anchor)) => { + range.start.cmp(anchor, buffer).then(Ordering::Greater) + } + (BlockPlacement::Below(anchor), BlockPlacement::Replace(range)) => { + anchor.cmp(&range.start, buffer).then(Ordering::Greater) + } + (BlockPlacement::Replace(range), BlockPlacement::Below(anchor)) => { + range.start.cmp(anchor, buffer).then(Ordering::Less) + } + (BlockPlacement::Replace(range_a), BlockPlacement::Replace(range_b)) => range_a + .start + .cmp(&range_b.start, buffer) + .then_with(|| range_b.end.cmp(&range_a.end, buffer)), + } + } + + fn to_wrap_row(&self, wrap_snapshot: &WrapSnapshot) -> Option> { + let buffer_snapshot = wrap_snapshot.buffer_snapshot(); + match self { + BlockPlacement::Above(position) => { + let mut position = position.to_point(buffer_snapshot); + position.column = 0; + let wrap_row = WrapRow(wrap_snapshot.make_wrap_point(position, Bias::Left).row()); + Some(BlockPlacement::Above(wrap_row)) + } + BlockPlacement::Below(position) => { + let mut position = position.to_point(buffer_snapshot); + position.column = buffer_snapshot.line_len(MultiBufferRow(position.row)); + let wrap_row = WrapRow(wrap_snapshot.make_wrap_point(position, Bias::Left).row()); + Some(BlockPlacement::Below(wrap_row)) + } + BlockPlacement::Replace(range) => { + let mut start = range.start.to_point(buffer_snapshot); + let mut end = range.end.to_point(buffer_snapshot); + if start == end { + None + } else { + start.column = 0; + let start_wrap_row = + WrapRow(wrap_snapshot.make_wrap_point(start, Bias::Left).row()); + 
end.column = buffer_snapshot.line_len(MultiBufferRow(end.row)); + let end_wrap_row = + WrapRow(wrap_snapshot.make_wrap_point(end, Bias::Left).row()); + Some(BlockPlacement::Replace(start_wrap_row..end_wrap_row)) + } + } + } + } +} + +impl Ord for BlockPlacement { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (BlockPlacement::Above(row_a), BlockPlacement::Above(row_b)) + | (BlockPlacement::Below(row_a), BlockPlacement::Below(row_b)) => row_a.cmp(row_b), + (BlockPlacement::Above(row_a), BlockPlacement::Below(row_b)) => { + row_a.cmp(row_b).then(Ordering::Less) + } + (BlockPlacement::Below(row_a), BlockPlacement::Above(row_b)) => { + row_a.cmp(row_b).then(Ordering::Greater) + } + (BlockPlacement::Above(row), BlockPlacement::Replace(range)) => { + row.cmp(&range.start).then(Ordering::Less) + } + (BlockPlacement::Replace(range), BlockPlacement::Above(row)) => { + range.start.cmp(row).then(Ordering::Greater) + } + (BlockPlacement::Below(row), BlockPlacement::Replace(range)) => { + row.cmp(&range.start).then(Ordering::Greater) + } + (BlockPlacement::Replace(range), BlockPlacement::Below(row)) => { + range.start.cmp(row).then(Ordering::Less) + } + (BlockPlacement::Replace(range_a), BlockPlacement::Replace(range_b)) => range_a + .start + .cmp(&range_b.start) + .then_with(|| range_b.end.cmp(&range_a.end)), + } + } +} + +impl PartialOrd for BlockPlacement { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + pub struct CustomBlock { id: CustomBlockId, - position: Anchor, + placement: BlockPlacement, height: u32, style: BlockStyle, render: Arc>, - disposition: BlockDisposition, priority: usize, } pub struct BlockProperties
<P>
{ - pub position: P, + pub placement: BlockPlacement
<P>
, pub height: u32, pub style: BlockStyle, pub render: RenderBlock, - pub disposition: BlockDisposition, pub priority: usize, } impl Debug for BlockProperties
<P>
{ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("BlockProperties") - .field("position", &self.position) + .field("placement", &self.placement) .field("height", &self.height) .field("style", &self.style) - .field("disposition", &self.disposition) .finish() } } @@ -125,10 +268,10 @@ pub struct BlockContext<'a, 'b> { pub editor_style: &'b EditorStyle, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)] pub enum BlockId { - Custom(CustomBlockId), ExcerptBoundary(Option), + Custom(CustomBlockId), } impl From for ElementId { @@ -152,30 +295,12 @@ impl std::fmt::Display for BlockId { } } -/// Whether the block should be considered above or below the anchor line -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub enum BlockDisposition { - Above, - Below, -} - #[derive(Clone, Debug)] struct Transform { summary: TransformSummary, block: Option, } -pub(crate) enum BlockType { - Custom(CustomBlockId), - ExcerptBoundary, -} - -pub(crate) trait BlockLike { - fn block_type(&self) -> BlockType; - fn disposition(&self) -> BlockDisposition; - fn priority(&self) -> usize; -} - #[allow(clippy::large_enum_variant)] #[derive(Clone)] pub enum Block { @@ -189,26 +314,6 @@ pub enum Block { }, } -impl BlockLike for Block { - fn block_type(&self) -> BlockType { - match self { - Block::Custom(block) => BlockType::Custom(block.id), - Block::ExcerptBoundary { .. } => BlockType::ExcerptBoundary, - } - } - - fn disposition(&self) -> BlockDisposition { - self.disposition() - } - - fn priority(&self) -> usize { - match self { - Block::Custom(block) => block.priority, - Block::ExcerptBoundary { .. } => usize::MAX, - } - } -} - impl Block { pub fn id(&self) -> BlockId { match self { @@ -219,19 +324,6 @@ impl Block { } } - fn disposition(&self) -> BlockDisposition { - match self { - Block::Custom(block) => block.disposition, - Block::ExcerptBoundary { next_excerpt, .. } => { - if next_excerpt.is_some() { - BlockDisposition::Above - } else { - BlockDisposition::Below - } - } - } - } - pub fn height(&self) -> u32 { match self { Block::Custom(block) => block.height, @@ -245,6 +337,20 @@ impl Block { Block::ExcerptBoundary { .. } => BlockStyle::Sticky, } } + + fn place_above(&self) -> bool { + match self { + Block::Custom(block) => matches!(block.placement, BlockPlacement::Above(_)), + Block::ExcerptBoundary { next_excerpt, .. } => next_excerpt.is_some(), + } + } + + fn place_below(&self) -> bool { + match self { + Block::Custom(block) => matches!(block.placement, BlockPlacement::Below(_)), + Block::ExcerptBoundary { next_excerpt, .. 
} => next_excerpt.is_none(), + } + } } impl Debug for Block { @@ -270,6 +376,8 @@ impl Debug for Block { struct TransformSummary { input_rows: u32, output_rows: u32, + longest_row: u32, + longest_row_chars: u32, } pub struct BlockChunks<'a> { @@ -298,11 +406,13 @@ impl BlockMap { excerpt_footer_height: u32, ) -> Self { let row_count = wrap_snapshot.max_point().row() + 1; + let mut transforms = SumTree::default(); + push_isomorphic(&mut transforms, row_count, &wrap_snapshot); let map = Self { next_block_id: AtomicUsize::new(0), custom_blocks: Vec::new(), custom_blocks_by_id: TreeMap::default(), - transforms: RefCell::new(SumTree::from_item(Transform::isomorphic(row_count), &())), + transforms: RefCell::new(transforms), wrap_snapshot: RefCell::new(wrap_snapshot.clone()), show_excerpt_controls, buffer_header_height, @@ -368,28 +478,29 @@ impl BlockMap { let mut transforms = self.transforms.borrow_mut(); let mut new_transforms = SumTree::default(); - let old_row_count = transforms.summary().input_rows; - let new_row_count = wrap_snapshot.max_point().row() + 1; let mut cursor = transforms.cursor::(&()); let mut last_block_ix = 0; let mut blocks_in_edit = Vec::new(); let mut edits = edits.into_iter().peekable(); while let Some(edit) = edits.next() { - // Preserve any old transforms that precede this edit. - let old_start = WrapRow(edit.old.start); - let new_start = WrapRow(edit.new.start); + let mut old_start = WrapRow(edit.old.start); + let mut new_start = WrapRow(edit.new.start); + + // Preserve transforms that: + // * strictly precedes this edit + // * isomorphic or replace transforms that end *at* the start of the edit + // * below blocks that end at the start of the edit new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &()); if let Some(transform) = cursor.item() { - if transform.is_isomorphic() && old_start == cursor.end(&()) { + if transform.summary.input_rows > 0 && cursor.end(&()) == old_start { + // Preserve the transform (push and next) new_transforms.push(transform.clone(), &()); cursor.next(&()); + + // Preserve below blocks at end of edit while let Some(transform) = cursor.item() { - if transform - .block - .as_ref() - .map_or(false, |b| b.disposition().is_below()) - { + if transform.block.as_ref().map_or(false, |b| b.place_below()) { new_transforms.push(transform.clone(), &()); cursor.next(&()); } else { @@ -399,50 +510,70 @@ impl BlockMap { } } - // Preserve any portion of an old transform that precedes this edit. - let extent_before_edit = old_start.0 - cursor.start().0; - push_isomorphic(&mut new_transforms, extent_before_edit); + // Ensure the edit starts at a transform boundary. + // If the edit starts within an isomorphic transform, preserve its prefix + // If the edit lands within a replacement block, expand the edit to include the start of the replaced input range + let mut preserved_blocks_above_edit = false; + let transform = cursor.item().unwrap(); + let transform_rows_before_edit = old_start.0 - cursor.start().0; + if transform_rows_before_edit > 0 { + if transform.block.is_none() { + // Preserve any portion of the old isomorphic transform that precedes this edit. + push_isomorphic( + &mut new_transforms, + transform_rows_before_edit, + wrap_snapshot, + ); + } else { + // We landed within a block that replaces some lines, so we + // extend the edit to start at the beginning of the + // replacement. 
+ debug_assert!(transform.summary.input_rows > 0); + old_start.0 -= transform_rows_before_edit; + new_start.0 -= transform_rows_before_edit; + // The blocks *above* it are already in the new transforms, so + // we don't need to re-insert them when querying blocks. + preserved_blocks_above_edit = true; + } + } - // Skip over any old transforms that intersect this edit. + // Decide where the edit ends + // * It should end at a transform boundary + // * Coalesce edits that intersect the same transform let mut old_end = WrapRow(edit.old.end); let mut new_end = WrapRow(edit.new.end); - cursor.seek(&old_end, Bias::Left, &()); - cursor.next(&()); - if old_end == *cursor.start() { - while let Some(transform) = cursor.item() { - if transform - .block - .as_ref() - .map_or(false, |b| b.disposition().is_below()) - { + loop { + // Seek to the transform starting at or after the end of the edit + cursor.seek(&old_end, Bias::Left, &()); + cursor.next(&()); + + // Extend edit to the end of the discarded transform so it is reconstructed in full + let transform_rows_after_edit = cursor.start().0 - old_end.0; + old_end.0 += transform_rows_after_edit; + new_end.0 += transform_rows_after_edit; + + // Combine this edit with any subsequent edits that intersect the same transform. + while let Some(next_edit) = edits.peek() { + if next_edit.old.start <= cursor.start().0 { + old_end = WrapRow(next_edit.old.end); + new_end = WrapRow(next_edit.new.end); + cursor.seek(&old_end, Bias::Left, &()); cursor.next(&()); + edits.next(); } else { break; } } + + if *cursor.start() == old_end { + break; + } } - // Combine this edit with any subsequent edits that intersect the same transform. - while let Some(next_edit) = edits.peek() { - if next_edit.old.start <= cursor.start().0 { - old_end = WrapRow(next_edit.old.end); - new_end = WrapRow(next_edit.new.end); - cursor.seek(&old_end, Bias::Left, &()); + // Discard below blocks at the end of the edit. They'll be reconstructed. 
+ while let Some(transform) = cursor.item() { + if transform.block.as_ref().map_or(false, |b| b.place_below()) { cursor.next(&()); - if old_end == *cursor.start() { - while let Some(transform) = cursor.item() { - if transform - .block - .as_ref() - .map_or(false, |b| b.disposition().is_below()) - { - cursor.next(&()); - } else { - break; - } - } - } - edits.next(); } else { break; } @@ -455,9 +586,10 @@ impl BlockMap { let start_block_ix = match self.custom_blocks[last_block_ix..].binary_search_by(|probe| { probe - .position + .start() .to_point(buffer) .cmp(&new_buffer_start) + // Move left until we find the index of the first block starting within this edit .then(Ordering::Greater) }) { Ok(ix) | Err(ix) => last_block_ix + ix, @@ -473,7 +605,7 @@ impl BlockMap { end_bound = Bound::Excluded(new_buffer_end); match self.custom_blocks[start_block_ix..].binary_search_by(|probe| { probe - .position + .start() .to_point(buffer) .cmp(&new_buffer_end) .then(Ordering::Greater) @@ -484,19 +616,17 @@ impl BlockMap { last_block_ix = end_block_ix; debug_assert!(blocks_in_edit.is_empty()); - blocks_in_edit.extend(self.custom_blocks[start_block_ix..end_block_ix].iter().map( - |block| { - let mut position = block.position.to_point(buffer); - match block.disposition { - BlockDisposition::Above => position.column = 0, - BlockDisposition::Below => { - position.column = buffer.line_len(MultiBufferRow(position.row)) - } - } - let position = wrap_snapshot.make_wrap_point(position, Bias::Left); - (position.row(), Block::Custom(block.clone())) - }, - )); + + blocks_in_edit.extend( + self.custom_blocks[start_block_ix..end_block_ix] + .iter() + .filter_map(|block| { + Some(( + block.placement.to_wrap_row(wrap_snapshot)?, + Block::Custom(block.clone()), + )) + }), + ); if buffer.show_headers() { blocks_in_edit.extend(BlockMap::header_and_footer_blocks( @@ -514,26 +644,49 @@ impl BlockMap { // For each of these blocks, insert a new isomorphic transform preceding the block, // and then insert the block itself. - for (block_row, block) in blocks_in_edit.drain(..) { - let insertion_row = match block.disposition() { - BlockDisposition::Above => block_row, - BlockDisposition::Below => block_row + 1, + for (block_placement, block) in blocks_in_edit.drain(..) { + if preserved_blocks_above_edit + && block_placement == BlockPlacement::Above(new_start) + { + continue; + } + + let mut summary = TransformSummary { + input_rows: 0, + output_rows: block.height(), + longest_row: 0, + longest_row_chars: 0, }; - let extent_before_block = insertion_row - new_transforms.summary().input_rows; - push_isomorphic(&mut new_transforms, extent_before_block); - new_transforms.push(Transform::block(block), &()); - } - old_end = WrapRow(old_end.0.min(old_row_count)); - new_end = WrapRow(new_end.0.min(new_row_count)); + let rows_before_block; + match block_placement { + BlockPlacement::Above(position) => { + rows_before_block = position.0 - new_transforms.summary().input_rows; + } + BlockPlacement::Below(position) => { + rows_before_block = (position.0 + 1) - new_transforms.summary().input_rows; + } + BlockPlacement::Replace(range) => { + rows_before_block = range.start.0 - new_transforms.summary().input_rows; + summary.input_rows = range.end.0 - range.start.0 + 1; + } + } - // Insert an isomorphic transform after the final block. 
- let extent_after_last_block = new_end.0 - new_transforms.summary().input_rows; - push_isomorphic(&mut new_transforms, extent_after_last_block); + push_isomorphic(&mut new_transforms, rows_before_block, wrap_snapshot); + new_transforms.push( + Transform { + summary, + block: Some(block), + }, + &(), + ); + } - // Preserve any portion of the old transform after this edit. - let extent_after_edit = cursor.start().0 - old_end.0; - push_isomorphic(&mut new_transforms, extent_after_edit); + // Insert an isomorphic transform after the final block. + let rows_after_last_block = new_end + .0 + .saturating_sub(new_transforms.summary().input_rows); + push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot); } new_transforms.append(cursor.suffix(&()), &()); @@ -558,7 +711,7 @@ impl BlockMap { self.show_excerpt_controls } - pub fn header_and_footer_blocks<'a, 'b: 'a, 'c: 'a + 'b, R, T>( + fn header_and_footer_blocks<'a, 'b: 'a, 'c: 'a + 'b, R, T>( show_excerpt_controls: bool, excerpt_footer_height: u32, buffer_header_height: u32, @@ -566,7 +719,7 @@ impl BlockMap { buffer: &'b multi_buffer::MultiBufferSnapshot, range: R, wrap_snapshot: &'c WrapSnapshot, - ) -> impl Iterator + 'b + ) -> impl Iterator, Block)> + 'b where R: RangeBounds, T: multi_buffer::ToOffset, @@ -619,7 +772,11 @@ impl BlockMap { } Some(( - wrap_row, + if excerpt_boundary.next.is_some() { + BlockPlacement::Above(WrapRow(wrap_row)) + } else { + BlockPlacement::Below(WrapRow(wrap_row)) + }, Block::ExcerptBoundary { prev_excerpt: excerpt_boundary.prev, next_excerpt: excerpt_boundary.next, @@ -631,45 +788,96 @@ impl BlockMap { }) } - pub(crate) fn sort_blocks(blocks: &mut [(u32, B)]) { - // Place excerpt headers and footers above custom blocks on the same row - blocks.sort_unstable_by(|(row_a, block_a), (row_b, block_b)| { - row_a.cmp(row_b).then_with(|| { - block_a - .disposition() - .cmp(&block_b.disposition()) - .then_with(|| match ((block_a.block_type()), (block_b.block_type())) { - (BlockType::ExcerptBoundary, BlockType::ExcerptBoundary) => Ordering::Equal, - (BlockType::ExcerptBoundary, _) => Ordering::Less, - (_, BlockType::ExcerptBoundary) => Ordering::Greater, - (BlockType::Custom(a_id), BlockType::Custom(b_id)) => block_b - .priority() - .cmp(&block_a.priority()) - .then_with(|| a_id.cmp(&b_id)), - }) - }) + fn sort_blocks(blocks: &mut Vec<(BlockPlacement, Block)>) { + blocks.sort_unstable_by(|(placement_a, block_a), (placement_b, block_b)| { + placement_a + .cmp(&placement_b) + .then_with(|| match (block_a, block_b) { + ( + Block::ExcerptBoundary { + next_excerpt: next_excerpt_a, + .. + }, + Block::ExcerptBoundary { + next_excerpt: next_excerpt_b, + .. + }, + ) => next_excerpt_a + .as_ref() + .map(|excerpt| excerpt.id) + .cmp(&next_excerpt_b.as_ref().map(|excerpt| excerpt.id)), + (Block::ExcerptBoundary { next_excerpt, .. }, Block::Custom(_)) => { + if next_excerpt.is_some() { + Ordering::Less + } else { + Ordering::Greater + } + } + (Block::Custom(_), Block::ExcerptBoundary { next_excerpt, .. 
}) => { + if next_excerpt.is_some() { + Ordering::Greater + } else { + Ordering::Less + } + } + (Block::Custom(block_a), Block::Custom(block_b)) => block_a + .priority + .cmp(&block_b.priority) + .then_with(|| block_a.id.cmp(&block_b.id)), + }) + }); + blocks.dedup_by(|(right, _), (left, _)| match (left, right) { + (BlockPlacement::Replace(range), BlockPlacement::Above(row)) => { + range.start < *row && range.end >= *row + } + (BlockPlacement::Replace(range), BlockPlacement::Below(row)) => { + range.start <= *row && range.end > *row + } + (BlockPlacement::Replace(range_a), BlockPlacement::Replace(range_b)) => { + if range_a.end >= range_b.start && range_a.start <= range_b.end { + range_a.end = range_a.end.max(range_b.end); + true + } else { + false + } + } + _ => false, }); } } -fn push_isomorphic(tree: &mut SumTree, rows: u32) { +fn push_isomorphic(tree: &mut SumTree, rows: u32, wrap_snapshot: &WrapSnapshot) { if rows == 0 { return; } - let mut extent = Some(rows); + let wrap_row_start = tree.summary().input_rows; + let wrap_row_end = wrap_row_start + rows; + let wrap_summary = wrap_snapshot.text_summary_for_range(wrap_row_start..wrap_row_end); + let summary = TransformSummary { + input_rows: rows, + output_rows: rows, + longest_row: wrap_summary.longest_row, + longest_row_chars: wrap_summary.longest_row_chars, + }; + let mut merged = false; tree.update_last( |last_transform| { - if last_transform.is_isomorphic() { - let extent = extent.take().unwrap(); - last_transform.summary.input_rows += extent; - last_transform.summary.output_rows += extent; + if last_transform.block.is_none() { + last_transform.summary.add_summary(&summary, &()); + merged = true; } }, &(), ); - if let Some(extent) = extent { - tree.push(Transform::isomorphic(extent), &()); + if !merged { + tree.push( + Transform { + summary, + block: None, + }, + &(), + ); } } @@ -711,7 +919,7 @@ impl<'a> BlockMapReader<'a> { pub fn row_for_block(&self, block_id: CustomBlockId) -> Option { let block = self.blocks.iter().find(|block| block.id == block_id)?; let buffer_row = block - .position + .start() .to_point(self.wrap_snapshot.buffer_snapshot()) .row; let wrap_row = self @@ -735,9 +943,7 @@ impl<'a> BlockMapReader<'a> { break; } - if let Some(BlockType::Custom(id)) = - transform.block.as_ref().map(|block| block.block_type()) - { + if let Some(BlockId::Custom(id)) = transform.block.as_ref().map(|block| block.id()) { if id == block_id { return Some(cursor.start().1); } @@ -762,21 +968,27 @@ impl<'a> BlockMapWriter<'a> { let mut previous_wrap_row_range: Option> = None; for block in blocks { + if let BlockPlacement::Replace(_) = &block.placement { + debug_assert!(block.height > 0); + } + let id = CustomBlockId(self.0.next_block_id.fetch_add(1, SeqCst)); ids.push(id); - let position = block.position; - let point = position.to_point(buffer); - let wrap_row = wrap_snapshot - .make_wrap_point(Point::new(point.row, 0), Bias::Left) - .row(); + let start = block.placement.start().to_point(buffer); + let end = block.placement.end().to_point(buffer); + let start_wrap_row = wrap_snapshot.make_wrap_point(start, Bias::Left).row(); + let end_wrap_row = wrap_snapshot.make_wrap_point(end, Bias::Left).row(); let (start_row, end_row) = { - previous_wrap_row_range.take_if(|range| !range.contains(&wrap_row)); + previous_wrap_row_range.take_if(|range| { + !range.contains(&start_wrap_row) || !range.contains(&end_wrap_row) + }); let range = previous_wrap_row_range.get_or_insert_with(|| { - let start_row = 
wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0)); + let start_row = + wrap_snapshot.prev_row_boundary(WrapPoint::new(start_wrap_row, 0)); let end_row = wrap_snapshot - .next_row_boundary(WrapPoint::new(wrap_row, 0)) + .next_row_boundary(WrapPoint::new(end_wrap_row, 0)) .unwrap_or(wrap_snapshot.max_point().row() + 1); start_row..end_row }); @@ -785,16 +997,15 @@ impl<'a> BlockMapWriter<'a> { let block_ix = match self .0 .custom_blocks - .binary_search_by(|probe| probe.position.cmp(&position, buffer)) + .binary_search_by(|probe| probe.placement.cmp(&block.placement, buffer)) { Ok(ix) | Err(ix) => ix, }; let new_block = Arc::new(CustomBlock { id, - position, + placement: block.placement, height: block.height, render: Arc::new(Mutex::new(block.render)), - disposition: block.disposition, style: block.style, priority: block.priority, }); @@ -819,34 +1030,41 @@ impl<'a> BlockMapWriter<'a> { for block in &mut self.0.custom_blocks { if let Some(new_height) = heights.remove(&block.id) { + if let BlockPlacement::Replace(_) = &block.placement { + debug_assert!(new_height > 0); + } + if block.height != new_height { let new_block = CustomBlock { id: block.id, - position: block.position, + placement: block.placement.clone(), height: new_height, style: block.style, render: block.render.clone(), - disposition: block.disposition, priority: block.priority, }; let new_block = Arc::new(new_block); *block = new_block.clone(); self.0.custom_blocks_by_id.insert(block.id, new_block); - let buffer_row = block.position.to_point(buffer).row; - if last_block_buffer_row != Some(buffer_row) { - last_block_buffer_row = Some(buffer_row); - let wrap_row = wrap_snapshot - .make_wrap_point(Point::new(buffer_row, 0), Bias::Left) + let start_row = block.placement.start().to_point(buffer).row; + let end_row = block.placement.end().to_point(buffer).row; + if last_block_buffer_row != Some(end_row) { + last_block_buffer_row = Some(end_row); + let start_wrap_row = wrap_snapshot + .make_wrap_point(Point::new(start_row, 0), Bias::Left) + .row(); + let end_wrap_row = wrap_snapshot + .make_wrap_point(Point::new(end_row, 0), Bias::Left) .row(); - let start_row = - wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0)); - let end_row = wrap_snapshot - .next_row_boundary(WrapPoint::new(wrap_row, 0)) + let start = + wrap_snapshot.prev_row_boundary(WrapPoint::new(start_wrap_row, 0)); + let end = wrap_snapshot + .next_row_boundary(WrapPoint::new(end_wrap_row, 0)) .unwrap_or(wrap_snapshot.max_point().row() + 1); edits.push(Edit { - old: start_row..end_row, - new: start_row..end_row, + old: start..end, + new: start..end, }) } } @@ -864,19 +1082,21 @@ impl<'a> BlockMapWriter<'a> { let mut previous_wrap_row_range: Option> = None; self.0.custom_blocks.retain(|block| { if block_ids.contains(&block.id) { - let buffer_row = block.position.to_point(buffer).row; - if last_block_buffer_row != Some(buffer_row) { - last_block_buffer_row = Some(buffer_row); - let wrap_row = wrap_snapshot - .make_wrap_point(Point::new(buffer_row, 0), Bias::Left) - .row(); + let start = block.placement.start().to_point(buffer); + let end = block.placement.end().to_point(buffer); + if last_block_buffer_row != Some(end.row) { + last_block_buffer_row = Some(end.row); + let start_wrap_row = wrap_snapshot.make_wrap_point(start, Bias::Left).row(); + let end_wrap_row = wrap_snapshot.make_wrap_point(end, Bias::Left).row(); let (start_row, end_row) = { - previous_wrap_row_range.take_if(|range| !range.contains(&wrap_row)); + previous_wrap_row_range.take_if(|range| { 
+ !range.contains(&start_wrap_row) || !range.contains(&end_wrap_row) + }); let range = previous_wrap_row_range.get_or_insert_with(|| { let start_row = - wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0)); + wrap_snapshot.prev_row_boundary(WrapPoint::new(start_wrap_row, 0)); let end_row = wrap_snapshot - .next_row_boundary(WrapPoint::new(wrap_row, 0)) + .next_row_boundary(WrapPoint::new(end_wrap_row, 0)) .unwrap_or(wrap_snapshot.max_point().row() + 1); start_row..end_row }); @@ -921,31 +1141,24 @@ impl BlockSnapshot { highlights: Highlights<'a>, ) -> BlockChunks<'a> { let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); + let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); - let input_end = { - cursor.seek(&BlockRow(rows.end), Bias::Right, &()); - let overshoot = if cursor - .item() - .map_or(false, |transform| transform.is_isomorphic()) - { - rows.end - cursor.start().0 .0 - } else { - 0 - }; - cursor.start().1 .0 + overshoot - }; - let input_start = { - cursor.seek(&BlockRow(rows.start), Bias::Right, &()); - let overshoot = if cursor - .item() - .map_or(false, |transform| transform.is_isomorphic()) - { - rows.start - cursor.start().0 .0 - } else { - 0 - }; - cursor.start().1 .0 + overshoot - }; + cursor.seek(&BlockRow(rows.start), Bias::Right, &()); + let transform_output_start = cursor.start().0 .0; + let transform_input_start = cursor.start().1 .0; + + let mut input_start = transform_input_start; + let mut input_end = transform_input_start; + if let Some(transform) = cursor.item() { + if transform.block.is_none() { + input_start += rows.start - transform_output_start; + input_end += cmp::min( + rows.end - transform_output_start, + transform.summary.input_rows, + ); + } + } + BlockChunks { input_chunks: self.wrap_snapshot.chunks( input_start..input_end, @@ -964,7 +1177,10 @@ impl BlockSnapshot { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&start_row, Bias::Right, &()); let (output_start, input_start) = cursor.start(); - let overshoot = if cursor.item().map_or(false, |t| t.is_isomorphic()) { + let overshoot = if cursor + .item() + .map_or(false, |transform| transform.block.is_none()) + { start_row.0 - output_start.0 } else { 0 @@ -1049,13 +1265,12 @@ impl BlockSnapshot { } pub fn max_point(&self) -> BlockPoint { - let row = self.transforms.summary().output_rows - 1; + let row = self.transforms.summary().output_rows.saturating_sub(1); BlockPoint::new(row, self.line_len(BlockRow(row))) } pub fn longest_row(&self) -> u32 { - let input_row = self.wrap_snapshot.longest_row(); - self.to_block_point(WrapPoint::new(input_row, 0)).row + self.transforms.summary().longest_row } pub(super) fn line_len(&self, row: BlockRow) -> u32 { @@ -1069,6 +1284,8 @@ impl BlockSnapshot { } else { self.wrap_snapshot.line_len(input_start.0 + overshoot) } + } else if row.0 == 0 { + 0 } else { panic!("row out of range"); } @@ -1091,26 +1308,40 @@ impl BlockSnapshot { loop { if let Some(transform) = cursor.item() { - if transform.is_isomorphic() { - let (output_start_row, input_start_row) = cursor.start(); - let (output_end_row, input_end_row) = cursor.end(&()); - let output_start = Point::new(output_start_row.0, 0); - let input_start = Point::new(input_start_row.0, 0); - let input_end = Point::new(input_end_row.0, 0); - let input_point = if point.row >= output_end_row.0 { - let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1); - self.wrap_snapshot - .clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias) - } 
else { - let output_overshoot = point.0.saturating_sub(output_start); - self.wrap_snapshot - .clip_point(WrapPoint(input_start + output_overshoot), bias) - }; - - if (input_start..input_end).contains(&input_point.0) { - let input_overshoot = input_point.0.saturating_sub(input_start); - return BlockPoint(output_start + input_overshoot); + let (output_start_row, input_start_row) = cursor.start(); + let (output_end_row, input_end_row) = cursor.end(&()); + let output_start = Point::new(output_start_row.0, 0); + let output_end = Point::new(output_end_row.0, 0); + let input_start = Point::new(input_start_row.0, 0); + let input_end = Point::new(input_end_row.0, 0); + + match transform.block.as_ref() { + Some(Block::Custom(block)) + if matches!(block.placement, BlockPlacement::Replace(_)) => + { + if bias == Bias::Left { + return BlockPoint(output_start); + } else { + return BlockPoint(Point::new(output_end.row - 1, 0)); + } } + None => { + let input_point = if point.row >= output_end_row.0 { + let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1); + self.wrap_snapshot + .clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias) + } else { + let output_overshoot = point.0.saturating_sub(output_start); + self.wrap_snapshot + .clip_point(WrapPoint(input_start + output_overshoot), bias) + }; + + if (input_start..input_end).contains(&input_point.0) { + let input_overshoot = input_point.0.saturating_sub(input_start); + return BlockPoint(output_start + input_overshoot); + } + } + _ => {} } if search_left { @@ -1132,27 +1363,40 @@ impl BlockSnapshot { let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&()); cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &()); if let Some(transform) = cursor.item() { - debug_assert!(transform.is_isomorphic()); + if transform.block.is_some() { + let wrap_start = WrapPoint::new(cursor.start().0 .0, 0); + if wrap_start == wrap_point { + BlockPoint::new(cursor.start().1 .0, 0) + } else { + BlockPoint::new(cursor.end(&()).1 .0 - 1, 0) + } + } else { + let (input_start_row, output_start_row) = cursor.start(); + let input_start = Point::new(input_start_row.0, 0); + let output_start = Point::new(output_start_row.0, 0); + let input_overshoot = wrap_point.0 - input_start; + BlockPoint(output_start + input_overshoot) + } } else { - return self.max_point(); + self.max_point() } - - let (input_start_row, output_start_row) = cursor.start(); - let input_start = Point::new(input_start_row.0, 0); - let output_start = Point::new(output_start_row.0, 0); - let input_overshoot = wrap_point.0 - input_start; - BlockPoint(output_start + input_overshoot) } pub fn to_wrap_point(&self, block_point: BlockPoint) -> WrapPoint { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&()); cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); if let Some(transform) = cursor.item() { - match transform.block.as_ref().map(|b| b.disposition()) { - Some(BlockDisposition::Above) => WrapPoint::new(cursor.start().1 .0, 0), - Some(BlockDisposition::Below) => { - let wrap_row = cursor.start().1 .0 - 1; - WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row)) + match transform.block.as_ref() { + Some(block) => { + if block.place_below() { + let wrap_row = cursor.start().1 .0 - 1; + WrapPoint::new(wrap_row, self.wrap_snapshot.line_len(wrap_row)) + } else if block.place_above() || block_point.row == cursor.start().0 .0 { + WrapPoint::new(cursor.start().1 .0, 0) + } else { + let wrap_row = cursor.end(&()).1 .0 - 1; + WrapPoint::new(wrap_row, 
self.wrap_snapshot.line_len(wrap_row)) + } } None => { let overshoot = block_point.row - cursor.start().0 .0; @@ -1166,33 +1410,8 @@ impl BlockSnapshot { } } -impl Transform { - fn isomorphic(rows: u32) -> Self { - Self { - summary: TransformSummary { - input_rows: rows, - output_rows: rows, - }, - block: None, - } - } - - fn block(block: Block) -> Self { - Self { - summary: TransformSummary { - input_rows: 0, - output_rows: block.height(), - }, - block: Some(block), - } - } - - fn is_isomorphic(&self) -> bool { - self.block.is_none() - } -} - impl<'a> BlockChunks<'a> { + /// Go to the next transform fn advance(&mut self) { self.transforms.next(&()); while let Some(transform) = self.transforms.item() { @@ -1206,6 +1425,23 @@ impl<'a> BlockChunks<'a> { break; } } + + if self + .transforms + .item() + .map_or(false, |transform| transform.block.is_none()) + { + let start_input_row = self.transforms.start().1 .0; + let start_output_row = self.transforms.start().0 .0; + if start_output_row < self.max_output_row { + let end_input_row = cmp::min( + self.transforms.end(&()).1 .0, + start_input_row + (self.max_output_row - start_output_row), + ); + self.input_chunks.seek(start_input_row..end_input_row); + } + self.input_chunk = Chunk::default(); + } } } @@ -1241,16 +1477,17 @@ impl<'a> Iterator for BlockChunks<'a> { if let Some(input_chunk) = self.input_chunks.next() { self.input_chunk = input_chunk; } else { - self.output_row += 1; if self.output_row < self.max_output_row { + self.output_row += 1; self.advance(); - return Some(Chunk { - text: "\n", - ..Default::default() - }); - } else { - return None; + if self.transforms.item().is_some() { + return Some(Chunk { + text: "\n", + ..Default::default() + }); + } } + return None; } } @@ -1258,6 +1495,7 @@ impl<'a> Iterator for BlockChunks<'a> { let (prefix_rows, prefix_bytes) = offset_for_row(self.input_chunk.text, transform_end - self.output_row); self.output_row += prefix_rows; + let (mut prefix, suffix) = self.input_chunk.text.split_at(prefix_bytes); self.input_chunk.text = suffix; if self.output_row == transform_end { @@ -1291,17 +1529,20 @@ impl<'a> Iterator for BlockBufferRows<'a> { if self.output_row.0 >= self.transforms.end(&()).0 .0 { self.transforms.next(&()); - } + while let Some(transform) = self.transforms.item() { + if transform + .block + .as_ref() + .map_or(false, |block| block.height() == 0) + { + self.transforms.next(&()); + } else { + break; + } + } - while let Some(transform) = self.transforms.item() { - if transform - .block - .as_ref() - .map_or(false, |block| block.height() == 0) - { - self.transforms.next(&()); - } else { - break; + if self.transforms.item()?.block.is_none() { + self.input_buffer_rows.seek(self.transforms.start().1 .0); } } @@ -1330,6 +1571,10 @@ impl sum_tree::Summary for TransformSummary { } fn add_summary(&mut self, summary: &Self, _: &()) { + if summary.longest_row_chars > self.longest_row_chars { + self.longest_row = self.output_rows + summary.longest_row; + self.longest_row_chars = summary.longest_row_chars; + } self.input_rows += summary.input_rows; self.output_rows += summary.output_rows; } @@ -1355,12 +1600,6 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for BlockRow { } } -impl BlockDisposition { - fn is_below(&self) -> bool { - matches!(self, BlockDisposition::Below) - } -} - impl<'a> Deref for BlockContext<'a, '_> { type Target = WindowContext<'a>; @@ -1380,8 +1619,12 @@ impl CustomBlock { self.render.lock()(cx) } - pub fn position(&self) -> &Anchor { - &self.position + pub fn start(&self) 
-> Anchor { + *self.placement.start() + } + + pub fn end(&self) -> Anchor { + *self.placement.end() } pub fn style(&self) -> BlockStyle { @@ -1393,9 +1636,11 @@ impl Debug for CustomBlock { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Block") .field("id", &self.id) - .field("position", &self.position) - .field("disposition", &self.disposition) - .finish() + .field("placement", &self.placement) + .field("height", &self.height) + .field("style", &self.style) + .field("priority", &self.priority) + .finish_non_exhaustive() } } @@ -1465,25 +1710,22 @@ mod tests { let block_ids = writer.insert(vec![ BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(1, 0)), + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 0))), height: 1, - disposition: BlockDisposition::Above, render: Box::new(|_| div().into_any()), priority: 0, }, BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(1, 2)), + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 2))), height: 2, - disposition: BlockDisposition::Above, render: Box::new(|_| div().into_any()), priority: 0, }, BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(3, 3)), + placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(3, 3))), height: 3, - disposition: BlockDisposition::Below, render: Box::new(|_| div().into_any()), priority: 0, }, @@ -1720,25 +1962,22 @@ mod tests { let block_ids = writer.insert(vec![ BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(1, 0)), + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 0))), height: 1, - disposition: BlockDisposition::Above, render: Box::new(|_| div().into_any()), priority: 0, }, BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(1, 2)), + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 2))), height: 2, - disposition: BlockDisposition::Above, render: Box::new(|_| div().into_any()), priority: 0, }, BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(3, 3)), + placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(3, 3))), height: 3, - disposition: BlockDisposition::Below, render: Box::new(|_| div().into_any()), priority: 0, }, @@ -1832,16 +2071,14 @@ mod tests { writer.insert(vec![ BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(1, 12)), - disposition: BlockDisposition::Above, + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 12))), render: Box::new(|_| div().into_any()), height: 1, priority: 0, }, BlockProperties { style: BlockStyle::Fixed, - position: buffer_snapshot.anchor_after(Point::new(1, 1)), - disposition: BlockDisposition::Below, + placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(1, 1))), render: Box::new(|_| div().into_any()), height: 1, priority: 0, @@ -1857,6 +2094,127 @@ mod tests { ); } + #[gpui::test] + fn test_replace_lines(cx: &mut gpui::TestAppContext) { + cx.update(init_test); + + let text = "line1\nline2\nline3\nline4\nline5"; + + let buffer = cx.update(|cx| MultiBuffer::build_simple(text, cx)); + let buffer_subscription = buffer.update(cx, |buffer, _cx| buffer.subscribe()); + let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); + let (mut 
inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); + let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); + let tab_size = 1.try_into().unwrap(); + let (mut tab_map, tab_snapshot) = CharMap::new(fold_snapshot, tab_size); + let (wrap_map, wraps_snapshot) = + cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx)); + let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 0); + + let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); + writer.insert(vec![BlockProperties { + style: BlockStyle::Fixed, + placement: BlockPlacement::Replace( + buffer_snapshot.anchor_after(Point::new(1, 3)) + ..buffer_snapshot.anchor_before(Point::new(3, 1)), + ), + height: 4, + render: Box::new(|_| div().into_any()), + priority: 0, + }]); + + let blocks_snapshot = block_map.read(wraps_snapshot, Default::default()); + assert_eq!(blocks_snapshot.text(), "line1\n\n\n\n\nline5"); + + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(2, 0)..Point::new(3, 0), "")], None, cx); + buffer.snapshot(cx) + }); + let (inlay_snapshot, inlay_edits) = inlay_map.sync( + buffer_snapshot.clone(), + buffer_subscription.consume().into_inner(), + ); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); + let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { + wrap_map.sync(tab_snapshot, tab_edits, cx) + }); + let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); + assert_eq!(blocks_snapshot.text(), "line1\n\n\n\n\nline5"); + + let buffer_snapshot = buffer.update(cx, |buffer, cx| { + buffer.edit( + [( + Point::new(1, 5)..Point::new(1, 5), + "\nline 6\nline7\nline 8\nline 9", + )], + None, + cx, + ); + buffer.snapshot(cx) + }); + let (inlay_snapshot, inlay_edits) = inlay_map.sync( + buffer_snapshot.clone(), + buffer_subscription.consume().into_inner(), + ); + let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); + let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); + let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { + wrap_map.sync(tab_snapshot, tab_edits, cx) + }); + let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); + assert_eq!(blocks_snapshot.text(), "line1\n\n\n\n\nline5"); + + // Ensure blocks inserted above the start or below the end of the replaced region are shown. + let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); + writer.insert(vec![ + BlockProperties { + style: BlockStyle::Fixed, + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 3))), + height: 1, + render: Box::new(|_| div().into_any()), + priority: 0, + }, + BlockProperties { + style: BlockStyle::Fixed, + placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(6, 2))), + height: 1, + render: Box::new(|_| div().into_any()), + priority: 0, + }, + ]); + let blocks_snapshot = block_map.read(wraps_snapshot.clone(), Default::default()); + assert_eq!(blocks_snapshot.text(), "line1\n\n\n\n\n\n\nline5"); + + // Ensure blocks inserted *inside* replaced region are hidden. 
+ let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); + writer.insert(vec![ + BlockProperties { + style: BlockStyle::Fixed, + placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(1, 3))), + height: 1, + render: Box::new(|_| div().into_any()), + priority: 0, + }, + BlockProperties { + style: BlockStyle::Fixed, + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(2, 1))), + height: 1, + render: Box::new(|_| div().into_any()), + priority: 0, + }, + BlockProperties { + style: BlockStyle::Fixed, + placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(6, 1))), + height: 1, + render: Box::new(|_| div().into_any()), + priority: 0, + }, + ]); + let blocks_snapshot = block_map.read(wraps_snapshot, Default::default()); + assert_eq!(blocks_snapshot.text(), "line1\n\n\n\n\n\n\nline5"); + } + #[gpui::test(iterations = 100)] fn test_random_blocks(cx: &mut gpui::TestAppContext, mut rng: StdRng) { cx.update(init_test); @@ -1879,14 +2237,21 @@ mod tests { log::info!("Wrap width: {:?}", wrap_width); log::info!("Excerpt Header Height: {:?}", excerpt_header_height); log::info!("Excerpt Footer Height: {:?}", excerpt_footer_height); - - let buffer = if rng.gen() { + let is_singleton = rng.gen(); + let buffer = if is_singleton { let len = rng.gen_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); - log::info!("initial buffer text: {:?}", text); + log::info!("initial singleton buffer text: {:?}", text); cx.update(|cx| MultiBuffer::build_simple(&text, cx)) } else { - cx.update(|cx| MultiBuffer::build_random(&mut rng, cx)) + cx.update(|cx| { + let multibuffer = MultiBuffer::build_random(&mut rng, cx); + log::info!( + "initial multi-buffer text: {:?}", + multibuffer.read(cx).read(cx).text() + ); + multibuffer + }) }; let mut buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); @@ -1902,7 +2267,6 @@ mod tests { excerpt_header_height, excerpt_footer_height, ); - let mut custom_blocks = Vec::new(); for _ in 0..operations { let mut buffer_edits = Vec::new(); @@ -1921,27 +2285,33 @@ mod tests { let block_properties = (0..block_count) .map(|_| { let buffer = cx.update(|cx| buffer.read(cx).read(cx).clone()); - let position = buffer.anchor_after( - buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left), - ); - - let disposition = if rng.gen() { - BlockDisposition::Above - } else { - BlockDisposition::Below + let offset = + buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left); + let mut min_height = 0; + let placement = match rng.gen_range(0..3) { + 0 => { + min_height = 1; + let start = buffer.anchor_after(offset); + let end = buffer.anchor_after(buffer.clip_offset( + rng.gen_range(offset..=buffer.len()), + Bias::Left, + )); + BlockPlacement::Replace(start..end) + } + 1 => BlockPlacement::Above(buffer.anchor_after(offset)), + _ => BlockPlacement::Below(buffer.anchor_after(offset)), }; - let height = rng.gen_range(0..5); + + let height = rng.gen_range(min_height..5); log::info!( - "inserting block {:?} {:?} with height {}", - disposition, - position.to_point(&buffer), + "inserting block {:?} with height {}", + placement.as_ref().map(|p| p.to_point(&buffer)), height ); BlockProperties { style: BlockStyle::Fixed, - position, + placement, height, - disposition, render: Box::new(|_| div().into_any()), priority: 0, } @@ -1957,28 +2327,21 @@ mod tests { wrap_map.sync(char_snapshot, tab_edits, cx) }); let mut block_map = block_map.write(wraps_snapshot, wrap_edits); - let block_ids = - 
block_map.insert(block_properties.iter().map(|props| BlockProperties { - position: props.position, - height: props.height, - style: props.style, - render: Box::new(|_| div().into_any()), - disposition: props.disposition, - priority: 0, - })); - for (block_id, props) in block_ids.into_iter().zip(block_properties) { - custom_blocks.push((block_id, props)); - } + block_map.insert(block_properties.iter().map(|props| BlockProperties { + placement: props.placement.clone(), + height: props.height, + style: props.style, + render: Box::new(|_| div().into_any()), + priority: 0, + })); } - 40..=59 if !custom_blocks.is_empty() => { - let block_count = rng.gen_range(1..=4.min(custom_blocks.len())); - let block_ids_to_remove = (0..block_count) - .map(|_| { - custom_blocks - .remove(rng.gen_range(0..custom_blocks.len())) - .0 - }) - .collect(); + 40..=59 if !block_map.custom_blocks.is_empty() => { + let block_count = rng.gen_range(1..=4.min(block_map.custom_blocks.len())); + let block_ids_to_remove = block_map + .custom_blocks + .choose_multiple(&mut rng, block_count) + .map(|block| block.id) + .collect::>(); let (inlay_snapshot, inlay_edits) = inlay_map.sync(buffer_snapshot.clone(), vec![]); @@ -2015,47 +2378,39 @@ mod tests { blocks_snapshot.transforms.summary().input_rows, wraps_snapshot.max_point().row() + 1 ); + log::info!("wrapped text: {:?}", wraps_snapshot.text()); log::info!("blocks text: {:?}", blocks_snapshot.text()); let mut expected_blocks = Vec::new(); - expected_blocks.extend(custom_blocks.iter().map(|(id, block)| { - let mut position = block.position.to_point(&buffer_snapshot); - match block.disposition { - BlockDisposition::Above => { - position.column = 0; - } - BlockDisposition::Below => { - position.column = buffer_snapshot.line_len(MultiBufferRow(position.row)); - } - }; - let row = wraps_snapshot.make_wrap_point(position, Bias::Left).row(); - ( - row, - ExpectedBlock::Custom { - disposition: block.disposition, - id: *id, - height: block.height, - priority: block.priority, - }, - ) + expected_blocks.extend(block_map.custom_blocks.iter().filter_map(|block| { + Some(( + block.placement.to_wrap_row(&wraps_snapshot)?, + Block::Custom(block.clone()), + )) })); // Note that this needs to be synced with the related section in BlockMap::sync - expected_blocks.extend( - BlockMap::header_and_footer_blocks( - true, - excerpt_footer_height, - buffer_start_header_height, - excerpt_header_height, - &buffer_snapshot, - 0.., - &wraps_snapshot, - ) - .map(|(row, block)| (row, block.into())), - ); + expected_blocks.extend(BlockMap::header_and_footer_blocks( + true, + excerpt_footer_height, + buffer_start_header_height, + excerpt_header_height, + &buffer_snapshot, + 0.., + &wraps_snapshot, + )); BlockMap::sort_blocks(&mut expected_blocks); + for (placement, block) in &expected_blocks { + log::info!( + "Block {:?} placement: {:?} Height: {:?}", + block.id(), + placement, + block.height() + ); + } + let mut sorted_blocks_iter = expected_blocks.into_iter().peekable(); let input_buffer_rows = buffer_snapshot @@ -2065,49 +2420,97 @@ mod tests { let mut expected_text = String::new(); let mut expected_block_positions = Vec::new(); let input_text = wraps_snapshot.text(); - for (row, input_line) in input_text.split('\n').enumerate() { - let row = row as u32; - if row > 0 { - expected_text.push('\n'); - } - let buffer_row = input_buffer_rows[wraps_snapshot - .to_point(WrapPoint::new(row, 0), Bias::Left) - .row as usize]; - - while let Some((block_row, block)) = sorted_blocks_iter.peek() { - if *block_row == row 
&& block.disposition() == BlockDisposition::Above { + // Loop over the input lines, creating (N - 1) empty lines for + // blocks of height N. + // + // It's important to note that output *starts* as one empty line, + // so we special case row 0 to assume a leading '\n'. + // + // Linehood is the birthright of strings. + let mut input_text_lines = input_text.split('\n').enumerate().peekable(); + let mut block_row = 0; + while let Some((wrap_row, input_line)) = input_text_lines.next() { + let wrap_row = wrap_row as u32; + + // Create empty lines for the above block + while let Some((placement, block)) = sorted_blocks_iter.peek() { + if placement.start().0 == wrap_row && block.place_above() { let (_, block) = sorted_blocks_iter.next().unwrap(); - let height = block.height() as usize; - expected_block_positions - .push((expected_text.matches('\n').count() as u32, block)); - let text = "\n".repeat(height); - expected_text.push_str(&text); - for _ in 0..height { - expected_buffer_rows.push(None); + expected_block_positions.push((block_row, block.id())); + if block.height() > 0 { + let text = "\n".repeat((block.height() - 1) as usize); + if block_row > 0 { + expected_text.push('\n') + } + expected_text.push_str(&text); + for _ in 0..block.height() { + expected_buffer_rows.push(None); + } + block_row += block.height(); } } else { break; } } - let soft_wrapped = wraps_snapshot - .to_char_point(WrapPoint::new(row, 0)) - .column() - > 0; - expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row }); - expected_text.push_str(input_line); + // Skip lines within replace blocks, then create empty lines for the replace block's height + let mut is_in_replace_block = false; + if let Some((BlockPlacement::Replace(replace_range), block)) = + sorted_blocks_iter.peek() + { + if wrap_row >= replace_range.start.0 { + is_in_replace_block = true; + if wrap_row == replace_range.end.0 { + expected_block_positions.push((block_row, block.id())); + if block.height() > 0 { + let text = "\n".repeat((block.height() - 1) as usize); + if block_row > 0 { + expected_text.push('\n'); + } + expected_text.push_str(&text); + for _ in 0..block.height() { + expected_buffer_rows.push(None); + } + block_row += block.height(); + } + + sorted_blocks_iter.next(); + } + } + } + + if !is_in_replace_block { + let buffer_row = input_buffer_rows[wraps_snapshot + .to_point(WrapPoint::new(wrap_row, 0), Bias::Left) + .row as usize]; + + let soft_wrapped = wraps_snapshot + .to_char_point(WrapPoint::new(wrap_row, 0)) + .column() + > 0; + expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row }); + if block_row > 0 { + expected_text.push('\n'); + } + expected_text.push_str(input_line); + block_row += 1; + } - while let Some((block_row, block)) = sorted_blocks_iter.peek() { - if *block_row == row && block.disposition() == BlockDisposition::Below { + while let Some((placement, block)) = sorted_blocks_iter.peek() { + if placement.end().0 == wrap_row && block.place_below() { let (_, block) = sorted_blocks_iter.next().unwrap(); - let height = block.height() as usize; - expected_block_positions - .push((expected_text.matches('\n').count() as u32 + 1, block)); - let text = "\n".repeat(height); - expected_text.push_str(&text); - for _ in 0..height { - expected_buffer_rows.push(None); + expected_block_positions.push((block_row, block.id())); + if block.height() > 0 { + let text = "\n".repeat((block.height() - 1) as usize); + if block_row > 0 { + expected_text.push('\n') + } + expected_text.push_str(&text); + for _ in 
0..block.height() { + expected_buffer_rows.push(None); + } + block_row += block.height(); } } else { break; @@ -2117,11 +2520,24 @@ mod tests { let expected_lines = expected_text.split('\n').collect::>(); let expected_row_count = expected_lines.len(); + + assert_eq!( + blocks_snapshot.max_point().row + 1, + expected_row_count as u32 + ); + + log::info!("expected text: {:?}", expected_text); + for start_row in 0..expected_row_count { - let expected_text = expected_lines[start_row..].join("\n"); + let end_row = rng.gen_range(start_row + 1..=expected_row_count); + let mut expected_text = expected_lines[start_row..end_row].join("\n"); + if end_row < expected_row_count { + expected_text.push('\n'); + } + let actual_text = blocks_snapshot .chunks( - start_row as u32..blocks_snapshot.max_point().row + 1, + start_row as u32..end_row as u32, false, false, Highlights::default(), @@ -2129,9 +2545,10 @@ mod tests { .map(|chunk| chunk.text) .collect::(); assert_eq!( - actual_text, expected_text, - "incorrect text starting from row {}", - start_row + actual_text, + expected_text, + "incorrect text starting row row range {:?}", + start_row..end_row ); assert_eq!( blocks_snapshot @@ -2145,7 +2562,7 @@ mod tests { assert_eq!( blocks_snapshot .blocks_in_range(0..(expected_row_count as u32)) - .map(|(row, block)| (row, block.clone().into())) + .map(|(row, block)| (row, block.id())) .collect::>(), expected_block_positions, "invalid blocks_in_range({:?})", @@ -2162,8 +2579,8 @@ mod tests { ); } - for (block_row, block) in expected_block_positions { - if let BlockType::Custom(block_id) = block.block_type() { + for (block_row, block_id) in expected_block_positions { + if let BlockId::Custom(block_id) = block_id { assert_eq!( blocks_snapshot.row_for_block(block_id), Some(BlockRow(block_row)) @@ -2204,10 +2621,12 @@ mod tests { longest_line_len, ); + // Ensure that conversion between block points and wrap points is stable. for row in 0..=blocks_snapshot.wrap_snapshot.max_point().row() { - let wrap_point = WrapPoint::new(row, 0); - let block_point = blocks_snapshot.to_block_point(wrap_point); - assert_eq!(blocks_snapshot.to_wrap_point(block_point), wrap_point); + let original_wrap_point = WrapPoint::new(row, 0); + let block_point = blocks_snapshot.to_block_point(original_wrap_point); + let wrap_point = blocks_snapshot.to_wrap_point(block_point); + assert_eq!(blocks_snapshot.to_block_point(wrap_point), block_point); } let mut block_point = BlockPoint::new(0, 0); @@ -2216,7 +2635,9 @@ mod tests { let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left); assert_eq!( blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)), - left_point + left_point, + "wrap point: {:?}", + blocks_snapshot.to_wrap_point(left_point) ); assert_eq!( left_buffer_point, @@ -2229,7 +2650,9 @@ mod tests { let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right); assert_eq!( blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)), - right_point + right_point, + "wrap point: {:?}", + blocks_snapshot.to_wrap_point(right_point) ); assert_eq!( right_buffer_point, @@ -2245,86 +2668,6 @@ mod tests { } } } - - #[derive(Debug, Eq, PartialEq)] - enum ExpectedBlock { - ExcerptBoundary { - height: u32, - starts_new_buffer: bool, - is_last: bool, - }, - Custom { - disposition: BlockDisposition, - id: CustomBlockId, - height: u32, - priority: usize, - }, - } - - impl BlockLike for ExpectedBlock { - fn block_type(&self) -> BlockType { - match self { - ExpectedBlock::Custom { id, .. 
} => BlockType::Custom(*id), - ExpectedBlock::ExcerptBoundary { .. } => BlockType::ExcerptBoundary, - } - } - - fn disposition(&self) -> BlockDisposition { - self.disposition() - } - - fn priority(&self) -> usize { - match self { - ExpectedBlock::Custom { priority, .. } => *priority, - ExpectedBlock::ExcerptBoundary { .. } => usize::MAX, - } - } - } - - impl ExpectedBlock { - fn height(&self) -> u32 { - match self { - ExpectedBlock::ExcerptBoundary { height, .. } => *height, - ExpectedBlock::Custom { height, .. } => *height, - } - } - - fn disposition(&self) -> BlockDisposition { - match self { - ExpectedBlock::ExcerptBoundary { is_last, .. } => { - if *is_last { - BlockDisposition::Below - } else { - BlockDisposition::Above - } - } - ExpectedBlock::Custom { disposition, .. } => *disposition, - } - } - } - - impl From for ExpectedBlock { - fn from(block: Block) -> Self { - match block { - Block::Custom(block) => ExpectedBlock::Custom { - id: block.id, - disposition: block.disposition, - height: block.height, - priority: block.priority, - }, - Block::ExcerptBoundary { - height, - starts_new_buffer, - next_excerpt, - .. - } => ExpectedBlock::ExcerptBoundary { - height, - starts_new_buffer, - is_last: next_excerpt.is_none(), - }, - } - } - } } fn init_test(cx: &mut gpui::AppContext) { diff --git a/crates/editor/src/display_map/char_map.rs b/crates/editor/src/display_map/char_map.rs index 443f8199a6157cfce262e2c4cb0a086f60349d62..8c467b180327dc94f13cb34d95d4c571d3575b5e 100644 --- a/crates/editor/src/display_map/char_map.rs +++ b/crates/editor/src/display_map/char_map.rs @@ -252,6 +252,7 @@ impl CharSnapshot { }; TabChunks { + snapshot: self, fold_chunks: self.fold_snapshot.chunks( input_start..input_end, language_aware, @@ -492,6 +493,7 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { const SPACES: &str = " "; pub struct TabChunks<'a> { + snapshot: &'a CharSnapshot, fold_chunks: FoldChunks<'a>, chunk: Chunk<'a>, column: u32, @@ -503,6 +505,37 @@ pub struct TabChunks<'a> { inside_leading_tab: bool, } +impl<'a> TabChunks<'a> { + pub(crate) fn seek(&mut self, range: Range) { + let (input_start, expanded_char_column, to_next_stop) = + self.snapshot.to_fold_point(range.start, Bias::Left); + let input_column = input_start.column(); + let input_start = input_start.to_offset(&self.snapshot.fold_snapshot); + let input_end = self + .snapshot + .to_fold_point(range.end, Bias::Right) + .0 + .to_offset(&self.snapshot.fold_snapshot); + let to_next_stop = if range.start.0 + Point::new(0, to_next_stop) > range.end.0 { + range.end.column() - range.start.column() + } else { + to_next_stop + }; + + self.fold_chunks.seek(input_start..input_end); + self.input_column = input_column; + self.column = expanded_char_column; + self.output_position = range.start.0; + self.max_output_position = range.end.0; + self.chunk = Chunk { + text: &SPACES[0..(to_next_stop as usize)], + is_tab: true, + ..Default::default() + }; + self.inside_leading_tab = to_next_stop > 0; + } +} + impl<'a> Iterator for TabChunks<'a> { type Item = Chunk<'a>; diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 5eb26ff9693883c0298c8bdd99def46e657705fb..2cfe4b41f5b8d4c2848f3dcc04810bf0813e35cd 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1100,6 +1100,17 @@ pub struct FoldBufferRows<'a> { fold_point: FoldPoint, } +impl<'a> FoldBufferRows<'a> { + pub(crate) fn seek(&mut self, row: u32) { + let fold_point = FoldPoint::new(row, 0); 
+ self.cursor.seek(&fold_point, Bias::Left, &()); + let overshoot = fold_point.0 - self.cursor.start().0 .0; + let inlay_point = InlayPoint(self.cursor.start().1 .0 + overshoot); + self.input_buffer_rows.seek(inlay_point.row()); + self.fold_point = fold_point; + } +} + impl<'a> Iterator for FoldBufferRows<'a> { type Item = Option; @@ -1135,6 +1146,38 @@ pub struct FoldChunks<'a> { max_output_offset: FoldOffset, } +impl<'a> FoldChunks<'a> { + pub(crate) fn seek(&mut self, range: Range) { + self.transform_cursor.seek(&range.start, Bias::Right, &()); + + let inlay_start = { + let overshoot = range.start.0 - self.transform_cursor.start().0 .0; + self.transform_cursor.start().1 + InlayOffset(overshoot) + }; + + let transform_end = self.transform_cursor.end(&()); + + let inlay_end = if self + .transform_cursor + .item() + .map_or(true, |transform| transform.is_fold()) + { + inlay_start + } else if range.end < transform_end.0 { + let overshoot = range.end.0 - self.transform_cursor.start().0 .0; + self.transform_cursor.start().1 + InlayOffset(overshoot) + } else { + transform_end.1 + }; + + self.inlay_chunks.seek(inlay_start..inlay_end); + self.inlay_chunk = None; + self.inlay_offset = inlay_start; + self.output_offset = range.start; + self.max_output_offset = range.end; + } +} + impl<'a> Iterator for FoldChunks<'a> { type Item = Chunk<'a>; diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index f418f45fec7fab5cfc27218557f40c1abc812e5f..15f6595f193d1d4aa189015a8fe742cbf95b2750 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -56,6 +56,7 @@ pub struct WrapChunks<'a> { output_position: WrapPoint, max_output_row: u32, transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>, + snapshot: &'a WrapSnapshot, } #[derive(Clone)] @@ -68,6 +69,21 @@ pub struct WrapBufferRows<'a> { transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>, } +impl<'a> WrapBufferRows<'a> { + pub(crate) fn seek(&mut self, start_row: u32) { + self.transforms + .seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); + let mut input_row = self.transforms.start().1.row(); + if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { + input_row += start_row - self.transforms.start().0.row(); + } + self.soft_wrapped = self.transforms.item().map_or(false, |t| !t.is_isomorphic()); + self.input_buffer_rows.seek(input_row); + self.input_buffer_row = self.input_buffer_rows.next().unwrap(); + self.output_row = start_row; + } +} + impl WrapMap { pub fn new( char_snapshot: CharSnapshot, @@ -602,6 +618,7 @@ impl WrapSnapshot { output_position: output_start, max_output_row: rows.end, transforms, + snapshot: self, } } @@ -629,6 +646,67 @@ impl WrapSnapshot { } } + pub fn text_summary_for_range(&self, rows: Range) -> TextSummary { + let mut summary = TextSummary::default(); + + let start = WrapPoint::new(rows.start, 0); + let end = WrapPoint::new(rows.end, 0); + + let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + cursor.seek(&start, Bias::Right, &()); + if let Some(transform) = cursor.item() { + let start_in_transform = start.0 - cursor.start().0 .0; + let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0 .0; + if transform.is_isomorphic() { + let char_start = CharPoint(cursor.start().1 .0 + start_in_transform); + let char_end = CharPoint(cursor.start().1 .0 + end_in_transform); + summary += &self + .char_snapshot + .text_summary_for_range(char_start..char_end); + } else { + 
debug_assert_eq!(start_in_transform.row, end_in_transform.row); + let indent_len = end_in_transform.column - start_in_transform.column; + summary += &TextSummary { + lines: Point::new(0, indent_len), + first_line_chars: indent_len, + last_line_chars: indent_len, + longest_row: 0, + longest_row_chars: indent_len, + }; + } + + cursor.next(&()); + } + + if rows.end > cursor.start().0.row() { + summary += &cursor + .summary::<_, TransformSummary>(&WrapPoint::new(rows.end, 0), Bias::Right, &()) + .output; + + if let Some(transform) = cursor.item() { + let end_in_transform = end.0 - cursor.start().0 .0; + if transform.is_isomorphic() { + let char_start = cursor.start().1; + let char_end = CharPoint(char_start.0 + end_in_transform); + summary += &self + .char_snapshot + .text_summary_for_range(char_start..char_end); + } else { + debug_assert_eq!(end_in_transform, Point::new(1, 0)); + summary += &TextSummary { + lines: Point::new(1, 0), + first_line_chars: 0, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 0, + }; + } + } + } + + summary + } + pub fn soft_wrap_indent(&self, row: u32) -> Option { let mut cursor = self.transforms.cursor::(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Right, &()); @@ -745,6 +823,21 @@ impl WrapSnapshot { None } + #[cfg(test)] + pub fn text(&self) -> String { + self.text_chunks(0).collect() + } + + #[cfg(test)] + pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator { + self.chunks( + wrap_row..self.max_point().row() + 1, + false, + Highlights::default(), + ) + .map(|h| h.text) + } + fn check_invariants(&self) { #[cfg(test)] { @@ -791,6 +884,26 @@ impl WrapSnapshot { } } +impl<'a> WrapChunks<'a> { + pub(crate) fn seek(&mut self, rows: Range) { + let output_start = WrapPoint::new(rows.start, 0); + let output_end = WrapPoint::new(rows.end, 0); + self.transforms.seek(&output_start, Bias::Right, &()); + let mut input_start = CharPoint(self.transforms.start().1 .0); + if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { + input_start.0 += output_start.0 - self.transforms.start().0 .0; + } + let input_end = self + .snapshot + .to_char_point(output_end) + .min(self.snapshot.char_snapshot.max_point()); + self.input_chunks.seek(input_start..input_end); + self.input_chunk = Chunk::default(); + self.output_position = output_start; + self.max_output_row = rows.end; + } +} + impl<'a> Iterator for WrapChunks<'a> { type Item = Chunk<'a>; @@ -1336,19 +1449,6 @@ mod tests { } impl WrapSnapshot { - pub fn text(&self) -> String { - self.text_chunks(0).collect() - } - - pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator { - self.chunks( - wrap_row..self.max_point().row() + 1, - false, - Highlights::default(), - ) - .map(|h| h.text) - } - fn verify_chunks(&mut self, rng: &mut impl Rng) { for _ in 0..5 { let mut end_row = rng.gen_range(0..=self.max_point().row()); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d3e2134eac48913e97634d1c21f6e1fcd6ebfd51..f3fb5cd3606c85b2b0b21bd6e39ad3a957076560 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10210,7 +10210,7 @@ impl Editor { let block_id = this.insert_blocks( [BlockProperties { style: BlockStyle::Flex, - position: range.start, + placement: BlockPlacement::Below(range.start), height: 1, render: Box::new({ let rename_editor = rename_editor.clone(); @@ -10246,7 +10246,6 @@ impl Editor { .into_any_element() } }), - disposition: BlockDisposition::Below, priority: 0, }], Some(Autoscroll::fit()), @@ -10531,10 +10530,11 @@ impl Editor { let 
message_height = diagnostic.message.matches('\n').count() as u32 + 1; BlockProperties { style: BlockStyle::Fixed, - position: buffer.anchor_after(entry.range.start), + placement: BlockPlacement::Below( + buffer.anchor_after(entry.range.start), + ), height: message_height, render: diagnostic_block_renderer(diagnostic, None, true, true), - disposition: BlockDisposition::Below, priority: 0, } }), diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index fdcfaab82fb33b2e9d5165599b339f490cda6b1a..99b5cb663789b28460d5ee0a5aacb25f93f53f40 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -3868,8 +3868,7 @@ fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) { editor.insert_blocks( [BlockProperties { style: BlockStyle::Fixed, - position: snapshot.anchor_after(Point::new(2, 0)), - disposition: BlockDisposition::Below, + placement: BlockPlacement::Below(snapshot.anchor_after(Point::new(2, 0))), height: 1, render: Box::new(|_| div().into_any()), priority: 0, diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 5a356965a491e909f8788134c6d25a698f42b27c..753b7f246d11dcee30431c9f1ada39d7da662c91 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2071,7 +2071,7 @@ impl EditorElement { let mut element = match block { Block::Custom(block) => { let align_to = block - .position() + .start() .to_point(&snapshot.buffer_snapshot) .to_display_point(snapshot); let anchor_x = text_x @@ -6294,7 +6294,7 @@ fn compute_auto_height_layout( mod tests { use super::*; use crate::{ - display_map::{BlockDisposition, BlockProperties}, + display_map::{BlockPlacement, BlockProperties}, editor_tests::{init_test, update_test_language_settings}, Editor, MultiBuffer, }; @@ -6550,9 +6550,8 @@ mod tests { editor.insert_blocks( [BlockProperties { style: BlockStyle::Fixed, - disposition: BlockDisposition::Above, + placement: BlockPlacement::Above(Anchor::min()), height: 3, - position: Anchor::min(), render: Box::new(|cx| div().h(3. 
* cx.line_height()).into_any()), priority: 0, }], diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 1b9408df7e4d04abdade589696a9883fbbb860f0..9f66d27a644e501081ecdc4ff3f42cf52775398e 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -17,7 +17,7 @@ use workspace::Item; use crate::{ editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk, - BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, + BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; @@ -417,10 +417,9 @@ impl Editor { }; BlockProperties { - position: hunk.multi_buffer_range.start, + placement: BlockPlacement::Above(hunk.multi_buffer_range.start), height: 1, style: BlockStyle::Sticky, - disposition: BlockDisposition::Above, priority: 0, render: Box::new({ let editor = cx.view().clone(); @@ -700,10 +699,9 @@ impl Editor { let hunk = hunk.clone(); let height = editor_height.max(deleted_text_height); BlockProperties { - position: hunk.multi_buffer_range.start, + placement: BlockPlacement::Above(hunk.multi_buffer_range.start), height, style: BlockStyle::Flex, - disposition: BlockDisposition::Above, priority: 0, render: Box::new(move |cx| { let width = EditorElement::diff_hunk_strip_width(cx.line_height()); diff --git a/crates/repl/src/session.rs b/crates/repl/src/session.rs index 7f312023c34aae9d0acf00ed0a0b09f2d0161597..2eba678fdee2c059f710cf9738d2088144bb2f6e 100644 --- a/crates/repl/src/session.rs +++ b/crates/repl/src/session.rs @@ -8,7 +8,7 @@ use client::telemetry::Telemetry; use collections::{HashMap, HashSet}; use editor::{ display_map::{ - BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, CustomBlockId, + BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, }, scroll::Autoscroll, @@ -90,12 +90,11 @@ impl EditorBlock { let invalidation_anchor = buffer.read(cx).read(cx).anchor_before(next_row_start); let block = BlockProperties { - position: code_range.end, + placement: BlockPlacement::Below(code_range.end), // Take up at least one height for status, allow the editor to determine the real height based on the content from render height: 1, style: BlockStyle::Sticky, render: Self::create_output_area_renderer(execution_view.clone(), on_close.clone()), - disposition: BlockDisposition::Below, priority: 0, }; From 0173479d18e2526c1f9c8b25ac94ec66b992a2b2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 25 Oct 2024 12:42:31 +0200 Subject: [PATCH 31/76] ssh remoting: Lock file becomes stale if connection drops & no update if binary is running (#19724) Release Notes: - Changed the update process of the remote server binary to not attempt an update if we can detect that the current binary is used by another process. - Changed the update process of the remote server binary to mark the lock file as stale in case the SSH connection of the process that created the lock file isn't open anymore. 
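For readers skimming the patch, the core of the new scheme is that the lock file now stores the SSH client's source port next to a Unix timestamp ("<port> <timestamp>"), and a lock counts as stale when that port is no longer open on the host or the timestamp exceeds the allowed age. The real check runs as a shell script over SSH (see the diff below); the Rust sketch here only illustrates that decision, and the helper name and the `port_still_open` flag are assumptions made for the example, not part of the patch:

```rust
use std::time::{Duration, SystemTime, UNIX_EPOCH};

// Illustrative helper (not part of the patch): decide staleness for a lock file
// of the form "<ssh-source-port> <unix-timestamp>".
fn lock_is_stale(lock_content: &str, port_still_open: bool, max_age: Duration) -> bool {
    let mut parts = lock_content.split_whitespace();
    let (Some(_port), Some(timestamp)) = (parts.next(), parts.next()) else {
        return true; // malformed lock files are treated as stale so they get removed
    };
    let Ok(created_at) = timestamp.parse::<u64>() else {
        return true;
    };
    if !port_still_open {
        return true; // the SSH connection that wrote the lock is gone
    }
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    now.saturating_sub(created_at) > max_age.as_secs()
}

fn main() {
    // A lock held by a connection whose source port is closed is stale regardless of age.
    assert!(lock_is_stale("54321 1234567890", false, Duration::from_secs(600)));
}
```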
--- crates/remote/src/ssh_session.rs | 203 ++++++++++++++++++++++++++++--- 1 file changed, 183 insertions(+), 20 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index d47e0375ea75f5b359a42f9779f548e195b2d96b..8e0c345f74165c930be5c5b54abbae73aa2b94eb 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1527,11 +1527,14 @@ impl SshRemoteConnection { cx: &mut AsyncAppContext, ) -> Result<()> { let lock_file = dst_path.with_extension("lock"); - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs(); - let lock_content = timestamp.to_string(); + let lock_content = { + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .context("failed to get timestamp")? + .as_secs(); + let source_port = self.get_ssh_source_port().await?; + format!("{} {}", source_port, timestamp) + }; let lock_stale_age = Duration::from_secs(10 * 60); let max_wait_time = Duration::from_secs(10 * 60); @@ -1541,6 +1544,7 @@ impl SshRemoteConnection { loop { let lock_acquired = self.create_lock_file(&lock_file, &lock_content).await?; if lock_acquired { + delegate.set_status(Some("Acquired lock file on host"), cx); let result = self .update_server_binary_if_needed(delegate, dst_path, platform, cx) .await; @@ -1551,6 +1555,10 @@ impl SshRemoteConnection { } else { if let Ok(is_stale) = self.is_lock_stale(&lock_file, &lock_stale_age).await { if is_stale { + delegate.set_status( + Some("Detected lock file on host being stale. Removing"), + cx, + ); self.remove_lock_file(&lock_file).await?; continue; } else { @@ -1581,18 +1589,29 @@ impl SshRemoteConnection { } } + async fn get_ssh_source_port(&self) -> Result { + let output = run_cmd( + self.socket + .ssh_command("sh") + .arg("-c") + .arg(r#""echo $SSH_CLIENT | cut -d' ' -f2""#), + ) + .await + .context("failed to get source port from SSH_CLIENT on host")?; + + Ok(output.trim().to_string()) + } + async fn create_lock_file(&self, lock_file: &Path, content: &str) -> Result { let parent_dir = lock_file .parent() .ok_or_else(|| anyhow!("Lock file path has no parent directory"))?; - // Be mindful of the escaping here: we need to make sure that we have quotes - // inside the string, so that `sh -c` gets a quoted string passed to it. let script = format!( - "\"mkdir -p '{0}' && [ ! -f '{1}' ] && echo '{2}' > '{1}' && echo 'created' || echo 'exists'\"", - parent_dir.display(), - lock_file.display(), - content + r#"'mkdir -p "{parent_dir}" && [ ! -f "{lock_file}" ] && echo "{content}" > "{lock_file}" && echo "created" || echo "exists"'"#, + parent_dir = parent_dir.display(), + lock_file = lock_file.display(), + content = content, ); let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(&script)) @@ -1602,24 +1621,56 @@ impl SshRemoteConnection { Ok(output.trim() == "created") } - async fn is_lock_stale(&self, lock_file: &Path, max_age: &Duration) -> Result { - let threshold = max_age.as_secs(); + fn generate_stale_check_script(lock_file: &Path, max_age: u64) -> String { + format!( + r#" + if [ ! -f "{lock_file}" ]; then + echo "lock file does not exist" + exit 0 + fi - // Be mindful of the escaping here: we need to make sure that we have quotes - // inside the string, so that `sh -c` gets a quoted string passed to it. + read -r port timestamp < "{lock_file}" + + # Check if port is still active + if command -v ss >/dev/null 2>&1; then + if ! 
ss -n | grep -q ":$port[[:space:]]"; then + echo "ss reports port $port is not open" + exit 0 + fi + elif command -v netstat >/dev/null 2>&1; then + if ! netstat -n | grep -q ":$port[[:space:]]"; then + echo "netstat reports port $port is not open" + exit 0 + fi + fi + + # Check timestamp + if [ $(( $(date +%s) - timestamp )) -gt {max_age} ]; then + echo "timestamp in lockfile is too old" + else + echo "recent" + fi"#, + lock_file = lock_file.display(), + max_age = max_age + ) + } + + async fn is_lock_stale(&self, lock_file: &Path, max_age: &Duration) -> Result { let script = format!( - "\"[ -f '{0}' ] && [ $(( $(date +%s) - $(date -r '{0}' +%s) )) -gt {1} ] && echo 'stale' || echo 'recent'\"", - lock_file.display(), - threshold + "'{}'", + Self::generate_stale_check_script(lock_file, max_age.as_secs()) ); - let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(script)) + let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(&script)) .await .with_context(|| { format!("failed to check whether lock file {:?} is stale", lock_file) })?; - Ok(output.trim() == "stale") + let trimmed = output.trim(); + let is_stale = trimmed != "recent"; + log::info!("checked lockfile for staleness. stale: {is_stale}, output: {trimmed:?}"); + Ok(is_stale) } async fn remove_lock_file(&self, lock_file: &Path) -> Result<()> { @@ -1645,6 +1696,15 @@ impl SshRemoteConnection { } } + if self.is_binary_in_use(dst_path).await? { + log::info!("server binary is opened by another process. not updating"); + delegate.set_status( + Some("Skipping update of remote development server, since it's still in use"), + cx, + ); + return Ok(()); + } + let (binary, version) = delegate.get_server_binary(platform, cx).await??; let mut server_binary_exists = false; @@ -1676,6 +1736,33 @@ impl SshRemoteConnection { } } + async fn is_binary_in_use(&self, binary_path: &Path) -> Result { + let script = format!( + r#"' + if command -v lsof >/dev/null 2>&1; then + if lsof "{}" >/dev/null 2>&1; then + echo "in_use" + exit 0 + fi + elif command -v fuser >/dev/null 2>&1; then + if fuser "{}" >/dev/null 2>&1; then + echo "in_use" + exit 0 + fi + fi + echo "not_in_use" + '"#, + binary_path.display(), + binary_path.display(), + ); + + let output = run_cmd(self.socket.ssh_command("sh").arg("-c").arg(script)) + .await + .context("failed to check if binary is in use")?; + + Ok(output.trim() == "in_use") + } + async fn download_binary_on_server( &self, url: &str, @@ -2246,3 +2333,79 @@ mod fake { fn set_status(&self, _: Option<&str>, _: &mut AsyncAppContext) {} } } + +#[cfg(all(test, unix))] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + fn run_stale_check_script( + lock_file: &Path, + max_age: Duration, + simulate_port_open: Option<&str>, + ) -> Result { + let wrapper = format!( + r#" + # Mock ss/netstat commands + ss() {{ + # Only handle the -n argument + if [ "$1" = "-n" ]; then + # If we're simulating an open port, output a line containing that port + if [ "{simulated_port}" != "" ]; then + echo "ESTAB 0 0 1.2.3.4:{simulated_port} 5.6.7.8:12345" + fi + fi + }} + netstat() {{ + ss "$@" + }} + export -f ss netstat + + # Real script starts here + {script}"#, + simulated_port = simulate_port_open.unwrap_or(""), + script = SshRemoteConnection::generate_stale_check_script(lock_file, max_age.as_secs()) + ); + + let output = std::process::Command::new("bash") + .arg("-c") + .arg(&wrapper) + .output()?; + + if !output.stderr.is_empty() { + eprintln!("Script stderr: {}", String::from_utf8_lossy(&output.stderr)); 
+ } + + Ok(String::from_utf8(output.stdout)?.trim().to_string()) + } + + #[test] + fn test_lock_staleness() -> Result<()> { + let temp_dir = TempDir::new()?; + let lock_file = temp_dir.path().join("test.lock"); + + // Test 1: No lock file + let output = run_stale_check_script(&lock_file, Duration::from_secs(600), None)?; + assert_eq!(output, "lock file does not exist"); + + // Test 2: Lock file with port that's not open + fs::write(&lock_file, "54321 1234567890")?; + let output = run_stale_check_script(&lock_file, Duration::from_secs(600), Some("98765"))?; + assert_eq!(output, "ss reports port 54321 is not open"); + + // Test 3: Lock file with port that is open but old timestamp + let old_timestamp = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() - 700; // 700 seconds ago + fs::write(&lock_file, format!("54321 {}", old_timestamp))?; + let output = run_stale_check_script(&lock_file, Duration::from_secs(600), Some("54321"))?; + assert_eq!(output, "timestamp in lockfile is too old"); + + // Test 4: Lock file with port that is open and recent timestamp + let recent_timestamp = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs() - 60; // 1 minute ago + fs::write(&lock_file, format!("54321 {}", recent_timestamp))?; + let output = run_stale_check_script(&lock_file, Duration::from_secs(600), Some("54321"))?; + assert_eq!(output, "recent"); + + Ok(()) + } +} From 5769065f274d9298e951172ea154331879f6a290 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Fri, 25 Oct 2024 13:47:01 +0200 Subject: [PATCH 32/76] project panel: Persist full filename when renaming auto-folded entries (#19728) This fixes a debug-only panic when processing filenames. The underflow that happens in Preview/Stable shouldn't cause any issues (other than maybe unmarking an entry in the project panel). 
/cc @notpeter Closes #ISSUE Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 65 ++++++++++++++++++----- 1 file changed, 51 insertions(+), 14 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 62d5da762ad0987ef79bb9904dfb541463158f2d..50c9d2d1269c6a2028581fb4129fcfaaacb7afee 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -94,12 +94,18 @@ pub struct ProjectPanel { struct EditState { worktree_id: WorktreeId, entry_id: ProjectEntryId, - is_new_entry: bool, + leaf_entry_id: Option, is_dir: bool, depth: usize, processing_filename: Option, } +impl EditState { + fn is_new_entry(&self) -> bool { + self.leaf_entry_id.is_none() + } +} + #[derive(Clone, Debug)] enum ClipboardEntry { Copied(BTreeSet), @@ -824,10 +830,10 @@ impl ProjectPanel { cx.focus(&self.focus_handle); let worktree_id = edit_state.worktree_id; - let is_new_entry = edit_state.is_new_entry; + let is_new_entry = edit_state.is_new_entry(); let filename = self.filename_editor.read(cx).text(cx); edit_state.is_dir = edit_state.is_dir - || (edit_state.is_new_entry && filename.ends_with(std::path::MAIN_SEPARATOR)); + || (edit_state.is_new_entry() && filename.ends_with(std::path::MAIN_SEPARATOR)); let is_dir = edit_state.is_dir; let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?; let entry = worktree.read(cx).entry_for_id(edit_state.entry_id)?.clone(); @@ -858,7 +864,6 @@ impl ProjectPanel { if path_already_exists(new_path.as_path()) { return None; } - edited_entry_id = entry.id; edit_task = self.project.update(cx, |project, cx| { project.rename_entry(entry.id, new_path.as_path(), cx) @@ -1013,7 +1018,7 @@ impl ProjectPanel { self.edit_state = Some(EditState { worktree_id, entry_id: directory_id, - is_new_entry: true, + leaf_entry_id: None, is_dir, processing_filename: None, depth: 0, @@ -1047,12 +1052,12 @@ impl ProjectPanel { }) = self.selection { if let Some(worktree) = self.project.read(cx).worktree_for_id(worktree_id, cx) { - let entry_id = self.unflatten_entry_id(entry_id); - if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) { + let sub_entry_id = self.unflatten_entry_id(entry_id); + if let Some(entry) = worktree.read(cx).entry_for_id(sub_entry_id) { self.edit_state = Some(EditState { worktree_id, - entry_id, - is_new_entry: false, + entry_id: sub_entry_id, + leaf_entry_id: Some(entry_id), is_dir: entry.is_dir(), processing_filename: None, depth: 0, @@ -1835,7 +1840,7 @@ impl ProjectPanel { let mut new_entry_parent_id = None; let mut new_entry_kind = EntryKind::Dir; if let Some(edit_state) = &self.edit_state { - if edit_state.worktree_id == worktree_id && edit_state.is_new_entry { + if edit_state.worktree_id == worktree_id && edit_state.is_new_entry() { new_entry_parent_id = Some(edit_state.entry_id); new_entry_kind = if edit_state.is_dir { EntryKind::Dir @@ -2351,7 +2356,7 @@ impl ProjectPanel { }; if let Some(edit_state) = &self.edit_state { - let is_edited_entry = if edit_state.is_new_entry { + let is_edited_entry = if edit_state.is_new_entry() { entry.id == NEW_ENTRY_ID } else { entry.id == edit_state.entry_id @@ -2369,10 +2374,41 @@ impl ProjectPanel { if is_edited_entry { if let Some(processing_filename) = &edit_state.processing_filename { details.is_processing = true; - details.filename.clear(); - details.filename.push_str(processing_filename); + if let Some(ancestors) = edit_state + .leaf_entry_id + .and_then(|entry| self.ancestors.get(&entry)) + { + let 
position = ancestors.ancestors.iter().position(|entry_id| *entry_id == edit_state.entry_id).expect("Edited sub-entry should be an ancestor of selected leaf entry") + 1; + let all_components = ancestors.ancestors.len(); + + let prefix_components = all_components - position; + let suffix_components = position.checked_sub(1); + let mut previous_components = + Path::new(&details.filename).components(); + let mut new_path = previous_components + .by_ref() + .take(prefix_components) + .collect::(); + if let Some(last_component) = + Path::new(processing_filename).components().last() + { + new_path.push(last_component); + previous_components.next(); + } + + if let Some(_) = suffix_components { + new_path.push(previous_components); + } + if let Some(str) = new_path.to_str() { + details.filename.clear(); + details.filename.push_str(str); + } + } else { + details.filename.clear(); + details.filename.push_str(processing_filename); + } } else { - if edit_state.is_new_entry { + if edit_state.is_new_entry() { details.filename.clear(); } details.is_editing = true; @@ -2571,6 +2607,7 @@ impl ProjectPanel { comp_str }) .collect::>(); + let components_len = components.len(); let active_index = components_len - 1 From 5c2238c7a5573ff836e8f3f3bbee37256561ca26 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 25 Oct 2024 16:27:36 +0200 Subject: [PATCH 33/76] ssh remoting: Use matching versions of remote server binary (#19740) This changes the download logic to not fetch the latest version, but to fetch the version matching the current version of Zed. Release Notes: - Changed the update logic of the SSH remote server to not fetch the latest version for a current channel, but to fetch the version matching the current Zed version. If Zed is updated, the server is updated too. If the server is newer than the Zed version an error will be displayed. 
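To summarize the decision flow the diff below implements: the sketch uses plain version tuples and invented names (`ServerBinaryAction`, `decide`) purely for illustration; the actual code compares Zed's `SemanticVersion` values and special-cases the nightly channel, which still fetches the latest build.

```rust
use std::cmp::Ordering;

// Sketch of the update decision once the client knows its own version and the
// version reported by the server binary already present on the remote host.
#[derive(Debug)]
enum ServerBinaryAction {
    KeepExisting,           // versions match: nothing to do
    ClientMustUpdate,       // remote server is newer than this Zed build: surface an error
    InstallMatchingVersion, // no binary, or an older one: install the client's version
}

fn decide(client: (u64, u64, u64), remote: Option<(u64, u64, u64)>) -> ServerBinaryAction {
    match remote {
        None => ServerBinaryAction::InstallMatchingVersion,
        Some(remote) => match remote.cmp(&client) {
            Ordering::Equal => ServerBinaryAction::KeepExisting,
            Ordering::Greater => ServerBinaryAction::ClientMustUpdate,
            Ordering::Less => ServerBinaryAction::InstallMatchingVersion,
        },
    }
}

fn main() {
    // A 0.159.0 server on the host while running Zed 0.160.0 gets replaced with 0.160.0.
    println!("{:?}", decide((0, 160, 0), Some((0, 159, 0))));
}
```

When a pinned version is requested, the download URL takes the form `/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1`, as added to `get_release` in the diff below.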
--- crates/auto_update/src/auto_update.rs | 102 ++++++++++-------- crates/recent_projects/src/ssh_connections.rs | 25 ++++- crates/remote/src/ssh_session.rs | 29 +++-- 3 files changed, 100 insertions(+), 56 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index a45eb3a05b1838b95886151f5f3801d9768ee8c3..61154cb5043eb84cd0a965d8180d0cf25fa37419 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -432,10 +432,11 @@ impl AutoUpdater { cx.notify(); } - pub async fn get_latest_remote_server_release( + pub async fn download_remote_server_release( os: &str, arch: &str, - mut release_channel: ReleaseChannel, + release_channel: ReleaseChannel, + version: Option, cx: &mut AsyncAppContext, ) -> Result { let this = cx.update(|cx| { @@ -445,15 +446,12 @@ impl AutoUpdater { .ok_or_else(|| anyhow!("auto-update not initialized")) })??; - if release_channel == ReleaseChannel::Dev { - release_channel = ReleaseChannel::Nightly; - } - - let release = Self::get_latest_release( + let release = Self::get_release( &this, "zed-remote-server", os, arch, + version, Some(release_channel), cx, ) @@ -468,17 +466,21 @@ impl AutoUpdater { let client = this.read_with(cx, |this, _| this.http_client.clone())?; if smol::fs::metadata(&version_path).await.is_err() { - log::info!("downloading zed-remote-server {os} {arch}"); + log::info!( + "downloading zed-remote-server {os} {arch} version {}", + release.version + ); download_remote_server_binary(&version_path, release, client, cx).await?; } Ok(version_path) } - pub async fn get_latest_remote_server_release_url( + pub async fn get_remote_server_release_url( os: &str, arch: &str, - mut release_channel: ReleaseChannel, + release_channel: ReleaseChannel, + version: Option, cx: &mut AsyncAppContext, ) -> Result<(String, String)> { let this = cx.update(|cx| { @@ -488,15 +490,12 @@ impl AutoUpdater { .ok_or_else(|| anyhow!("auto-update not initialized")) })??; - if release_channel == ReleaseChannel::Dev { - release_channel = ReleaseChannel::Nightly; - } - - let release = Self::get_latest_release( + let release = Self::get_release( &this, "zed-remote-server", os, arch, + version, Some(release_channel), cx, ) @@ -508,46 +507,65 @@ impl AutoUpdater { Ok((release.url, body)) } - async fn get_latest_release( + async fn get_release( this: &Model, asset: &str, os: &str, arch: &str, + version: Option, release_channel: Option, cx: &mut AsyncAppContext, ) -> Result { let client = this.read_with(cx, |this, _| this.http_client.clone())?; - let mut url_string = client.build_url(&format!( - "/api/releases/latest?asset={}&os={}&arch={}", - asset, os, arch - )); - if let Some(param) = release_channel.and_then(|c| c.release_query_param()) { - url_string += "&"; - url_string += param; - } - let mut response = client.get(&url_string, Default::default(), true).await?; + if let Some(version) = version { + let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable"); - let mut body = Vec::new(); - response - .body_mut() - .read_to_end(&mut body) - .await - .context("error reading release")?; + let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",); + + Ok(JsonRelease { + version: version.to_string(), + url: client.build_url(&url), + }) + } else { + let mut url_string = client.build_url(&format!( + "/api/releases/latest?asset={}&os={}&arch={}", + asset, os, arch + )); + if let Some(param) = release_channel.and_then(|c| c.release_query_param()) { + url_string += 
"&"; + url_string += param; + } + + let mut response = client.get(&url_string, Default::default(), true).await?; + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if !response.status().is_success() { + return Err(anyhow!( + "failed to fetch release: {:?}", + String::from_utf8_lossy(&body), + )); + } - if !response.status().is_success() { - Err(anyhow!( - "failed to fetch release: {:?}", - String::from_utf8_lossy(&body), - ))?; + serde_json::from_slice(body.as_slice()).with_context(|| { + format!( + "error deserializing release {:?}", + String::from_utf8_lossy(&body), + ) + }) } + } - serde_json::from_slice(body.as_slice()).with_context(|| { - format!( - "error deserializing release {:?}", - String::from_utf8_lossy(&body), - ) - }) + async fn get_latest_release( + this: &Model, + asset: &str, + os: &str, + arch: &str, + release_channel: Option, + cx: &mut AsyncAppContext, + ) -> Result { + Self::get_release(this, asset, os, arch, None, release_channel, cx).await } async fn update(this: Model, mut cx: AsyncAppContext) -> Result<()> { diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 0718c4f983d5479b60450c20833bf29b990d4cdf..47e4c91dbd528d7f0e1456c7fc222b6aadd7d9a9 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -517,17 +517,31 @@ impl SshClientDelegate { } } + // For nightly channel, always get latest + let current_version = if release_channel == ReleaseChannel::Nightly { + None + } else { + Some(version) + }; + + self.update_status( + Some(&format!("Checking remote server release {}", version)), + cx, + ); + if download_binary_on_host { - let (request_url, request_body) = AutoUpdater::get_latest_remote_server_release_url( + let (request_url, request_body) = AutoUpdater::get_remote_server_release_url( platform.os, platform.arch, release_channel, + current_version, cx, ) .await .map_err(|e| { anyhow!( - "Failed to get remote server binary download url (os: {}, arch: {}): {}", + "Failed to get remote server binary download url (version: {}, os: {}, arch: {}): {}", + version, platform.os, platform.arch, e @@ -542,17 +556,18 @@ impl SshClientDelegate { version, )) } else { - self.update_status(Some("Checking for latest version of remote server"), cx); - let binary_path = AutoUpdater::get_latest_remote_server_release( + let binary_path = AutoUpdater::download_remote_server_release( platform.os, platform.arch, release_channel, + current_version, cx, ) .await .map_err(|e| { anyhow!( - "Failed to download remote server binary (os: {}, arch: {}): {}", + "Failed to download remote server binary (version: {}, os: {}, arch: {}): {}", + version, platform.os, platform.arch, e diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 8e0c345f74165c930be5c5b54abbae73aa2b94eb..656560f0b6b838715fa9aba330e7e5774af2538e 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1707,21 +1707,32 @@ impl SshRemoteConnection { let (binary, version) = delegate.get_server_binary(platform, cx).await??; - let mut server_binary_exists = false; - if !server_binary_exists && cfg!(not(debug_assertions)) { + let mut remote_version = None; + if cfg!(not(debug_assertions)) { if let Ok(installed_version) = run_cmd(self.socket.ssh_command(dst_path).arg("version")).await { - if installed_version.trim() == version.to_string() { - server_binary_exists = true; + if let Ok(version) = 
installed_version.trim().parse::() { + remote_version = Some(version); + } else { + log::warn!("failed to parse version of remote server: {installed_version:?}",); } - log::info!("checked remote server binary for version. latest version: {}. remote server version: {}", version.to_string(), installed_version.trim()); } - } - if server_binary_exists { - log::info!("remote development server already present",); - return Ok(()); + if let Some(remote_version) = remote_version { + if remote_version == version { + log::info!("remote development server present and matching client version"); + return Ok(()); + } else if remote_version > version { + let error = anyhow!("The version of the remote server ({}) is newer than the Zed version ({}). Please update Zed.", remote_version, version); + return Err(error); + } else { + log::info!( + "remote development server has older version: {}. updating...", + remote_version + ); + } + } } match binary { From 5f9a1482f1aab3296a9e417a801ef1b4a5a742ce Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 25 Oct 2024 11:02:27 -0400 Subject: [PATCH 34/76] assistant: Make `/file` emit events as they occur (#19743) This PR updates the `/file` command to emit its `SlashCommandEvent`s in a way that can actually be streamed. Previously it was buffering up all of the events and then returning them all at once. Note that we still don't yet support streaming in the context editor on `main`, so there won't be any visible changes just yet. Release Notes: - N/A --- .../src/slash_command/file_command.rs | 53 ++++++++++--------- 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/crates/assistant/src/slash_command/file_command.rs b/crates/assistant/src/slash_command/file_command.rs index 0a1794cae197aaa50c2d3b169912cccf7b5f5a60..1d0fa2bf3ea981c8f8d2428405257850be5f1fec 100644 --- a/crates/assistant/src/slash_command/file_command.rs +++ b/crates/assistant/src/slash_command/file_command.rs @@ -4,6 +4,7 @@ use assistant_slash_command::{ SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult, }; use futures::channel::mpsc; +use futures::Stream; use fuzzy::PathMatch; use gpui::{AppContext, Model, Task, View, WeakView}; use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate}; @@ -196,7 +197,12 @@ impl SlashCommand for FileSlashCommand { return Task::ready(Err(anyhow!("missing path"))); }; - collect_files(workspace.read(cx).project().clone(), arguments, cx) + Task::ready(Ok(collect_files( + workspace.read(cx).project().clone(), + arguments, + cx, + ) + .boxed())) } } @@ -204,7 +210,7 @@ fn collect_files( project: Model, glob_inputs: &[String], cx: &mut AppContext, -) -> Task { +) -> impl Stream> { let Ok(matchers) = glob_inputs .into_iter() .map(|glob_input| { @@ -213,7 +219,7 @@ fn collect_files( }) .collect::>>() else { - return Task::ready(Err(anyhow!("invalid path"))); + return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed(); }; let project_handle = project.downgrade(); @@ -357,8 +363,12 @@ fn collect_files( events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?; } } - Ok(events_rx.boxed()) + + anyhow::Ok(()) }) + .detach_and_log_err(cx); + + events_rx.boxed() } pub fn codeblock_fence_for_path( @@ -550,6 +560,7 @@ mod test { use project::Project; use serde_json::json; use settings::SettingsStore; + use smol::stream::StreamExt; use crate::slash_command::file_command::collect_files; @@ -590,11 +601,9 @@ mod test { let project = Project::test(fs, ["/root".as_ref()], cx).await; - 
let result_1 = cx - .update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx)) - .await - .unwrap(); - let result_1 = SlashCommandOutput::from_event_stream(result_1) + let result_1 = + cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx)); + let result_1 = SlashCommandOutput::from_event_stream(result_1.boxed()) .await .unwrap(); @@ -602,20 +611,16 @@ mod test { // 4 files + 2 directories assert_eq!(result_1.sections.len(), 6); - let result_2 = cx - .update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx)) - .await - .unwrap(); - let result_2 = SlashCommandOutput::from_event_stream(result_2) + let result_2 = + cx.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx)); + let result_2 = SlashCommandOutput::from_event_stream(result_2.boxed()) .await .unwrap(); assert_eq!(result_1, result_2); - let result = cx - .update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx)) - .await - .unwrap(); + let result = + cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed()); let result = SlashCommandOutput::from_event_stream(result).await.unwrap(); assert!(result.text.starts_with("root/dir")); @@ -659,11 +664,11 @@ mod test { let project = Project::test(fs, ["/zed".as_ref()], cx).await; - let result = cx - .update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)) + let result = + cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)); + let result = SlashCommandOutput::from_event_stream(result.boxed()) .await .unwrap(); - let result = SlashCommandOutput::from_event_stream(result).await.unwrap(); // Sanity check assert!(result.text.starts_with("zed/assets/themes\n")); @@ -721,11 +726,11 @@ mod test { let project = Project::test(fs, ["/zed".as_ref()], cx).await; - let result = cx - .update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)) + let result = + cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx)); + let result = SlashCommandOutput::from_event_stream(result.boxed()) .await .unwrap(); - let result = SlashCommandOutput::from_event_stream(result).await.unwrap(); assert!(result.text.starts_with("zed/assets/themes\n")); assert_eq!(result.sections[0].label, "zed/assets/themes/LICENSE"); From d40ea8fc81026b46a8a5ac244db87863ab9b6e99 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 25 Oct 2024 19:04:38 +0300 Subject: [PATCH 35/76] Make macOS bundle script compatible with GNU sed (#19745) Closes https://github.com/zed-industries/zed/issues/19742 Release Notes: - N/A --- script/bundle-mac | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/bundle-mac b/script/bundle-mac index bc95e1dd6a9285ec5ef1a22bd4cd210c12a302c7..230722ecfa99ed29c496e8422d28338d4b1cb794 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -97,7 +97,7 @@ popd pushd crates/zed cp Cargo.toml Cargo.toml.backup sed \ - -i .backup \ + -i.backup \ "s/package.metadata.bundle-${channel}/package.metadata.bundle/" \ Cargo.toml From 85bdd9329b550475aae34340e50abd4e79f2dd82 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 25 Oct 2024 11:59:22 -0600 Subject: [PATCH 36/76] Revert "Show invisibles in editor (#19298)" (#19752) Closes: #19714 This reverts commit 6dcec47235fa85f0e416b9230e2fedc61de510ee. 
Release Notes: - (preview only) Fixes a crash when rendering invisibles --- Cargo.lock | 1 - Cargo.toml | 2 +- crates/editor/Cargo.toml | 1 - crates/editor/src/display_map.rs | 118 ++++------ crates/editor/src/display_map/block_map.rs | 55 ++--- crates/editor/src/display_map/invisibles.rs | 157 ------------- .../display_map/{char_map.rs => tab_map.rs} | 162 +++++--------- crates/editor/src/display_map/wrap_map.rs | 211 +++++++++--------- crates/editor/src/element.rs | 17 +- crates/editor/src/hover_popover.rs | 45 +--- crates/gpui/src/text_system/line.rs | 51 +---- crates/language/src/buffer.rs | 3 +- 12 files changed, 260 insertions(+), 563 deletions(-) delete mode 100644 crates/editor/src/display_map/invisibles.rs rename crates/editor/src/display_map/{char_map.rs => tab_map.rs} (82%) diff --git a/Cargo.lock b/Cargo.lock index f4e84f7a030eefc3375d305438b695a43c09dd6e..75d058db3848bd366c6ca757ec9fbf2f4822d3a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3711,7 +3711,6 @@ dependencies = [ "tree-sitter-rust", "tree-sitter-typescript", "ui", - "unicode-segmentation", "unindent", "url", "util", diff --git a/Cargo.toml b/Cargo.toml index 732306a9aff4a3d9636863c599a68f5f1c456ade..64a2546020982f95a8e36da84b40cea87e8c98b4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -464,7 +464,7 @@ tree-sitter-typescript = "0.23" tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } unicase = "2.6" unindent = "0.1.7" -unicode-segmentation = "1.11" +unicode-segmentation = "1.10" url = "2.2" uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] } wasmparser = "0.215" diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index f6a0058c7b3a9e80b18480235d55672e4c3fbe95..cfd9284f8076509a18c4d3a460636e6ca071e1f1 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -81,7 +81,6 @@ ui.workspace = true url.workspace = true util.workspace = true workspace.workspace = true -unicode-segmentation.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index e24336d1e9979422005d7781fdcbf44c07be44af..67b7e5b60f7e9117ae7bbd40b9b66a5b6a1e2a29 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -8,7 +8,7 @@ //! of several smaller structures that form a hierarchy (starting at the bottom): //! - [`InlayMap`] that decides where the [`Inlay`]s should be displayed. //! - [`FoldMap`] that decides where the fold indicators should be; it also tracks parts of a source file that are currently folded. -//! - [`CharMap`] that replaces tabs and non-printable characters +//! - [`TabMap`] that keeps track of hard tabs in a buffer. //! - [`WrapMap`] that handles soft wrapping. //! - [`BlockMap`] that tracks custom blocks such as diagnostics that should be displayed within buffer. //! - [`DisplayMap`] that adds background highlights to the regions of text. @@ -18,11 +18,10 @@ //! 
[EditorElement]: crate::element::EditorElement mod block_map; -mod char_map; mod crease_map; mod fold_map; mod inlay_map; -mod invisibles; +mod tab_map; mod wrap_map; use crate::{ @@ -33,7 +32,6 @@ pub use block_map::{ BlockPlacement, BlockPoint, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, }; use block_map::{BlockRow, BlockSnapshot}; -use char_map::{CharMap, CharSnapshot}; use collections::{HashMap, HashSet}; pub use crease_map::*; pub use fold_map::{Fold, FoldId, FoldPlaceholder, FoldPoint}; @@ -44,7 +42,6 @@ use gpui::{ pub(crate) use inlay_map::Inlay; use inlay_map::{InlayMap, InlaySnapshot}; pub use inlay_map::{InlayOffset, InlayPoint}; -pub use invisibles::is_invisible; use language::{ language_settings::language_settings, ChunkRenderer, OffsetUtf16, Point, Subscription as BufferSubscription, @@ -64,9 +61,9 @@ use std::{ sync::Arc, }; use sum_tree::{Bias, TreeMap}; +use tab_map::{TabMap, TabSnapshot}; use text::LineIndent; -use ui::{px, WindowContext}; -use unicode_segmentation::UnicodeSegmentation; +use ui::WindowContext; use wrap_map::{WrapMap, WrapSnapshot}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -97,7 +94,7 @@ pub struct DisplayMap { /// Decides where the fold indicators should be and tracks parts of a source file that are currently folded. fold_map: FoldMap, /// Keeps track of hard tabs in a buffer. - char_map: CharMap, + tab_map: TabMap, /// Handles soft wrapping. wrap_map: Model, /// Tracks custom blocks such as diagnostics that should be displayed within buffer. @@ -134,7 +131,7 @@ impl DisplayMap { let crease_map = CreaseMap::new(&buffer_snapshot); let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot); let (fold_map, snapshot) = FoldMap::new(snapshot); - let (char_map, snapshot) = CharMap::new(snapshot, tab_size); + let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); let (wrap_map, snapshot) = WrapMap::new(snapshot, font, font_size, wrap_width, cx); let block_map = BlockMap::new( snapshot, @@ -151,7 +148,7 @@ impl DisplayMap { buffer_subscription, fold_map, inlay_map, - char_map, + tab_map, wrap_map, block_map, crease_map, @@ -169,17 +166,17 @@ impl DisplayMap { let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits); let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot.clone(), edits); let tab_size = Self::tab_size(&self.buffer, cx); - let (char_snapshot, edits) = self.char_map.sync(fold_snapshot.clone(), edits, tab_size); + let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot.clone(), edits, tab_size); let (wrap_snapshot, edits) = self .wrap_map - .update(cx, |map, cx| map.sync(char_snapshot.clone(), edits, cx)); + .update(cx, |map, cx| map.sync(tab_snapshot.clone(), edits, cx)); let block_snapshot = self.block_map.read(wrap_snapshot.clone(), edits).snapshot; DisplaySnapshot { buffer_snapshot: self.buffer.read(cx).snapshot(cx), fold_snapshot, inlay_snapshot, - char_snapshot, + tab_snapshot, wrap_snapshot, block_snapshot, crease_snapshot: self.crease_map.snapshot(), @@ -215,13 +212,13 @@ impl DisplayMap { let tab_size = Self::tab_size(&self.buffer, cx); let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); self.block_map.read(snapshot, edits); let (snapshot, edits) = fold_map.fold(ranges); - 
let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -239,13 +236,13 @@ impl DisplayMap { let tab_size = Self::tab_size(&self.buffer, cx); let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); self.block_map.read(snapshot, edits); let (snapshot, edits) = fold_map.unfold(ranges, inclusive); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -280,7 +277,7 @@ impl DisplayMap { let tab_size = Self::tab_size(&self.buffer, cx); let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); let (snapshot, edits) = self.fold_map.read(snapshot, edits); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -298,7 +295,7 @@ impl DisplayMap { let tab_size = Self::tab_size(&self.buffer, cx); let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); let (snapshot, edits) = self.fold_map.read(snapshot, edits); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -316,7 +313,7 @@ impl DisplayMap { let tab_size = Self::tab_size(&self.buffer, cx); let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); let (snapshot, edits) = self.fold_map.read(snapshot, edits); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -334,7 +331,7 @@ impl DisplayMap { let tab_size = Self::tab_size(&self.buffer, cx); let (snapshot, edits) = self.inlay_map.sync(snapshot, edits); let (snapshot, edits) = self.fold_map.read(snapshot, edits); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -410,7 +407,7 @@ impl DisplayMap { let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits); let (snapshot, edits) = self.fold_map.read(snapshot, edits); let tab_size = Self::tab_size(&self.buffer, cx); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -418,7 +415,7 @@ impl DisplayMap { let (snapshot, edits) = self.inlay_map.splice(to_remove, to_insert); let (snapshot, edits) = self.fold_map.read(snapshot, edits); - let (snapshot, edits) = self.char_map.sync(snapshot, edits, tab_size); + let (snapshot, edits) = 
self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); @@ -470,7 +467,7 @@ pub struct DisplaySnapshot { pub fold_snapshot: FoldSnapshot, pub crease_snapshot: CreaseSnapshot, inlay_snapshot: InlaySnapshot, - char_snapshot: CharSnapshot, + tab_snapshot: TabSnapshot, wrap_snapshot: WrapSnapshot, block_snapshot: BlockSnapshot, text_highlights: TextHighlights, @@ -570,8 +567,8 @@ impl DisplaySnapshot { fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint { let inlay_point = self.inlay_snapshot.to_inlay_point(point); let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias); - let char_point = self.char_snapshot.to_char_point(fold_point); - let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point); + let tab_point = self.tab_snapshot.to_tab_point(fold_point); + let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point); let block_point = self.block_snapshot.to_block_point(wrap_point); DisplayPoint(block_point) } @@ -599,21 +596,21 @@ impl DisplaySnapshot { fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint { let block_point = point.0; let wrap_point = self.block_snapshot.to_wrap_point(block_point); - let char_point = self.wrap_snapshot.to_char_point(wrap_point); - let fold_point = self.char_snapshot.to_fold_point(char_point, bias).0; + let tab_point = self.wrap_snapshot.to_tab_point(wrap_point); + let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0; fold_point.to_inlay_point(&self.fold_snapshot) } pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint { let block_point = point.0; let wrap_point = self.block_snapshot.to_wrap_point(block_point); - let char_point = self.wrap_snapshot.to_char_point(wrap_point); - self.char_snapshot.to_fold_point(char_point, bias).0 + let tab_point = self.wrap_snapshot.to_tab_point(wrap_point); + self.tab_snapshot.to_fold_point(tab_point, bias).0 } pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint { - let char_point = self.char_snapshot.to_char_point(fold_point); - let wrap_point = self.wrap_snapshot.char_point_to_wrap_point(char_point); + let tab_point = self.tab_snapshot.to_tab_point(fold_point); + let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point); let block_point = self.block_snapshot.to_block_point(wrap_point); DisplayPoint(block_point) } @@ -691,23 +688,6 @@ impl DisplaySnapshot { } } - if chunk.is_invisible { - let invisible_highlight = HighlightStyle { - background_color: Some(editor_style.status.hint_background), - underline: Some(UnderlineStyle { - color: Some(editor_style.status.hint), - thickness: px(1.), - wavy: false, - }), - ..Default::default() - }; - if let Some(highlight_style) = highlight_style.as_mut() { - highlight_style.highlight(invisible_highlight); - } else { - highlight_style = Some(invisible_highlight); - } - } - let mut diagnostic_highlight = HighlightStyle::default(); if chunk.is_unnecessary { @@ -804,11 +784,12 @@ impl DisplaySnapshot { layout_line.closest_index_for_x(x) as u32 } - pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option { + pub fn display_chars_at( + &self, + mut point: DisplayPoint, + ) -> impl Iterator + '_ { point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left)); - - let chars = self - .text_chunks(point.row()) + self.text_chunks(point.row()) .flat_map(str::chars) .skip_while({ let mut column = 0; @@ -818,21 
+799,16 @@ impl DisplaySnapshot { !at_point } }) - .take_while({ - let mut prev = false; - move |char| { - let now = char.is_ascii(); - let end = char.is_ascii() && (char.is_ascii_whitespace() || prev); - prev = now; - !end + .map(move |ch| { + let result = (ch, point); + if ch == '\n' { + *point.row_mut() += 1; + *point.column_mut() = 0; + } else { + *point.column_mut() += ch.len_utf8() as u32; } - }); - - chars - .collect::() - .graphemes(true) - .next() - .map(|s| s.to_owned()) + result + }) } pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator + '_ { @@ -1144,8 +1120,8 @@ impl DisplayPoint { pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize { let wrap_point = map.block_snapshot.to_wrap_point(self.0); - let char_point = map.wrap_snapshot.to_char_point(wrap_point); - let fold_point = map.char_snapshot.to_fold_point(char_point, bias).0; + let tab_point = map.wrap_snapshot.to_tab_point(wrap_point); + let fold_point = map.tab_snapshot.to_fold_point(tab_point, bias).0; let inlay_point = fold_point.to_inlay_point(&map.fold_snapshot); map.inlay_snapshot .to_buffer_offset(map.inlay_snapshot.to_offset(inlay_point)) @@ -1253,7 +1229,7 @@ pub mod tests { let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); log::info!("fold text: {:?}", snapshot.fold_snapshot.text()); - log::info!("char text: {:?}", snapshot.char_snapshot.text()); + log::info!("tab text: {:?}", snapshot.tab_snapshot.text()); log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text()); log::info!("block text: {:?}", snapshot.block_snapshot.text()); log::info!("display text: {:?}", snapshot.text()); @@ -1368,7 +1344,7 @@ pub mod tests { fold_count = snapshot.fold_count(); log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); log::info!("fold text: {:?}", snapshot.fold_snapshot.text()); - log::info!("char text: {:?}", snapshot.char_snapshot.text()); + log::info!("tab text: {:?}", snapshot.tab_snapshot.text()); log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text()); log::info!("block text: {:?}", snapshot.block_snapshot.text()); log::info!("display text: {:?}", snapshot.text()); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 44a540bc9510a1a9254c674661952d9d31c6e708..a7d0ca9c63e7ba83616854784eb67a470267b85f 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1666,7 +1666,7 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) { mod tests { use super::*; use crate::display_map::{ - char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap, wrap_map::WrapMap, + fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap, }; use gpui::{div, font, px, AppContext, Context as _, Element}; use language::{Buffer, Capability}; @@ -1701,9 +1701,9 @@ mod tests { let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap()); + let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap()); let (wrap_map, wraps_snapshot) = - cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx)); + cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx)); let mut block_map = 
BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1); let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); @@ -1851,10 +1851,10 @@ mod tests { let (inlay_snapshot, inlay_edits) = inlay_map.sync(buffer_snapshot, subscription.consume().into_inner()); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); - let (char_snapshot, tab_edits) = - char_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap()); + let (tab_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap()); let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { - wrap_map.sync(char_snapshot, tab_edits, cx) + wrap_map.sync(tab_snapshot, tab_edits, cx) }); let snapshot = block_map.read(wraps_snapshot, wrap_edits); assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n"); @@ -1914,9 +1914,8 @@ mod tests { let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); let (_, inlay_snapshot) = InlayMap::new(multi_buffer_snapshot.clone()); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap()); - let (_, wraps_snapshot) = - WrapMap::new(char_snapshot, font, font_size, Some(wrap_width), cx); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); + let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font, font_size, Some(wrap_width), cx); let block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 1); let snapshot = block_map.read(wraps_snapshot, Default::default()); @@ -1953,9 +1952,9 @@ mod tests { let _subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let (_inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (_fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (_char_map, char_snapshot) = CharMap::new(fold_snapshot, 1.try_into().unwrap()); + let (_tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap()); let (_wrap_map, wraps_snapshot) = - cx.update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), px(14.0), None, cx)); + cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx)); let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 0); let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); @@ -2055,15 +2054,9 @@ mod tests { let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap()); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); let (_, wraps_snapshot) = cx.update(|cx| { - WrapMap::new( - char_snapshot, - font("Helvetica"), - px(14.0), - Some(px(60.)), - cx, - ) + WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), Some(px(60.)), cx) }); let mut block_map = BlockMap::new(wraps_snapshot.clone(), true, 1, 1, 0); @@ -2106,7 +2099,7 @@ mod tests { let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); let tab_size = 1.try_into().unwrap(); - let (mut tab_map, tab_snapshot) = CharMap::new(fold_snapshot, tab_size); + let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, tab_size); let (wrap_map, wraps_snapshot) = cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx)); let mut block_map = BlockMap::new(wraps_snapshot.clone(), false, 1, 1, 
0); @@ -2257,9 +2250,9 @@ mod tests { let mut buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (mut char_map, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap()); + let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); let (wrap_map, wraps_snapshot) = cx - .update(|cx| WrapMap::new(char_snapshot, font("Helvetica"), font_size, wrap_width, cx)); + .update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx)); let mut block_map = BlockMap::new( wraps_snapshot, true, @@ -2321,10 +2314,10 @@ mod tests { let (inlay_snapshot, inlay_edits) = inlay_map.sync(buffer_snapshot.clone(), vec![]); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); - let (char_snapshot, tab_edits) = - char_map.sync(fold_snapshot, fold_edits, tab_size); + let (tab_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, tab_size); let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { - wrap_map.sync(char_snapshot, tab_edits, cx) + wrap_map.sync(tab_snapshot, tab_edits, cx) }); let mut block_map = block_map.write(wraps_snapshot, wrap_edits); block_map.insert(block_properties.iter().map(|props| BlockProperties { @@ -2346,10 +2339,10 @@ mod tests { let (inlay_snapshot, inlay_edits) = inlay_map.sync(buffer_snapshot.clone(), vec![]); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); - let (char_snapshot, tab_edits) = - char_map.sync(fold_snapshot, fold_edits, tab_size); + let (tab_snapshot, tab_edits) = + tab_map.sync(fold_snapshot, fold_edits, tab_size); let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { - wrap_map.sync(char_snapshot, tab_edits, cx) + wrap_map.sync(tab_snapshot, tab_edits, cx) }); let mut block_map = block_map.write(wraps_snapshot, wrap_edits); block_map.remove(block_ids_to_remove); @@ -2369,9 +2362,9 @@ mod tests { let (inlay_snapshot, inlay_edits) = inlay_map.sync(buffer_snapshot.clone(), buffer_edits); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); - let (char_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size); + let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { - wrap_map.sync(char_snapshot, tab_edits, cx) + wrap_map.sync(tab_snapshot, tab_edits, cx) }); let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); assert_eq!( @@ -2486,7 +2479,7 @@ mod tests { .row as usize]; let soft_wrapped = wraps_snapshot - .to_char_point(WrapPoint::new(wrap_row, 0)) + .to_tab_point(WrapPoint::new(wrap_row, 0)) .column() > 0; expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row }); diff --git a/crates/editor/src/display_map/invisibles.rs b/crates/editor/src/display_map/invisibles.rs deleted file mode 100644 index 19a5bebbeb1ef997effec02d7aba64524db8dafa..0000000000000000000000000000000000000000 --- a/crates/editor/src/display_map/invisibles.rs +++ /dev/null @@ -1,157 +0,0 @@ -use std::sync::LazyLock; - -use collections::HashMap; - -// Invisibility in a Unicode context is not well defined, so we have to guess. -// -// We highlight all ASCII control codes, and unicode whitespace because they are likely -// confused with a normal space (U+0020). 
-// -// We also highlight the handful of blank non-space characters: -// U+2800 BRAILLE PATTERN BLANK - Category: So -// U+115F HANGUL CHOSEONG FILLER - Category: Lo -// U+1160 HANGUL CHOSEONG FILLER - Category: Lo -// U+3164 HANGUL FILLER - Category: Lo -// U+FFA0 HALFWIDTH HANGUL FILLER - Category: Lo -// U+FFFC OBJECT REPLACEMENT CHARACTER - Category: So -// -// For the rest of Unicode, invisibility happens for two reasons: -// * A Format character (like a byte order mark or right-to-left override) -// * An invisible Nonspacing Mark character (like U+034F, or variation selectors) -// -// We don't consider unassigned codepoints invisible as the font renderer already shows -// a replacement character in that case (and there are a *lot* of them) -// -// Control characters are mostly fine to highlight; except: -// * U+E0020..=U+E007F are used in emoji flags. We don't highlight them right now, but we could if we tightened our heuristics. -// * U+200D is used to join characters. We highlight this but don't replace it. As our font system ignores mid-glyph highlights this mostly works to highlight unexpected uses. -// -// Nonspacing marks are handled like U+200D. This means that mid-glyph we ignore them, but -// probably causes issues with end-of-glyph usage. -// -// ref: https://invisible-characters.com -// ref: https://www.compart.com/en/unicode/category/Cf -// ref: https://gist.github.com/ConradIrwin/f759e1fc29267143c4c7895aa495dca5?h=1 -// ref: https://unicode.org/Public/emoji/13.0/emoji-test.txt -// https://github.com/bits/UTF-8-Unicode-Test-Documents/blob/master/UTF-8_sequence_separated/utf8_sequence_0-0x10ffff_assigned_including-unprintable-asis.txt -pub fn is_invisible(c: char) -> bool { - if c <= '\u{1f}' { - c != '\t' && c != '\n' && c != '\r' - } else if c >= '\u{7f}' { - c <= '\u{9f}' || c.is_whitespace() || contains(c, &FORMAT) || contains(c, &OTHER) - } else { - false - } -} - -pub(crate) fn replacement(c: char) -> Option<&'static str> { - if !is_invisible(c) { - return None; - } - if c <= '\x7f' { - REPLACEMENTS.get(&c).copied() - } else if contains(c, &PRESERVE) { - None - } else { - Some(" ") - } -} - -const REPLACEMENTS: LazyLock> = LazyLock::new(|| { - [ - ('\x00', "␀"), - ('\x01', "␁"), - ('\x02', "␂"), - ('\x03', "␃"), - ('\x04', "␄"), - ('\x05', "␅"), - ('\x06', "␆"), - ('\x07', "␇"), - ('\x08', "␈"), - ('\x0B', "␋"), - ('\x0C', "␌"), - ('\x0D', "␍"), - ('\x0E', "␎"), - ('\x0F', "␏"), - ('\x10', "␐"), - ('\x11', "␑"), - ('\x12', "␒"), - ('\x13', "␓"), - ('\x14', "␔"), - ('\x15', "␕"), - ('\x16', "␖"), - ('\x17', "␗"), - ('\x18', "␘"), - ('\x19', "␙"), - ('\x1A', "␚"), - ('\x1B', "␛"), - ('\x1C', "␜"), - ('\x1D', "␝"), - ('\x1E', "␞"), - ('\x1F', "␟"), - ('\u{007F}', "␡"), - ] - .into_iter() - .collect() -}); - -// generated using ucd-generate: ucd-generate general-category --include Format --chars ucd-16.0.0 -pub const FORMAT: &'static [(char, char)] = &[ - ('\u{ad}', '\u{ad}'), - ('\u{600}', '\u{605}'), - ('\u{61c}', '\u{61c}'), - ('\u{6dd}', '\u{6dd}'), - ('\u{70f}', '\u{70f}'), - ('\u{890}', '\u{891}'), - ('\u{8e2}', '\u{8e2}'), - ('\u{180e}', '\u{180e}'), - ('\u{200b}', '\u{200f}'), - ('\u{202a}', '\u{202e}'), - ('\u{2060}', '\u{2064}'), - ('\u{2066}', '\u{206f}'), - ('\u{feff}', '\u{feff}'), - ('\u{fff9}', '\u{fffb}'), - ('\u{110bd}', '\u{110bd}'), - ('\u{110cd}', '\u{110cd}'), - ('\u{13430}', '\u{1343f}'), - ('\u{1bca0}', '\u{1bca3}'), - ('\u{1d173}', '\u{1d17a}'), - ('\u{e0001}', '\u{e0001}'), - ('\u{e0020}', '\u{e007f}'), -]; - -// hand-made base on 
https://invisible-characters.com (Excluding Cf) -pub const OTHER: &'static [(char, char)] = &[ - ('\u{034f}', '\u{034f}'), - ('\u{115F}', '\u{1160}'), - ('\u{17b4}', '\u{17b5}'), - ('\u{180b}', '\u{180d}'), - ('\u{2800}', '\u{2800}'), - ('\u{3164}', '\u{3164}'), - ('\u{fe00}', '\u{fe0d}'), - ('\u{ffa0}', '\u{ffa0}'), - ('\u{fffc}', '\u{fffc}'), - ('\u{e0100}', '\u{e01ef}'), -]; - -// a subset of FORMAT/OTHER that may appear within glyphs -const PRESERVE: &'static [(char, char)] = &[ - ('\u{034f}', '\u{034f}'), - ('\u{200d}', '\u{200d}'), - ('\u{17b4}', '\u{17b5}'), - ('\u{180b}', '\u{180d}'), - ('\u{e0061}', '\u{e007a}'), - ('\u{e007f}', '\u{e007f}'), -]; - -fn contains(c: char, list: &[(char, char)]) -> bool { - for (start, end) in list { - if c < *start { - return false; - } - if c <= *end { - return true; - } - } - false -} diff --git a/crates/editor/src/display_map/char_map.rs b/crates/editor/src/display_map/tab_map.rs similarity index 82% rename from crates/editor/src/display_map/char_map.rs rename to crates/editor/src/display_map/tab_map.rs index 8c467b180327dc94f13cb34d95d4c571d3575b5e..86fa492712a066545ec32faee452f75f160c1c04 100644 --- a/crates/editor/src/display_map/char_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1,6 +1,5 @@ use super::{ fold_map::{self, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot}, - invisibles::{is_invisible, replacement}, Highlights, }; use language::{Chunk, Point}; @@ -10,14 +9,14 @@ use sum_tree::Bias; const MAX_EXPANSION_COLUMN: u32 = 256; -/// Keeps track of hard tabs and non-printable characters in a text buffer. +/// Keeps track of hard tabs in a text buffer. /// /// See the [`display_map` module documentation](crate::display_map) for more information. -pub struct CharMap(CharSnapshot); +pub struct TabMap(TabSnapshot); -impl CharMap { - pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, CharSnapshot) { - let snapshot = CharSnapshot { +impl TabMap { + pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) { + let snapshot = TabSnapshot { fold_snapshot, tab_size, max_expansion_column: MAX_EXPANSION_COLUMN, @@ -27,7 +26,7 @@ impl CharMap { } #[cfg(test)] - pub fn set_max_expansion_column(&mut self, column: u32) -> CharSnapshot { + pub fn set_max_expansion_column(&mut self, column: u32) -> TabSnapshot { self.0.max_expansion_column = column; self.0.clone() } @@ -37,9 +36,9 @@ impl CharMap { fold_snapshot: FoldSnapshot, mut fold_edits: Vec, tab_size: NonZeroU32, - ) -> (CharSnapshot, Vec) { + ) -> (TabSnapshot, Vec) { let old_snapshot = &mut self.0; - let mut new_snapshot = CharSnapshot { + let mut new_snapshot = TabSnapshot { fold_snapshot, tab_size, max_expansion_column: old_snapshot.max_expansion_column, @@ -138,15 +137,15 @@ impl CharMap { let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); tab_edits.push(TabEdit { - old: old_snapshot.to_char_point(old_start)..old_snapshot.to_char_point(old_end), - new: new_snapshot.to_char_point(new_start)..new_snapshot.to_char_point(new_end), + old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end), + new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end), }); } } else { new_snapshot.version += 1; tab_edits.push(TabEdit { - old: CharPoint::zero()..old_snapshot.max_point(), - new: CharPoint::zero()..new_snapshot.max_point(), + old: TabPoint::zero()..old_snapshot.max_point(), + new: 
TabPoint::zero()..new_snapshot.max_point(), }); } @@ -156,14 +155,14 @@ impl CharMap { } #[derive(Clone)] -pub struct CharSnapshot { +pub struct TabSnapshot { pub fold_snapshot: FoldSnapshot, pub tab_size: NonZeroU32, pub max_expansion_column: u32, pub version: usize, } -impl CharSnapshot { +impl TabSnapshot { pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { &self.fold_snapshot.inlay_snapshot.buffer } @@ -171,7 +170,7 @@ impl CharSnapshot { pub fn line_len(&self, row: u32) -> u32 { let max_point = self.max_point(); if row < max_point.row() { - self.to_char_point(FoldPoint::new(row, self.fold_snapshot.line_len(row))) + self.to_tab_point(FoldPoint::new(row, self.fold_snapshot.line_len(row))) .0 .column } else { @@ -180,10 +179,10 @@ impl CharSnapshot { } pub fn text_summary(&self) -> TextSummary { - self.text_summary_for_range(CharPoint::zero()..self.max_point()) + self.text_summary_for_range(TabPoint::zero()..self.max_point()) } - pub fn text_summary_for_range(&self, range: Range) -> TextSummary { + pub fn text_summary_for_range(&self, range: Range) -> TextSummary { let input_start = self.to_fold_point(range.start, Bias::Left).0; let input_end = self.to_fold_point(range.end, Bias::Right).0; let input_summary = self @@ -212,7 +211,7 @@ impl CharSnapshot { } else { for _ in self .chunks( - CharPoint::new(range.end.row(), 0)..range.end, + TabPoint::new(range.end.row(), 0)..range.end, false, Highlights::default(), ) @@ -233,7 +232,7 @@ impl CharSnapshot { pub fn chunks<'a>( &'a self, - range: Range, + range: Range, language_aware: bool, highlights: Highlights<'a>, ) -> TabChunks<'a> { @@ -280,7 +279,7 @@ impl CharSnapshot { #[cfg(test)] pub fn text(&self) -> String { self.chunks( - CharPoint::zero()..self.max_point(), + TabPoint::zero()..self.max_point(), false, Highlights::default(), ) @@ -288,24 +287,24 @@ impl CharSnapshot { .collect() } - pub fn max_point(&self) -> CharPoint { - self.to_char_point(self.fold_snapshot.max_point()) + pub fn max_point(&self) -> TabPoint { + self.to_tab_point(self.fold_snapshot.max_point()) } - pub fn clip_point(&self, point: CharPoint, bias: Bias) -> CharPoint { - self.to_char_point( + pub fn clip_point(&self, point: TabPoint, bias: Bias) -> TabPoint { + self.to_tab_point( self.fold_snapshot .clip_point(self.to_fold_point(point, bias).0, bias), ) } - pub fn to_char_point(&self, input: FoldPoint) -> CharPoint { + pub fn to_tab_point(&self, input: FoldPoint) -> TabPoint { let chars = self.fold_snapshot.chars_at(FoldPoint::new(input.row(), 0)); let expanded = self.expand_tabs(chars, input.column()); - CharPoint::new(input.row(), expanded) + TabPoint::new(input.row(), expanded) } - pub fn to_fold_point(&self, output: CharPoint, bias: Bias) -> (FoldPoint, u32, u32) { + pub fn to_fold_point(&self, output: TabPoint, bias: Bias) -> (FoldPoint, u32, u32) { let chars = self.fold_snapshot.chars_at(FoldPoint::new(output.row(), 0)); let expanded = output.column(); let (collapsed, expanded_char_column, to_next_stop) = @@ -317,13 +316,13 @@ impl CharSnapshot { ) } - pub fn make_char_point(&self, point: Point, bias: Bias) -> CharPoint { + pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint { let inlay_point = self.fold_snapshot.inlay_snapshot.to_inlay_point(point); let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias); - self.to_char_point(fold_point) + self.to_tab_point(fold_point) } - pub fn to_point(&self, point: CharPoint, bias: Bias) -> Point { + pub fn to_point(&self, point: TabPoint, bias: Bias) -> Point { let fold_point = 
self.to_fold_point(point, bias).0; let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot); self.fold_snapshot @@ -346,9 +345,6 @@ impl CharSnapshot { let tab_len = tab_size - expanded_chars % tab_size; expanded_bytes += tab_len; expanded_chars += tab_len; - } else if let Some(replacement) = replacement(c) { - expanded_chars += replacement.chars().count() as u32; - expanded_bytes += replacement.len() as u32; } else { expanded_bytes += c.len_utf8() as u32; expanded_chars += 1; @@ -388,9 +384,6 @@ impl CharSnapshot { Bias::Right => (collapsed_bytes + 1, expanded_chars, 0), }; } - } else if let Some(replacement) = replacement(c) { - expanded_chars += replacement.chars().count() as u32; - expanded_bytes += replacement.len() as u32; } else { expanded_chars += 1; expanded_bytes += c.len_utf8() as u32; @@ -412,9 +405,9 @@ impl CharSnapshot { } #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] -pub struct CharPoint(pub Point); +pub struct TabPoint(pub Point); -impl CharPoint { +impl TabPoint { pub fn new(row: u32, column: u32) -> Self { Self(Point::new(row, column)) } @@ -432,13 +425,13 @@ impl CharPoint { } } -impl From for CharPoint { +impl From for TabPoint { fn from(point: Point) -> Self { Self(point) } } -pub type TabEdit = text::Edit; +pub type TabEdit = text::Edit; #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct TextSummary { @@ -493,7 +486,7 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { const SPACES: &str = " "; pub struct TabChunks<'a> { - snapshot: &'a CharSnapshot, + snapshot: &'a TabSnapshot, fold_chunks: FoldChunks<'a>, chunk: Chunk<'a>, column: u32, @@ -506,7 +499,7 @@ pub struct TabChunks<'a> { } impl<'a> TabChunks<'a> { - pub(crate) fn seek(&mut self, range: Range) { + pub(crate) fn seek(&mut self, range: Range) { let (input_start, expanded_char_column, to_next_stop) = self.snapshot.to_fold_point(range.start, Bias::Left); let input_column = input_start.column(); @@ -591,37 +584,6 @@ impl<'a> Iterator for TabChunks<'a> { self.input_column = 0; self.output_position += Point::new(1, 0); } - _ if is_invisible(c) => { - if ix > 0 { - let (prefix, suffix) = self.chunk.text.split_at(ix); - self.chunk.text = suffix; - return Some(Chunk { - text: prefix, - is_invisible: false, - ..self.chunk.clone() - }); - } - let c_len = c.len_utf8(); - let replacement = replacement(c).unwrap_or(&self.chunk.text[..c_len]); - if self.chunk.text.len() >= c_len { - self.chunk.text = &self.chunk.text[c_len..]; - } else { - self.chunk.text = ""; - } - let len = replacement.chars().count() as u32; - let next_output_position = cmp::min( - self.output_position + Point::new(0, len), - self.max_output_position, - ); - self.column += len; - self.input_column += 1; - self.output_position = next_output_position; - return Some(Chunk { - text: replacement, - is_invisible: true, - ..self.chunk.clone() - }); - } _ => { self.column += 1; if !self.inside_leading_tab { @@ -651,11 +613,11 @@ mod tests { let buffer_snapshot = buffer.read(cx).snapshot(cx); let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap()); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); - assert_eq!(char_snapshot.expand_tabs("\t".chars(), 0), 0); - assert_eq!(char_snapshot.expand_tabs("\t".chars(), 1), 4); - assert_eq!(char_snapshot.expand_tabs("\ta".chars(), 2), 5); + assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 0), 0); + 
assert_eq!(tab_snapshot.expand_tabs("\t".chars(), 1), 4); + assert_eq!(tab_snapshot.expand_tabs("\ta".chars(), 2), 5); } #[gpui::test] @@ -668,16 +630,16 @@ mod tests { let buffer_snapshot = buffer.read(cx).snapshot(cx); let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap()); + let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); - char_snapshot.max_expansion_column = max_expansion_column; - assert_eq!(char_snapshot.text(), output); + tab_snapshot.max_expansion_column = max_expansion_column; + assert_eq!(tab_snapshot.text(), output); for (ix, c) in input.char_indices() { assert_eq!( - char_snapshot + tab_snapshot .chunks( - CharPoint::new(0, ix as u32)..char_snapshot.max_point(), + TabPoint::new(0, ix as u32)..tab_snapshot.max_point(), false, Highlights::default(), ) @@ -691,13 +653,13 @@ mod tests { let input_point = Point::new(0, ix as u32); let output_point = Point::new(0, output.find(c).unwrap() as u32); assert_eq!( - char_snapshot.to_char_point(FoldPoint(input_point)), - CharPoint(output_point), - "to_char_point({input_point:?})" + tab_snapshot.to_tab_point(FoldPoint(input_point)), + TabPoint(output_point), + "to_tab_point({input_point:?})" ); assert_eq!( - char_snapshot - .to_fold_point(CharPoint(output_point), Bias::Left) + tab_snapshot + .to_fold_point(TabPoint(output_point), Bias::Left) .0, FoldPoint(input_point), "to_fold_point({output_point:?})" @@ -715,10 +677,10 @@ mod tests { let buffer_snapshot = buffer.read(cx).snapshot(cx); let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (_, mut char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap()); + let (_, mut tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); - char_snapshot.max_expansion_column = max_expansion_column; - assert_eq!(char_snapshot.text(), input); + tab_snapshot.max_expansion_column = max_expansion_column; + assert_eq!(tab_snapshot.text(), input); } #[gpui::test] @@ -729,10 +691,10 @@ mod tests { let buffer_snapshot = buffer.read(cx).snapshot(cx); let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); - let (_, char_snapshot) = CharMap::new(fold_snapshot, 4.try_into().unwrap()); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap()); assert_eq!( - chunks(&char_snapshot, CharPoint::zero()), + chunks(&tab_snapshot, TabPoint::zero()), vec![ (" ".to_string(), true), (" ".to_string(), false), @@ -741,7 +703,7 @@ mod tests { ] ); assert_eq!( - chunks(&char_snapshot, CharPoint::new(0, 2)), + chunks(&tab_snapshot, TabPoint::new(0, 2)), vec![ (" ".to_string(), true), (" ".to_string(), false), @@ -750,7 +712,7 @@ mod tests { ] ); - fn chunks(snapshot: &CharSnapshot, start: CharPoint) -> Vec<(String, bool)> { + fn chunks(snapshot: &TabSnapshot, start: TabPoint) -> Vec<(String, bool)> { let mut chunks = Vec::new(); let mut was_tab = false; let mut text = String::new(); @@ -796,12 +758,12 @@ mod tests { let (inlay_snapshot, _) = inlay_map.randomly_mutate(&mut 0, &mut rng); log::info!("InlayMap text: {:?}", inlay_snapshot.text()); - let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size); - let tabs_snapshot = char_map.set_max_expansion_column(32); + let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size); + let tabs_snapshot = 
tab_map.set_max_expansion_column(32); let text = text::Rope::from(tabs_snapshot.text().as_str()); log::info!( - "CharMap text (tab size: {}): {:?}", + "TabMap text (tab size: {}): {:?}", tab_size, tabs_snapshot.text(), ); @@ -809,11 +771,11 @@ mod tests { for _ in 0..5 { let end_row = rng.gen_range(0..=text.max_point().row); let end_column = rng.gen_range(0..=text.line_len(end_row)); - let mut end = CharPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right)); + let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right)); let start_row = rng.gen_range(0..=text.max_point().row); let start_column = rng.gen_range(0..=text.line_len(start_row)); let mut start = - CharPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left)); + TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left)); if start > end { mem::swap(&mut start, &mut end); } diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 15f6595f193d1d4aa189015a8fe742cbf95b2750..ceb91ce0ab24c5d554b7a12cd10db94222a999da 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1,6 +1,6 @@ use super::{ - char_map::{self, CharPoint, CharSnapshot, TabEdit}, fold_map::FoldBufferRows, + tab_map::{self, TabEdit, TabPoint, TabSnapshot}, Highlights, }; use gpui::{AppContext, Context, Font, LineWrapper, Model, ModelContext, Pixels, Task}; @@ -12,7 +12,7 @@ use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration}; use sum_tree::{Bias, Cursor, SumTree}; use text::Patch; -pub use super::char_map::TextSummary; +pub use super::tab_map::TextSummary; pub type WrapEdit = text::Edit; /// Handles soft wrapping of text. @@ -20,7 +20,7 @@ pub type WrapEdit = text::Edit; /// See the [`display_map` module documentation](crate::display_map) for more information. 
pub struct WrapMap { snapshot: WrapSnapshot, - pending_edits: VecDeque<(CharSnapshot, Vec)>, + pending_edits: VecDeque<(TabSnapshot, Vec)>, interpolated_edits: Patch, edits_since_sync: Patch, wrap_width: Option, @@ -30,7 +30,7 @@ pub struct WrapMap { #[derive(Clone)] pub struct WrapSnapshot { - char_snapshot: CharSnapshot, + tab_snapshot: TabSnapshot, transforms: SumTree, interpolated: bool, } @@ -51,11 +51,11 @@ struct TransformSummary { pub struct WrapPoint(pub Point); pub struct WrapChunks<'a> { - input_chunks: char_map::TabChunks<'a>, + input_chunks: tab_map::TabChunks<'a>, input_chunk: Chunk<'a>, output_position: WrapPoint, max_output_row: u32, - transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>, + transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>, snapshot: &'a WrapSnapshot, } @@ -66,7 +66,7 @@ pub struct WrapBufferRows<'a> { output_row: u32, soft_wrapped: bool, max_output_row: u32, - transforms: Cursor<'a, Transform, (WrapPoint, CharPoint)>, + transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>, } impl<'a> WrapBufferRows<'a> { @@ -86,7 +86,7 @@ impl<'a> WrapBufferRows<'a> { impl WrapMap { pub fn new( - char_snapshot: CharSnapshot, + tab_snapshot: TabSnapshot, font: Font, font_size: Pixels, wrap_width: Option, @@ -99,7 +99,7 @@ impl WrapMap { pending_edits: Default::default(), interpolated_edits: Default::default(), edits_since_sync: Default::default(), - snapshot: WrapSnapshot::new(char_snapshot), + snapshot: WrapSnapshot::new(tab_snapshot), background_task: None, }; this.set_wrap_width(wrap_width, cx); @@ -117,17 +117,17 @@ impl WrapMap { pub fn sync( &mut self, - char_snapshot: CharSnapshot, + tab_snapshot: TabSnapshot, edits: Vec, cx: &mut ModelContext, ) -> (WrapSnapshot, Patch) { if self.wrap_width.is_some() { - self.pending_edits.push_back((char_snapshot, edits)); + self.pending_edits.push_back((tab_snapshot, edits)); self.flush_edits(cx); } else { self.edits_since_sync = self .edits_since_sync - .compose(self.snapshot.interpolate(char_snapshot, &edits)); + .compose(self.snapshot.interpolate(tab_snapshot, &edits)); self.snapshot.interpolated = false; } @@ -177,11 +177,11 @@ impl WrapMap { let (font, font_size) = self.font_with_size.clone(); let task = cx.background_executor().spawn(async move { let mut line_wrapper = text_system.line_wrapper(font, font_size); - let char_snapshot = new_snapshot.char_snapshot.clone(); - let range = CharPoint::zero()..char_snapshot.max_point(); + let tab_snapshot = new_snapshot.tab_snapshot.clone(); + let range = TabPoint::zero()..tab_snapshot.max_point(); let edits = new_snapshot .update( - char_snapshot, + tab_snapshot, &[TabEdit { old: range.clone(), new: range.clone(), @@ -221,7 +221,7 @@ impl WrapMap { } else { let old_rows = self.snapshot.transforms.summary().output.lines.row + 1; self.snapshot.transforms = SumTree::default(); - let summary = self.snapshot.char_snapshot.text_summary(); + let summary = self.snapshot.tab_snapshot.text_summary(); if !summary.lines.is_zero() { self.snapshot .transforms @@ -239,8 +239,8 @@ impl WrapMap { fn flush_edits(&mut self, cx: &mut ModelContext) { if !self.snapshot.interpolated { let mut to_remove_len = 0; - for (char_snapshot, _) in &self.pending_edits { - if char_snapshot.version <= self.snapshot.char_snapshot.version { + for (tab_snapshot, _) in &self.pending_edits { + if tab_snapshot.version <= self.snapshot.tab_snapshot.version { to_remove_len += 1; } else { break; @@ -262,9 +262,9 @@ impl WrapMap { let update_task = cx.background_executor().spawn(async move { let mut edits = 
Patch::default(); let mut line_wrapper = text_system.line_wrapper(font, font_size); - for (char_snapshot, tab_edits) in pending_edits { + for (tab_snapshot, tab_edits) in pending_edits { let wrap_edits = snapshot - .update(char_snapshot, &tab_edits, wrap_width, &mut line_wrapper) + .update(tab_snapshot, &tab_edits, wrap_width, &mut line_wrapper) .await; edits = edits.compose(&wrap_edits); } @@ -301,11 +301,11 @@ impl WrapMap { let was_interpolated = self.snapshot.interpolated; let mut to_remove_len = 0; - for (char_snapshot, edits) in &self.pending_edits { - if char_snapshot.version <= self.snapshot.char_snapshot.version { + for (tab_snapshot, edits) in &self.pending_edits { + if tab_snapshot.version <= self.snapshot.tab_snapshot.version { to_remove_len += 1; } else { - let interpolated_edits = self.snapshot.interpolate(char_snapshot.clone(), edits); + let interpolated_edits = self.snapshot.interpolate(tab_snapshot.clone(), edits); self.edits_since_sync = self.edits_since_sync.compose(&interpolated_edits); self.interpolated_edits = self.interpolated_edits.compose(&interpolated_edits); } @@ -318,49 +318,45 @@ impl WrapMap { } impl WrapSnapshot { - fn new(char_snapshot: CharSnapshot) -> Self { + fn new(tab_snapshot: TabSnapshot) -> Self { let mut transforms = SumTree::default(); - let extent = char_snapshot.text_summary(); + let extent = tab_snapshot.text_summary(); if !extent.lines.is_zero() { transforms.push(Transform::isomorphic(extent), &()); } Self { transforms, - char_snapshot, + tab_snapshot, interpolated: true, } } pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { - self.char_snapshot.buffer_snapshot() + self.tab_snapshot.buffer_snapshot() } - fn interpolate( - &mut self, - new_char_snapshot: CharSnapshot, - tab_edits: &[TabEdit], - ) -> Patch { + fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch { let mut new_transforms; if tab_edits.is_empty() { new_transforms = self.transforms.clone(); } else { - let mut old_cursor = self.transforms.cursor::(&()); + let mut old_cursor = self.transforms.cursor::(&()); let mut tab_edits_iter = tab_edits.iter().peekable(); new_transforms = old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &()); while let Some(edit) = tab_edits_iter.next() { - if edit.new.start > CharPoint::from(new_transforms.summary().input.lines) { - let summary = new_char_snapshot.text_summary_for_range( - CharPoint::from(new_transforms.summary().input.lines)..edit.new.start, + if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) { + let summary = new_tab_snapshot.text_summary_for_range( + TabPoint::from(new_transforms.summary().input.lines)..edit.new.start, ); new_transforms.push_or_extend(Transform::isomorphic(summary)); } if !edit.new.is_empty() { new_transforms.push_or_extend(Transform::isomorphic( - new_char_snapshot.text_summary_for_range(edit.new.clone()), + new_tab_snapshot.text_summary_for_range(edit.new.clone()), )); } @@ -369,7 +365,7 @@ impl WrapSnapshot { if next_edit.old.start > old_cursor.end(&()) { if old_cursor.end(&()) > edit.old.end { let summary = self - .char_snapshot + .tab_snapshot .text_summary_for_range(edit.old.end..old_cursor.end(&())); new_transforms.push_or_extend(Transform::isomorphic(summary)); } @@ -383,7 +379,7 @@ impl WrapSnapshot { } else { if old_cursor.end(&()) > edit.old.end { let summary = self - .char_snapshot + .tab_snapshot .text_summary_for_range(edit.old.end..old_cursor.end(&())); new_transforms.push_or_extend(Transform::isomorphic(summary)); } @@ 
-396,7 +392,7 @@ impl WrapSnapshot { let old_snapshot = mem::replace( self, WrapSnapshot { - char_snapshot: new_char_snapshot, + tab_snapshot: new_tab_snapshot, transforms: new_transforms, interpolated: true, }, @@ -407,7 +403,7 @@ impl WrapSnapshot { async fn update( &mut self, - new_char_snapshot: CharSnapshot, + new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit], wrap_width: Pixels, line_wrapper: &mut LineWrapper, @@ -444,27 +440,27 @@ impl WrapSnapshot { new_transforms = self.transforms.clone(); } else { let mut row_edits = row_edits.into_iter().peekable(); - let mut old_cursor = self.transforms.cursor::(&()); + let mut old_cursor = self.transforms.cursor::(&()); new_transforms = old_cursor.slice( - &CharPoint::new(row_edits.peek().unwrap().old_rows.start, 0), + &TabPoint::new(row_edits.peek().unwrap().old_rows.start, 0), Bias::Right, &(), ); while let Some(edit) = row_edits.next() { if edit.new_rows.start > new_transforms.summary().input.lines.row { - let summary = new_char_snapshot.text_summary_for_range( - CharPoint(new_transforms.summary().input.lines) - ..CharPoint::new(edit.new_rows.start, 0), + let summary = new_tab_snapshot.text_summary_for_range( + TabPoint(new_transforms.summary().input.lines) + ..TabPoint::new(edit.new_rows.start, 0), ); new_transforms.push_or_extend(Transform::isomorphic(summary)); } let mut line = String::new(); let mut remaining = None; - let mut chunks = new_char_snapshot.chunks( - CharPoint::new(edit.new_rows.start, 0)..new_char_snapshot.max_point(), + let mut chunks = new_tab_snapshot.chunks( + TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(), false, Highlights::default(), ); @@ -511,19 +507,19 @@ impl WrapSnapshot { } new_transforms.extend(edit_transforms, &()); - old_cursor.seek_forward(&CharPoint::new(edit.old_rows.end, 0), Bias::Right, &()); + old_cursor.seek_forward(&TabPoint::new(edit.old_rows.end, 0), Bias::Right, &()); if let Some(next_edit) = row_edits.peek() { if next_edit.old_rows.start > old_cursor.end(&()).row() { - if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) { - let summary = self.char_snapshot.text_summary_for_range( - CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), + if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { + let summary = self.tab_snapshot.text_summary_for_range( + TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), ); new_transforms.push_or_extend(Transform::isomorphic(summary)); } old_cursor.next(&()); new_transforms.append( old_cursor.slice( - &CharPoint::new(next_edit.old_rows.start, 0), + &TabPoint::new(next_edit.old_rows.start, 0), Bias::Right, &(), ), @@ -531,9 +527,9 @@ impl WrapSnapshot { ); } } else { - if old_cursor.end(&()) > CharPoint::new(edit.old_rows.end, 0) { - let summary = self.char_snapshot.text_summary_for_range( - CharPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), + if old_cursor.end(&()) > TabPoint::new(edit.old_rows.end, 0) { + let summary = self.tab_snapshot.text_summary_for_range( + TabPoint::new(edit.old_rows.end, 0)..old_cursor.end(&()), ); new_transforms.push_or_extend(Transform::isomorphic(summary)); } @@ -546,7 +542,7 @@ impl WrapSnapshot { let old_snapshot = mem::replace( self, WrapSnapshot { - char_snapshot: new_char_snapshot, + tab_snapshot: new_tab_snapshot, transforms: new_transforms, interpolated: false, }, @@ -599,17 +595,17 @@ impl WrapSnapshot { ) -> WrapChunks<'a> { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); - let mut transforms = 
self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&output_start, Bias::Right, &()); - let mut input_start = CharPoint(transforms.start().1 .0); + let mut input_start = TabPoint(transforms.start().1 .0); if transforms.item().map_or(false, |t| t.is_isomorphic()) { input_start.0 += output_start.0 - transforms.start().0 .0; } let input_end = self - .to_char_point(output_end) - .min(self.char_snapshot.max_point()); + .to_tab_point(output_end) + .min(self.tab_snapshot.max_point()); WrapChunks { - input_chunks: self.char_snapshot.chunks( + input_chunks: self.tab_snapshot.chunks( input_start..input_end, language_aware, highlights, @@ -627,7 +623,7 @@ impl WrapSnapshot { } pub fn line_len(&self, row: u32) -> u32 { - let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&WrapPoint::new(row + 1, 0), Bias::Left, &()); if cursor .item() @@ -635,7 +631,7 @@ impl WrapSnapshot { { let overshoot = row - cursor.start().0.row(); let tab_row = cursor.start().1.row() + overshoot; - let tab_line_len = self.char_snapshot.line_len(tab_row); + let tab_line_len = self.tab_snapshot.line_len(tab_row); if overshoot == 0 { cursor.start().0.column() + (tab_line_len - cursor.start().1.column()) } else { @@ -652,17 +648,15 @@ impl WrapSnapshot { let start = WrapPoint::new(rows.start, 0); let end = WrapPoint::new(rows.end, 0); - let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&start, Bias::Right, &()); if let Some(transform) = cursor.item() { let start_in_transform = start.0 - cursor.start().0 .0; let end_in_transform = cmp::min(end, cursor.end(&()).0).0 - cursor.start().0 .0; if transform.is_isomorphic() { - let char_start = CharPoint(cursor.start().1 .0 + start_in_transform); - let char_end = CharPoint(cursor.start().1 .0 + end_in_transform); - summary += &self - .char_snapshot - .text_summary_for_range(char_start..char_end); + let tab_start = TabPoint(cursor.start().1 .0 + start_in_transform); + let tab_end = TabPoint(cursor.start().1 .0 + end_in_transform); + summary += &self.tab_snapshot.text_summary_for_range(tab_start..tab_end); } else { debug_assert_eq!(start_in_transform.row, end_in_transform.row); let indent_len = end_in_transform.column - start_in_transform.column; @@ -687,9 +681,9 @@ impl WrapSnapshot { let end_in_transform = end.0 - cursor.start().0 .0; if transform.is_isomorphic() { let char_start = cursor.start().1; - let char_end = CharPoint(char_start.0 + end_in_transform); + let char_end = TabPoint(char_start.0 + end_in_transform); summary += &self - .char_snapshot + .tab_snapshot .text_summary_for_range(char_start..char_end); } else { debug_assert_eq!(end_in_transform, Point::new(1, 0)); @@ -724,14 +718,14 @@ impl WrapSnapshot { } pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows { - let mut transforms = self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); let mut input_row = transforms.start().1.row(); if transforms.item().map_or(false, |t| t.is_isomorphic()) { input_row += start_row - transforms.start().0.row(); } let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic()); - let mut input_buffer_rows = 
self.char_snapshot.buffer_rows(input_row); + let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row); let input_buffer_row = input_buffer_rows.next().unwrap(); WrapBufferRows { transforms, @@ -743,26 +737,26 @@ impl WrapSnapshot { } } - pub fn to_char_point(&self, point: WrapPoint) -> CharPoint { - let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + pub fn to_tab_point(&self, point: WrapPoint) -> TabPoint { + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); - let mut char_point = cursor.start().1 .0; + let mut tab_point = cursor.start().1 .0; if cursor.item().map_or(false, |t| t.is_isomorphic()) { - char_point += point.0 - cursor.start().0 .0; + tab_point += point.0 - cursor.start().0 .0; } - CharPoint(char_point) + TabPoint(tab_point) } pub fn to_point(&self, point: WrapPoint, bias: Bias) -> Point { - self.char_snapshot.to_point(self.to_char_point(point), bias) + self.tab_snapshot.to_point(self.to_tab_point(point), bias) } pub fn make_wrap_point(&self, point: Point, bias: Bias) -> WrapPoint { - self.char_point_to_wrap_point(self.char_snapshot.make_char_point(point, bias)) + self.tab_point_to_wrap_point(self.tab_snapshot.make_tab_point(point, bias)) } - pub fn char_point_to_wrap_point(&self, point: CharPoint) -> WrapPoint { - let mut cursor = self.transforms.cursor::<(CharPoint, WrapPoint)>(&()); + pub fn tab_point_to_wrap_point(&self, point: TabPoint) -> WrapPoint { + let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(&()); cursor.seek(&point, Bias::Right, &()); WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)) } @@ -777,10 +771,7 @@ impl WrapSnapshot { } } - self.char_point_to_wrap_point( - self.char_snapshot - .clip_point(self.to_char_point(point), bias), - ) + self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias)) } pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 { @@ -790,7 +781,7 @@ impl WrapSnapshot { *point.column_mut() = 0; - let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); if cursor.item().is_none() { cursor.prev(&()); @@ -810,7 +801,7 @@ impl WrapSnapshot { pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option { point.0 += Point::new(1, 0); - let mut cursor = self.transforms.cursor::<(WrapPoint, CharPoint)>(&()); + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(&()); cursor.seek(&point, Bias::Right, &()); while let Some(transform) = cursor.item() { if transform.is_isomorphic() && cursor.start().1.column() == 0 { @@ -842,8 +833,8 @@ impl WrapSnapshot { #[cfg(test)] { assert_eq!( - CharPoint::from(self.transforms.summary().input.lines), - self.char_snapshot.max_point() + TabPoint::from(self.transforms.summary().input.lines), + self.tab_snapshot.max_point() ); { @@ -856,18 +847,18 @@ impl WrapSnapshot { } let text = language::Rope::from(self.text().as_str()); - let mut input_buffer_rows = self.char_snapshot.buffer_rows(0); + let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0); let mut expected_buffer_rows = Vec::new(); let mut prev_tab_row = 0; for display_row in 0..=self.max_point().row() { - let char_point = self.to_char_point(WrapPoint::new(display_row, 0)); - if char_point.row() == prev_tab_row && display_row != 0 { + let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0)); + if tab_point.row() == prev_tab_row && display_row != 
0 { expected_buffer_rows.push(None); } else { expected_buffer_rows.push(input_buffer_rows.next().unwrap()); } - prev_tab_row = char_point.row(); + prev_tab_row = tab_point.row(); assert_eq!(self.line_len(display_row), text.line_len(display_row)); } @@ -889,14 +880,14 @@ impl<'a> WrapChunks<'a> { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); self.transforms.seek(&output_start, Bias::Right, &()); - let mut input_start = CharPoint(self.transforms.start().1 .0); + let mut input_start = TabPoint(self.transforms.start().1 .0); if self.transforms.item().map_or(false, |t| t.is_isomorphic()) { input_start.0 += output_start.0 - self.transforms.start().0 .0; } let input_end = self .snapshot - .to_char_point(output_end) - .min(self.snapshot.char_snapshot.max_point()); + .to_tab_point(output_end) + .min(self.snapshot.tab_snapshot.max_point()); self.input_chunks.seek(input_start..input_end); self.input_chunk = Chunk::default(); self.output_position = output_start; @@ -951,11 +942,13 @@ impl<'a> Iterator for WrapChunks<'a> { } else { *self.output_position.column_mut() += char_len as u32; } + if self.output_position >= transform_end { self.transforms.next(&()); break; } } + let (prefix, suffix) = self.input_chunk.text.split_at(input_len); self.input_chunk.text = suffix; Some(Chunk { @@ -1110,7 +1103,7 @@ impl sum_tree::Summary for TransformSummary { } } -impl<'a> sum_tree::Dimension<'a, TransformSummary> for CharPoint { +impl<'a> sum_tree::Dimension<'a, TransformSummary> for TabPoint { fn zero(_cx: &()) -> Self { Default::default() } @@ -1120,7 +1113,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for CharPoint { } } -impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for CharPoint { +impl<'a> sum_tree::SeekTarget<'a, TransformSummary, TransformSummary> for TabPoint { fn cmp(&self, cursor_location: &TransformSummary, _: &()) -> std::cmp::Ordering { Ord::cmp(&self.0, &cursor_location.input.lines) } @@ -1168,7 +1161,7 @@ fn consolidate_wrap_edits(edits: Vec) -> Vec { mod tests { use super::*; use crate::{ - display_map::{char_map::CharMap, fold_map::FoldMap, inlay_map::InlayMap}, + display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap}, MultiBuffer, }; use gpui::{font, px, test::observe}; @@ -1220,9 +1213,9 @@ mod tests { log::info!("InlayMap text: {:?}", inlay_snapshot.text()); let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone()); log::info!("FoldMap text: {:?}", fold_snapshot.text()); - let (mut char_map, _) = CharMap::new(fold_snapshot.clone(), tab_size); - let tabs_snapshot = char_map.set_max_expansion_column(32); - log::info!("CharMap text: {:?}", tabs_snapshot.text()); + let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size); + let tabs_snapshot = tab_map.set_max_expansion_column(32); + log::info!("TabMap text: {:?}", tabs_snapshot.text()); let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size); let unwrapped_text = tabs_snapshot.text(); @@ -1268,7 +1261,7 @@ mod tests { 20..=39 => { for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) { let (tabs_snapshot, tab_edits) = - char_map.sync(fold_snapshot, fold_edits, tab_size); + tab_map.sync(fold_snapshot, fold_edits, tab_size); let (mut snapshot, wrap_edits) = wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); snapshot.check_invariants(); @@ -1281,7 +1274,7 @@ mod tests { inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng); let (fold_snapshot, fold_edits) = 
fold_map.read(inlay_snapshot, inlay_edits); let (tabs_snapshot, tab_edits) = - char_map.sync(fold_snapshot, fold_edits, tab_size); + tab_map.sync(fold_snapshot, fold_edits, tab_size); let (mut snapshot, wrap_edits) = wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx)); snapshot.check_invariants(); @@ -1305,8 +1298,8 @@ mod tests { log::info!("InlayMap text: {:?}", inlay_snapshot.text()); let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits); log::info!("FoldMap text: {:?}", fold_snapshot.text()); - let (tabs_snapshot, tab_edits) = char_map.sync(fold_snapshot, fold_edits, tab_size); - log::info!("CharMap text: {:?}", tabs_snapshot.text()); + let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size); + log::info!("TabMap text: {:?}", tabs_snapshot.text()); let unwrapped_text = tabs_snapshot.text(); let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper); @@ -1352,7 +1345,7 @@ mod tests { if tab_size.get() == 1 || !wrapped_snapshot - .char_snapshot + .tab_snapshot .fold_snapshot .text() .contains('\t') diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 753b7f246d11dcee30431c9f1ada39d7da662c91..7402badc1e9923d1705f19925b68e1e827e78df2 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -68,7 +68,6 @@ use sum_tree::Bias; use theme::{ActiveTheme, Appearance, PlayerColor}; use ui::prelude::*; use ui::{h_flex, ButtonLike, ButtonStyle, ContextMenu, Tooltip}; -use unicode_segmentation::UnicodeSegmentation; use util::RangeExt; use util::ResultExt; use workspace::{item::Item, Workspace}; @@ -1026,21 +1025,23 @@ impl EditorElement { } let block_text = if let CursorShape::Block = selection.cursor_shape { snapshot - .grapheme_at(cursor_position) + .display_chars_at(cursor_position) + .next() .or_else(|| { if cursor_column == 0 { - snapshot.placeholder_text().and_then(|s| { - s.graphemes(true).next().map(|s| s.to_owned()) - }) + snapshot + .placeholder_text() + .and_then(|s| s.chars().next()) + .map(|c| (c, cursor_position)) } else { None } }) - .and_then(|grapheme| { - let text = if grapheme == "\n" { + .and_then(|(character, _)| { + let text = if character == '\n' { SharedString::from(" ") } else { - SharedString::from(grapheme) + SharedString::from(character.to_string()) }; let len = text.len(); diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 688ebf57bb30bc59cab0f9c01ebf428efe9d4111..9200dd7b8c697c5838fa46e738b19e91289aa83e 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1,7 +1,6 @@ use crate::{ display_map::{InlayOffset, ToDisplayPoint}, hover_links::{InlayHighlight, RangeInEditor}, - is_invisible, scroll::ScrollAmount, Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings, EditorSnapshot, Hover, RangeToAnchorExt, @@ -12,7 +11,7 @@ use gpui::{ StyleRefinement, Styled, Task, TextStyleRefinement, View, ViewContext, }; use itertools::Itertools; -use language::{Diagnostic, DiagnosticEntry, Language, LanguageRegistry}; +use language::{DiagnosticEntry, Language, LanguageRegistry}; use lsp::DiagnosticSeverity; use markdown::{Markdown, MarkdownStyle}; use multi_buffer::ToOffset; @@ -200,6 +199,7 @@ fn show_hover( if editor.pending_rename.is_some() { return None; } + let snapshot = editor.snapshot(cx); let (buffer, buffer_position) = editor @@ -259,7 +259,7 @@ fn show_hover( } // If there's a diagnostic, assign it on the hover state and notify - let mut 
local_diagnostic = snapshot + let local_diagnostic = snapshot .buffer_snapshot .diagnostics_in_range::<_, usize>(anchor..anchor, false) // Find the entry with the most specific range @@ -281,42 +281,6 @@ fn show_hover( }) }); - if let Some(invisible) = snapshot - .buffer_snapshot - .chars_at(anchor) - .next() - .filter(|&c| is_invisible(c)) - { - let after = snapshot.buffer_snapshot.anchor_after( - anchor.to_offset(&snapshot.buffer_snapshot) + invisible.len_utf8(), - ); - local_diagnostic = Some(DiagnosticEntry { - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: format!("Unicode character U+{:02X}", invisible as u32), - ..Default::default() - }, - range: anchor..after, - }) - } else if let Some(invisible) = snapshot - .buffer_snapshot - .reversed_chars_at(anchor) - .next() - .filter(|&c| is_invisible(c)) - { - let before = snapshot.buffer_snapshot.anchor_before( - anchor.to_offset(&snapshot.buffer_snapshot) - invisible.len_utf8(), - ); - local_diagnostic = Some(DiagnosticEntry { - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: format!("Unicode character U+{:02X}", invisible as u32), - ..Default::default() - }, - range: before..anchor, - }) - } - let diagnostic_popover = if let Some(local_diagnostic) = local_diagnostic { let text = match local_diagnostic.diagnostic.source { Some(ref source) => { @@ -324,6 +288,7 @@ fn show_hover( } None => local_diagnostic.diagnostic.message.clone(), }; + let mut border_color: Option = None; let mut background_color: Option = None; @@ -379,6 +344,7 @@ fn show_hover( Markdown::new_text(text, markdown_style.clone(), None, cx, None) }) .ok(); + Some(DiagnosticPopover { local_diagnostic, primary_diagnostic, @@ -466,6 +432,7 @@ fn show_hover( cx.notify(); cx.refresh(); })?; + anyhow::Ok(()) } .log_err() diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index ff5d91f0224713f7ab5ab7801977837cb1b0f8fc..240654e57e1488301956768485d61b34a5cb56f0 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -1,7 +1,6 @@ use crate::{ - black, fill, point, px, size, Bounds, Half, Hsla, LineLayout, Pixels, Point, Result, - SharedString, StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary, - WrappedLineLayout, + black, fill, point, px, size, Bounds, Hsla, LineLayout, Pixels, Point, Result, SharedString, + StrikethroughStyle, UnderlineStyle, WindowContext, WrapBoundary, WrappedLineLayout, }; use derive_more::{Deref, DerefMut}; use smallvec::SmallVec; @@ -130,9 +129,8 @@ fn paint_line( let text_system = cx.text_system().clone(); let mut glyph_origin = origin; let mut prev_glyph_position = Point::default(); - let mut max_glyph_size = size(px(0.), px(0.)); for (run_ix, run) in layout.runs.iter().enumerate() { - max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size; + let max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size; for (glyph_ix, glyph) in run.glyphs.iter().enumerate() { glyph_origin.x += glyph.position.x - prev_glyph_position.x; @@ -141,9 +139,6 @@ fn paint_line( wraps.next(); if let Some((background_origin, background_color)) = current_background.as_mut() { - if glyph_origin.x == background_origin.x { - background_origin.x -= max_glyph_size.width.half() - } cx.paint_quad(fill( Bounds { origin: *background_origin, @@ -155,9 +150,6 @@ fn paint_line( background_origin.y += line_height; } if let Some((underline_origin, underline_style)) = current_underline.as_mut() { - if glyph_origin.x == 
underline_origin.x { - underline_origin.x -= max_glyph_size.width.half(); - }; cx.paint_underline( *underline_origin, glyph_origin.x - underline_origin.x, @@ -169,9 +161,6 @@ fn paint_line( if let Some((strikethrough_origin, strikethrough_style)) = current_strikethrough.as_mut() { - if glyph_origin.x == strikethrough_origin.x { - strikethrough_origin.x -= max_glyph_size.width.half(); - }; cx.paint_strikethrough( *strikethrough_origin, glyph_origin.x - strikethrough_origin.x, @@ -190,18 +179,7 @@ fn paint_line( let mut finished_underline: Option<(Point, UnderlineStyle)> = None; let mut finished_strikethrough: Option<(Point, StrikethroughStyle)> = None; if glyph.index >= run_end { - let mut style_run = decoration_runs.next(); - - // ignore style runs that apply to a partial glyph - while let Some(run) = style_run { - if glyph.index < run_end + (run.len as usize) { - break; - } - run_end += run.len as usize; - style_run = decoration_runs.next(); - } - - if let Some(style_run) = style_run { + if let Some(style_run) = decoration_runs.next() { if let Some((_, background_color)) = &mut current_background { if style_run.background_color.as_ref() != Some(background_color) { finished_background = current_background.take(); @@ -262,14 +240,10 @@ fn paint_line( } if let Some((background_origin, background_color)) = finished_background { - let mut width = glyph_origin.x - background_origin.x; - if width == px(0.) { - width = px(5.) - }; cx.paint_quad(fill( Bounds { origin: background_origin, - size: size(width, line_height), + size: size(glyph_origin.x - background_origin.x, line_height), }, background_color, )); @@ -325,10 +299,7 @@ fn paint_line( last_line_end_x -= glyph.position.x; } - if let Some((mut background_origin, background_color)) = current_background.take() { - if last_line_end_x == background_origin.x { - background_origin.x -= max_glyph_size.width.half() - }; + if let Some((background_origin, background_color)) = current_background.take() { cx.paint_quad(fill( Bounds { origin: background_origin, @@ -338,10 +309,7 @@ fn paint_line( )); } - if let Some((mut underline_start, underline_style)) = current_underline.take() { - if last_line_end_x == underline_start.x { - underline_start.x -= max_glyph_size.width.half() - }; + if let Some((underline_start, underline_style)) = current_underline.take() { cx.paint_underline( underline_start, last_line_end_x - underline_start.x, @@ -349,10 +317,7 @@ fn paint_line( ); } - if let Some((mut strikethrough_start, strikethrough_style)) = current_strikethrough.take() { - if last_line_end_x == strikethrough_start.x { - strikethrough_start.x -= max_glyph_size.width.half() - }; + if let Some((strikethrough_start, strikethrough_style)) = current_strikethrough.take() { cx.paint_strikethrough( strikethrough_start, last_line_end_x - strikethrough_start.x, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 06492a0c58b423dd3f59b080e5fe6a9a66709998..26fb620ac1498fff82630fb2d4f3abde5007623b 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -501,8 +501,6 @@ pub struct Chunk<'a> { pub is_unnecessary: bool, /// Whether this chunk of text was originally a tab character. pub is_tab: bool, - /// Whether this chunk of text is an invisible character. - pub is_invisible: bool, /// An optional recipe for how the chunk should be presented. 
pub renderer: Option, } @@ -4213,6 +4211,7 @@ impl<'a> Iterator for BufferChunks<'a> { if self.range.start == self.chunks.offset() + chunk.len() { self.chunks.next().unwrap(); } + Some(Chunk { text: slice, syntax_highlight_id: highlight_id, From 24cb694494a1914b19610663a31fd5db90316ae2 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 25 Oct 2024 12:09:21 -0600 Subject: [PATCH 37/76] Update placeholder text with key bindings to focus context panel and navigate history (#19447) Hopefully, this will help people understand how easy it is to add context to an inline transformation. ![CleanShot 2024-10-18 at 22 41 00@2x](https://github.com/user-attachments/assets/c09c1d89-3df2-4079-9849-9de7ac63c003) @as-cii @maxdeviant @rtfeldman could somebody update this to display the actual correct key bindings and ship it. I have them hard coded for now. Release Notes: - Updated placeholder text with key bindings to focus context panel and navigate history. --------- Co-authored-by: Richard Feldman --- crates/assistant/src/inline_assistant.rs | 23 ++++++++++++++++++++--- crates/assistant/src/prompts.rs | 2 +- crates/ui/src/components/keybinding.rs | 2 +- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 1134747d558041c5a238522d7082ecdc0d86727d..9af8193605f00f8bff2de3f9f8ed268fdb8267ff 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -54,7 +54,7 @@ use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase}; use terminal_view::terminal_panel::TerminalPanel; use text::{OffsetRangeExt, ToPoint as _}; use theme::ThemeSettings; -use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip}; +use ui::{prelude::*, text_for_action, CheckboxWithLabel, IconButtonShape, Popover, Tooltip}; use util::{RangeExt, ResultExt}; use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace}; @@ -1596,7 +1596,7 @@ impl PromptEditor { // always show the cursor (even when it isn't focused) because // typing in one will make what you typed appear in all of them. 
editor.set_show_cursor_when_unfocused(true, cx); - editor.set_placeholder_text("Add a prompt…", cx); + editor.set_placeholder_text(Self::placeholder_text(codegen.read(cx), cx), cx); editor }); @@ -1653,6 +1653,7 @@ impl PromptEditor { self.editor = cx.new_view(|cx| { let mut editor = Editor::auto_height(Self::MAX_LINES as usize, cx); editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx); + editor.set_placeholder_text(Self::placeholder_text(self.codegen.read(cx), cx), cx); editor.set_placeholder_text("Add a prompt…", cx); editor.set_text(prompt, cx); if focus { @@ -1663,6 +1664,20 @@ impl PromptEditor { self.subscribe_to_editor(cx); } + fn placeholder_text(codegen: &Codegen, cx: &WindowContext) -> String { + let context_keybinding = text_for_action(&crate::ToggleFocus, cx) + .map(|keybinding| format!(" • {keybinding} for context")) + .unwrap_or_default(); + + let action = if codegen.is_insertion { + "Generate" + } else { + "Transform" + }; + + format!("{action}…{context_keybinding} • ↓↑ for history") + } + fn prompt(&self, cx: &AppContext) -> String { self.editor.read(cx).text(cx) } @@ -2260,6 +2275,7 @@ pub struct Codegen { initial_transaction_id: Option, telemetry: Option>, builder: Arc, + is_insertion: bool, } impl Codegen { @@ -2282,6 +2298,7 @@ impl Codegen { ) }); let mut this = Self { + is_insertion: range.to_offset(&buffer.read(cx).snapshot(cx)).is_empty(), alternatives: vec![codegen], active_alternative: 0, seen_alternatives: HashSet::default(), @@ -2683,7 +2700,7 @@ impl CodegenAlternative { let prompt = self .builder - .generate_content_prompt(user_prompt, language_name, buffer, range) + .generate_inline_transformation_prompt(user_prompt, language_name, buffer, range) .map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?; let mut messages = Vec::new(); diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 132b3df68f4bc6746021d06360889daa5992d78f..2d0829086c8bfcd2a48d46b67edd5a926a733948 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -204,7 +204,7 @@ impl PromptBuilder { Ok(()) } - pub fn generate_content_prompt( + pub fn generate_inline_transformation_prompt( &self, user_prompt: String, language_name: Option<&LanguageName>, diff --git a/crates/ui/src/components/keybinding.rs b/crates/ui/src/components/keybinding.rs index 135599b914bc03b8be0698d9f069e1eb5d0dfb18..cd45a11d9fd4d49c1b8b98099238101d86007516 100644 --- a/crates/ui/src/components/keybinding.rs +++ b/crates/ui/src/components/keybinding.rs @@ -196,7 +196,7 @@ impl KeyIcon { } /// Returns a textual representation of the key binding for the given [`Action`]. -pub fn text_for_action(action: &dyn Action, cx: &mut WindowContext) -> Option { +pub fn text_for_action(action: &dyn Action, cx: &WindowContext) -> Option { let key_binding = cx.bindings_for_action(action).last().cloned()?; Some(text_for_key_binding(key_binding, PlatformStyle::platform())) } From c9db1b9a7bc272b26e436111ca0e38038c0b9ae4 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 25 Oct 2024 14:10:04 -0400 Subject: [PATCH 38/76] Add keybindings for accepting hunks (#19749) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I went with Cmd-Shift-Y on macOS (Ctrl-Shift-Y on Linux) for "yes accept this individual hunk" - both are currently unused. I went with Cmd-Shift-A on macOS (Ctrl-Alt-A on Linux) for "accept all hunks" - both are unused. 
(Ctrl-Shift-A on Linux was taken, as is Ctrl-Alt-Y, so although the pairing of Ctrl-Shift-Y and Ctrl-Alt-A isn't necessarily obvious, the letters seem intuitive - "yes" and "all" - and those key combinations don't conflict with anything.) Release Notes: - Added keybindings for applying hunks in Proposed Changes Screenshot 2024-10-25 at 12 47 00 PM --- assets/keymaps/default-linux.json | 7 +++++ assets/keymaps/default-macos.json | 7 +++++ crates/editor/src/actions.rs | 1 + crates/editor/src/element.rs | 1 + crates/editor/src/hunk_diff.rs | 14 ++++++--- crates/editor/src/proposed_changes_editor.rs | 33 ++++++++++++-------- 6 files changed, 45 insertions(+), 18 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index fca38a45a436fc37bc3a8f60cac2c91394f9660b..9df8debb70ea98d9dea5a7fb40eadc5c1d3a05bf 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -505,6 +505,13 @@ "ctrl-enter": "assistant::InlineAssist" } }, + { + "context": "ProposedChangesEditor", + "bindings": { + "ctrl-shift-y": "editor::ApplyDiffHunk", + "ctrl-alt-a": "editor::ApplyAllDiffHunks" + } + }, { "context": "Editor && jupyter && !ContextEditor", "bindings": { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index c39b7c06daed595a67af10f1cd8c97b08004f000..c0bad344ab621c9a3e6edc63d54057839b2ba92d 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -538,6 +538,13 @@ "ctrl-enter": "assistant::InlineAssist" } }, + { + "context": "ProposedChangesEditor", + "bindings": { + "cmd-shift-y": "editor::ApplyDiffHunk", + "cmd-shift-a": "editor::ApplyAllDiffHunks" + } + }, { "context": "PromptEditor", "bindings": { diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 5f866f9997297e71cfedac4cab6c44dcd47f68b7..b190283776eae2d48f5449cb7072128903e7cbf8 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -193,6 +193,7 @@ gpui::actions!( AcceptPartialInlineCompletion, AddSelectionAbove, AddSelectionBelow, + ApplyAllDiffHunks, ApplyDiffHunk, Backspace, Cancel, diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 7402badc1e9923d1705f19925b68e1e827e78df2..12e9d74914aa43c14660d90991e36f58896723c7 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -444,6 +444,7 @@ impl EditorElement { register_action(view, cx, Editor::accept_inline_completion); register_action(view, cx, Editor::revert_file); register_action(view, cx, Editor::revert_selected_hunks); + register_action(view, cx, Editor::apply_all_diff_hunks); register_action(view, cx, Editor::apply_selected_diff_hunks); register_action(view, cx, Editor::open_active_item_in_terminal); register_action(view, cx, Editor::reload_file) diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 9f66d27a644e501081ecdc4ff3f42cf52775398e..c55f36a9be4801aa61af92cf84de234606acb2dc 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -16,10 +16,10 @@ use util::RangeExt; use workspace::Item; use crate::{ - editor_settings::CurrentLineHighlight, hunk_status, hunks_for_selections, ApplyDiffHunk, - BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, DisplayRow, - DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, RevertFile, - RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, + editor_settings::CurrentLineHighlight, hunk_status, 
hunks_for_selections, ApplyAllDiffHunks, + ApplyDiffHunk, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, DiffRowHighlight, + DisplayRow, DisplaySnapshot, Editor, EditorElement, ExpandAllHunkDiffs, GoToHunk, GoToPrevHunk, + RevertFile, RevertSelectedHunks, ToDisplayPoint, ToggleHunkDiff, }; #[derive(Debug, Clone)] @@ -352,7 +352,11 @@ impl Editor { None } - pub(crate) fn apply_all_diff_hunks(&mut self, cx: &mut ViewContext) { + pub(crate) fn apply_all_diff_hunks( + &mut self, + _: &ApplyAllDiffHunks, + cx: &mut ViewContext, + ) { let buffers = self.buffer.read(cx).all_buffers(); for branch_buffer in buffers { branch_buffer.update(cx, |branch_buffer, cx| { diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index ae9f251a1f14db4b1f1d521c14aef109b1c1515f..ac97fe18da757d856da2843b45c568bc1e0a46a6 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -1,4 +1,4 @@ -use crate::{Editor, EditorEvent, SemanticsProvider}; +use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SemanticsProvider}; use collections::HashSet; use futures::{channel::mpsc, future::join_all}; use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View}; @@ -8,7 +8,7 @@ use project::Project; use smol::stream::StreamExt; use std::{any::TypeId, ops::Range, rc::Rc, time::Duration}; use text::ToOffset; -use ui::prelude::*; +use ui::{prelude::*, ButtonLike, KeyBinding}; use workspace::{ searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, @@ -232,7 +232,10 @@ impl ProposedChangesEditor { impl Render for ProposedChangesEditor { fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { - self.editor.clone() + div() + .size_full() + .key_context("ProposedChangesEditor") + .child(self.editor.clone()) } } @@ -331,17 +334,21 @@ impl ProposedChangesEditorToolbar { } impl Render for ProposedChangesEditorToolbar { - fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { - let editor = self.current_editor.clone(); - Button::new("apply-changes", "Apply All").on_click(move |_, cx| { - if let Some(editor) = &editor { - editor.update(cx, |editor, cx| { - editor.editor.update(cx, |editor, cx| { - editor.apply_all_diff_hunks(cx); - }) - }); + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + let button_like = ButtonLike::new("apply-changes").child(Label::new("Apply All")); + + match &self.current_editor { + Some(editor) => { + let focus_handle = editor.focus_handle(cx); + let keybinding = KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, cx) + .map(|binding| binding.into_any_element()); + + button_like.children(keybinding).on_click({ + move |_event, cx| focus_handle.dispatch_action(&ApplyAllDiffHunks, cx) + }) } - }) + None => button_like.disabled(true), + } } } From 6de5ace116114678b3567196277c5135a5578d8d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 25 Oct 2024 22:04:02 +0300 Subject: [PATCH 39/76] Update outline panel representation when a theme is changed (#19747) Release Notes: - N/A --- crates/outline_panel/src/outline_panel.rs | 28 ++++++++++++++++------- crates/theme/src/schema.rs | 16 ++++++------- crates/theme/src/settings.rs | 4 ++-- crates/theme/src/styles/accents.rs | 2 +- crates/theme/src/styles/colors.rs | 4 ++-- crates/theme/src/styles/players.rs | 4 ++-- crates/theme/src/styles/status.rs | 2 +- crates/theme/src/styles/system.rs | 2 +- 
crates/theme/src/theme.rs | 2 +- 9 files changed, 38 insertions(+), 26 deletions(-) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 4944f770e73a6c8106a1630bccb25c0ecc2b24cb..25dd5cba8dcbfa091b0f5dfdddd9ed822d6e1306 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -41,7 +41,7 @@ use search::{BufferSearchBar, ProjectSearchView}; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; use smol::channel; -use theme::SyntaxTheme; +use theme::{SyntaxTheme, ThemeSettings}; use util::{debug_panic, RangeExt, ResultExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, @@ -653,13 +653,25 @@ impl OutlinePanel { }); let mut outline_panel_settings = *OutlinePanelSettings::get_global(cx); - let settings_subscription = cx.observe_global::(move |_, cx| { - let new_settings = *OutlinePanelSettings::get_global(cx); - if outline_panel_settings != new_settings { - outline_panel_settings = new_settings; - cx.notify(); - } - }); + let mut current_theme = ThemeSettings::get_global(cx).clone(); + let settings_subscription = + cx.observe_global::(move |outline_panel, cx| { + let new_settings = OutlinePanelSettings::get_global(cx); + let new_theme = ThemeSettings::get_global(cx); + if ¤t_theme != new_theme { + outline_panel_settings = *new_settings; + current_theme = new_theme.clone(); + for excerpts in outline_panel.excerpts.values_mut() { + for excerpt in excerpts.values_mut() { + excerpt.invalidate_outlines(); + } + } + outline_panel.update_non_fs_items(cx); + } else if &outline_panel_settings != new_settings { + outline_panel_settings = *new_settings; + cx.notify(); + } + }); let mut outline_panel = Self { mode: ItemsDisplayMode::Outline, diff --git a/crates/theme/src/schema.rs b/crates/theme/src/schema.rs index 88e24f08ff56322edfcd6f3857cf3363f99e700d..440ac332c4744c435115f64913d3c98d6525b381 100644 --- a/crates/theme/src/schema.rs +++ b/crates/theme/src/schema.rs @@ -71,7 +71,7 @@ pub struct ThemeContent { } /// The content of a serialized theme. -#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq)] #[serde(default)] pub struct ThemeStyleContent { #[serde(default, rename = "background.appearance")] @@ -133,7 +133,7 @@ impl ThemeStyleContent { } } -#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq)] #[serde(default)] pub struct ThemeColorsContent { /// Border color. Used for most borders, is usually a high contrast color. 
@@ -952,7 +952,7 @@ impl ThemeColorsContent { } } -#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq)] #[serde(default)] pub struct StatusColorsContent { /// Indicates some kind of conflict, like a file changed on disk while it was open, or @@ -1273,17 +1273,17 @@ impl StatusColorsContent { } } -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq)] pub struct AccentContent(pub Option); -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq)] pub struct PlayerColorContent { pub cursor: Option, pub background: Option, pub selection: Option, } -#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, PartialEq)] #[serde(rename_all = "snake_case")] pub enum FontStyleContent { Normal, @@ -1301,7 +1301,7 @@ impl From for FontStyle { } } -#[derive(Debug, Clone, Copy, Serialize_repr, Deserialize_repr)] +#[derive(Debug, Clone, Copy, Serialize_repr, Deserialize_repr, PartialEq)] #[repr(u16)] pub enum FontWeightContent { Thin = 100, @@ -1359,7 +1359,7 @@ impl From for FontWeight { } } -#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, PartialEq)] #[serde(default)] pub struct HighlightStyleContent { pub color: Option, diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 49f7ba3c83676626ae69817add0d3f3d905db636..4d8158388cf0654a151c617d524edbf25685da66 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -86,7 +86,7 @@ impl From for String { } /// Customizable settings for the UI and theme system. -#[derive(Clone)] +#[derive(Clone, PartialEq)] pub struct ThemeSettings { /// The UI font size. Determines the size of text in the UI, /// as well as the size of a [gpui::Rems] unit. @@ -213,7 +213,7 @@ pub(crate) struct AdjustedUiFontSize(Pixels); impl Global for AdjustedUiFontSize {} /// Represents the selection of a theme, which can be either static or dynamic. -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] #[serde(untagged)] pub enum ThemeSelection { /// A static theme selection, represented by a single theme name. diff --git a/crates/theme/src/styles/accents.rs b/crates/theme/src/styles/accents.rs index e4d7f03cf622016d353914ffc4598c781ee13d5a..773e3319acf95c7136b2e6f71c32db3774761d6a 100644 --- a/crates/theme/src/styles/accents.rs +++ b/crates/theme/src/styles/accents.rs @@ -7,7 +7,7 @@ use crate::{ }; /// A collection of colors that are used to color indent aware lines in the editor. 
-#[derive(Clone, Deserialize)] +#[derive(Clone, Deserialize, PartialEq)] pub struct AccentColors(pub Vec); impl Default for AccentColors { diff --git a/crates/theme/src/styles/colors.rs b/crates/theme/src/styles/colors.rs index 485a8e4b9e13d6b82545793c893da911db78e01d..d9ea58813c6afc253f4b17f2b83fd25a18c13b7b 100644 --- a/crates/theme/src/styles/colors.rs +++ b/crates/theme/src/styles/colors.rs @@ -8,7 +8,7 @@ use crate::{ AccentColors, PlayerColors, StatusColors, StatusColorsRefinement, SyntaxTheme, SystemColors, }; -#[derive(Refineable, Clone, Debug)] +#[derive(Refineable, Clone, Debug, PartialEq)] #[refineable(Debug, serde::Deserialize)] pub struct ThemeColors { /// Border color. Used for most borders, is usually a high contrast color. @@ -249,7 +249,7 @@ pub struct ThemeColors { pub link_text_hover: Hsla, } -#[derive(Refineable, Clone)] +#[derive(Refineable, Clone, PartialEq)] pub struct ThemeStyles { /// The background appearance of the window. pub window_background_appearance: WindowBackgroundAppearance, diff --git a/crates/theme/src/styles/players.rs b/crates/theme/src/styles/players.rs index 130721033239ce67bf2fa0d527cb37e529566c1c..262048f2c65100a5ae3bea8ee465fcc2011650a0 100644 --- a/crates/theme/src/styles/players.rs +++ b/crates/theme/src/styles/players.rs @@ -7,7 +7,7 @@ use crate::{ amber, blue, jade, lime, orange, pink, purple, red, try_parse_color, PlayerColorContent, }; -#[derive(Debug, Clone, Copy, Deserialize, Default)] +#[derive(Debug, Clone, Copy, Deserialize, Default, PartialEq)] pub struct PlayerColor { pub cursor: Hsla, pub background: Hsla, @@ -20,7 +20,7 @@ pub struct PlayerColor { /// /// The rest of the default colors crisscross back and forth on the /// color wheel so that the colors are as distinct as possible. -#[derive(Clone, Deserialize)] +#[derive(Clone, Deserialize, PartialEq)] pub struct PlayerColors(pub Vec); impl Default for PlayerColors { diff --git a/crates/theme/src/styles/status.rs b/crates/theme/src/styles/status.rs index 84afae701d0f099beea955b65a7828c6c0df623a..0d59f7c51c210c0f707fec55821a092cf47f3ac1 100644 --- a/crates/theme/src/styles/status.rs +++ b/crates/theme/src/styles/status.rs @@ -5,7 +5,7 @@ use refineable::Refineable; use crate::{blue, grass, neutral, red, yellow}; -#[derive(Refineable, Clone, Debug)] +#[derive(Refineable, Clone, Debug, PartialEq)] #[refineable(Debug, serde::Deserialize)] pub struct StatusColors { /// Indicates some kind of conflict, like a file changed on disk while it was open, or diff --git a/crates/theme/src/styles/system.rs b/crates/theme/src/styles/system.rs index 54e892b79c49f96e66c3817e9750fed3504327da..4f33711793b5f6dbd004fcc2b066ac542c15257b 100644 --- a/crates/theme/src/styles/system.rs +++ b/crates/theme/src/styles/system.rs @@ -2,7 +2,7 @@ use gpui::{hsla, Hsla}; -#[derive(Clone)] +#[derive(Clone, PartialEq)] pub struct SystemColors { pub transparent: Hsla, pub mac_os_traffic_light_red: Hsla, diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index d4436e53295278349cff6b2f522d74c2585232f8..c62359242dd9d24093abfccfcc583262c94a99e9 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -140,7 +140,7 @@ pub struct ThemeFamily { impl ThemeFamily {} /// A theme is the primary mechanism for defining the appearance of the UI. -#[derive(Clone)] +#[derive(Clone, PartialEq)] pub struct Theme { /// The unique identifier for the theme. 
pub id: String, From 92ba18342c1bbb2fea3461e174fbd3c3b644623c Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 25 Oct 2024 22:04:09 +0300 Subject: [PATCH 40/76] Properly deserialize active pane in the workspace (#19744) Without setting the active pane metadata, no center pane events are emitted on start before the pane is focused manually, which breaks deserialization of other components like outline panel, which should show the active pane's active item outlines on start. Release Notes: - N/A Co-authored-by: Thorsten Ball --- crates/workspace/src/workspace.rs | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 6338c6fcbd169222b37375f465ebbdad0775ada8..a81174020b0ed0a947952b542d01e80ad20ca657 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -2947,9 +2947,7 @@ impl Workspace { status_bar.set_active_pane(&pane, cx); }); if self.active_pane != pane { - self.active_pane = pane.clone(); - self.active_item_path_changed(cx); - self.last_active_center_pane = Some(pane.downgrade()); + self.set_active_pane(&pane, cx); } if self.last_active_center_pane.is_none() { @@ -2972,6 +2970,12 @@ impl Workspace { cx.notify(); } + fn set_active_pane(&mut self, pane: &View, cx: &mut ViewContext) { + self.active_pane = pane.clone(); + self.active_item_path_changed(cx); + self.last_active_center_pane = Some(pane.downgrade()); + } + fn handle_panel_focused(&mut self, cx: &mut ViewContext) { self.update_active_view_for_followers(cx); } @@ -4263,12 +4267,11 @@ impl Workspace { // Swap workspace center group workspace.center = PaneGroup::with_root(center_group); - workspace.last_active_center_pane = active_pane.as_ref().map(|p| p.downgrade()); if let Some(active_pane) = active_pane { - workspace.active_pane = active_pane; + workspace.set_active_pane(&active_pane, cx); cx.focus_self(); } else { - workspace.active_pane = workspace.center.first_pane().clone(); + workspace.set_active_pane(&workspace.center.first_pane(), cx); } } From 7d0a7aff4435961546d3a8fe237cee0cd51127ee Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 25 Oct 2024 12:37:03 -0700 Subject: [PATCH 41/76] Fix condition for re-using highlights when seeking buffer chunks iterator (#19760) Fixes a syntax highlighting regression introduced in https://github.com/zed-industries/zed/pull/19531, which caused syntax highlighting to be missing after any block. 
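For illustration, here is a minimal standalone sketch of the corrected reuse condition (this is not the actual `BufferChunks` code, and `can_reuse_highlights` is an invented name for this example): the cached highlight stack may only be reused when the newly requested range is a subrange of the previously highlighted one.

```rust
use std::ops::Range;

// Sketch of the corrected check: reuse the cached highlight stack only when the
// previously highlighted (old) range fully contains the newly requested range.
// The regression came from this comparison being inverted.
fn can_reuse_highlights(old_range: &Range<usize>, new_range: &Range<usize>) -> bool {
    old_range.start <= new_range.start && old_range.end >= new_range.end
}

fn main() {
    let highlighted: Range<usize> = 0..100;
    assert!(can_reuse_highlights(&highlighted, &(10..50))); // subrange: safe to reuse
    assert!(!can_reuse_highlights(&highlighted, &(50..120))); // extends past the cache: recompute
    println!("reuse condition behaves as expected");
}
```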
Release Notes: - N/A --- crates/editor/src/display_map.rs | 105 +++++++++++++++++++++++++++++-- crates/language/src/buffer.rs | 2 +- 2 files changed, 102 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 67b7e5b60f7e9117ae7bbd40b9b66a5b6a1e2a29..559c2321c6208963ec8f27dd1a68c160404698fd 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1168,6 +1168,7 @@ pub mod tests { use smol::stream::StreamExt; use std::{env, sync::Arc}; use theme::{LoadThemes, SyntaxTheme}; + use unindent::Unindent as _; use util::test::{marked_text_ranges, sample_text}; use Bias::*; @@ -1624,8 +1625,6 @@ pub mod tests { #[gpui::test] async fn test_chunks(cx: &mut gpui::TestAppContext) { - use unindent::Unindent as _; - let text = r#" fn outer() {} @@ -1722,12 +1721,110 @@ pub mod tests { ); } + #[gpui::test] + async fn test_chunks_with_syntax_highlighting_across_blocks(cx: &mut gpui::TestAppContext) { + cx.background_executor + .set_block_on_ticks(usize::MAX..=usize::MAX); + + let text = r#" + const A: &str = " + one + two + three + "; + const B: &str = "four"; + "# + .unindent(); + + let theme = SyntaxTheme::new_test(vec![ + ("string", Hsla::red()), + ("punctuation", Hsla::blue()), + ("keyword", Hsla::green()), + ]); + let language = Arc::new( + Language::new( + LanguageConfig { + name: "Rust".into(), + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ) + .with_highlights_query( + r#" + (string_literal) @string + "const" @keyword + [":" ";"] @punctuation + "#, + ) + .unwrap(), + ); + language.set_theme(&theme); + + cx.update(|cx| init_test(cx, |_| {})); + + let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx)); + cx.condition(&buffer, |buf, _| !buf.is_parsing()).await; + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx)); + + let map = cx.new_model(|cx| { + DisplayMap::new( + buffer, + font("Courier"), + px(16.0), + None, + true, + 1, + 1, + 0, + FoldPlaceholder::test(), + cx, + ) + }); + + // Insert a block in the middle of a multi-line string literal + map.update(cx, |map, cx| { + map.insert_blocks( + [BlockProperties { + placement: BlockPlacement::Below( + buffer_snapshot.anchor_before(Point::new(1, 0)), + ), + height: 1, + style: BlockStyle::Sticky, + render: Box::new(|_| div().into_any()), + priority: 0, + }], + cx, + ) + }); + + pretty_assertions::assert_eq!( + cx.update(|cx| syntax_chunks(DisplayRow(0)..DisplayRow(7), &map, &theme, cx)), + [ + ("const".into(), Some(Hsla::green())), + (" A".into(), None), + (":".into(), Some(Hsla::blue())), + (" &str = ".into(), None), + ("\"\n one\n".into(), Some(Hsla::red())), + ("\n".into(), None), + (" two\n three\n\"".into(), Some(Hsla::red())), + (";".into(), Some(Hsla::blue())), + ("\n".into(), None), + ("const".into(), Some(Hsla::green())), + (" B".into(), None), + (":".into(), Some(Hsla::blue())), + (" &str = ".into(), None), + ("\"four\"".into(), Some(Hsla::red())), + (";".into(), Some(Hsla::blue())), + ("\n".into(), None), + ] + ); + } + // todo(linux) fails due to pixel differences in text rendering #[cfg(target_os = "macos")] #[gpui::test] async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) { - use unindent::Unindent as _; - cx.background_executor .set_block_on_ticks(usize::MAX..=usize::MAX); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 
26fb620ac1498fff82630fb2d4f3abde5007623b..1c64475c9a58c86c892e69bcb3adb88f7bca39a1 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -4035,7 +4035,7 @@ impl<'a> BufferChunks<'a> { let old_range = std::mem::replace(&mut self.range, range.clone()); self.chunks.set_range(self.range.clone()); if let Some(highlights) = self.highlights.as_mut() { - if old_range.start >= self.range.start && old_range.end <= self.range.end { + if old_range.start <= self.range.start && old_range.end >= self.range.end { // Reuse existing highlights stack, as the new range is a subrange of the old one. highlights .stack From 507929cb790f78bce4c90f111d9caf4e876ca98e Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 25 Oct 2024 16:08:37 -0400 Subject: [PATCH 42/76] Add `editor: fold at level ` commands (#19750) Closes https://github.com/zed-industries/zed/issues/5142 Note that I only moved the cursor to the top of the file so it wouldn't jump - the commands work no matter where you are in the file. https://github.com/user-attachments/assets/78c74ca6-5c17-477c-b5d1-97c5665e44b0 Also, is VS Code doing this right thing here? or is it busted? https://github.com/user-attachments/assets/8c503b50-9671-4221-b9f8-1e692fe8cd9a Release Notes: - Added `editor: fold at level ` commands. macOS: `cmd-k, cmd-`, Linux: `ctrl-k, ctrl-`. --------- Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> --- assets/keymaps/default-linux.json | 9 +++++++ assets/keymaps/default-macos.json | 10 +++++++- crates/editor/src/actions.rs | 5 ++++ crates/editor/src/editor.rs | 41 ++++++++++++++++++++++++++----- crates/editor/src/element.rs | 1 + 5 files changed, 59 insertions(+), 7 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 9df8debb70ea98d9dea5a7fb40eadc5c1d3a05bf..4f55fa9772b4dbe058d1f096b06153285b773142 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -313,6 +313,15 @@ "ctrl-k ctrl-l": "editor::ToggleFold", "ctrl-k ctrl-[": "editor::FoldRecursive", "ctrl-k ctrl-]": "editor::UnfoldRecursive", + "ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }], + "ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }], + "ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }], + "ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }], + "ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }], + "ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }], + "ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }], + "ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }], + "ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }], "ctrl-k ctrl-0": "editor::FoldAll", "ctrl-k ctrl-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index c0bad344ab621c9a3e6edc63d54057839b2ba92d..ade3ece1eda930dc0180387b001fdc90fc1a04e5 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -349,7 +349,15 @@ "alt-cmd-]": "editor::UnfoldLines", "cmd-k cmd-l": "editor::ToggleFold", "cmd-k cmd-[": "editor::FoldRecursive", - "cmd-k cmd-]": "editor::UnfoldRecursive", + "cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }], + "cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }], + "cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }], + "cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }], + "cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }], + "cmd-k cmd-6": ["editor::FoldAtLevel", { 
"level": 6 }], + "cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }], + "cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }], + "cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }], "cmd-k cmd-0": "editor::FoldAll", "cmd-k cmd-j": "editor::UnfoldAll", "ctrl-space": "editor::ShowCompletions", diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index b190283776eae2d48f5449cb7072128903e7cbf8..83379e13aed32ef24660f4cb58cce02d161b57c1 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -153,6 +153,10 @@ pub struct DeleteToPreviousWordStart { pub ignore_newlines: bool, } +#[derive(PartialEq, Clone, Deserialize, Default)] +pub struct FoldAtLevel { + pub level: u32, +} impl_actions!( editor, [ @@ -182,6 +186,7 @@ impl_actions!( ToggleCodeActions, ToggleComments, UnfoldAt, + FoldAtLevel ] ); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f3fb5cd3606c85b2b0b21bd6e39ad3a957076560..641ccc27151bd9a5a373266f68a3f29281790a04 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10728,15 +10728,44 @@ impl Editor { self.fold_ranges(fold_ranges, true, cx); } + fn fold_at_level(&mut self, fold_at: &FoldAtLevel, cx: &mut ViewContext) { + let fold_at_level = fold_at.level; + let snapshot = self.buffer.read(cx).snapshot(cx); + let mut fold_ranges = Vec::new(); + let mut stack = vec![(0, snapshot.max_buffer_row().0, 1)]; + + while let Some((mut start_row, end_row, current_level)) = stack.pop() { + while start_row < end_row { + match self.snapshot(cx).foldable_range(MultiBufferRow(start_row)) { + Some(foldable_range) => { + let nested_start_row = foldable_range.0.start.row + 1; + let nested_end_row = foldable_range.0.end.row; + + if current_level == fold_at_level { + fold_ranges.push(foldable_range); + } + + if current_level <= fold_at_level { + stack.push((nested_start_row, nested_end_row, current_level + 1)); + } + + start_row = nested_end_row + 1; + } + None => start_row += 1, + } + } + } + + self.fold_ranges(fold_ranges, true, cx); + } + pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext) { let mut fold_ranges = Vec::new(); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let snapshot = self.buffer.read(cx).snapshot(cx); - for row in 0..display_map.max_buffer_row().0 { - if let Some((foldable_range, fold_text)) = - display_map.foldable_range(MultiBufferRow(row)) - { - fold_ranges.push((foldable_range, fold_text)); + for row in 0..snapshot.max_buffer_row().0 { + if let Some(foldable_range) = self.snapshot(cx).foldable_range(MultiBufferRow(row)) { + fold_ranges.push(foldable_range); } } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 12e9d74914aa43c14660d90991e36f58896723c7..2c3bed7eb784061b74eb03b9876dad0733af561b 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -336,6 +336,7 @@ impl EditorElement { register_action(view, cx, Editor::open_url); register_action(view, cx, Editor::open_file); register_action(view, cx, Editor::fold); + register_action(view, cx, Editor::fold_at_level); register_action(view, cx, Editor::fold_all); register_action(view, cx, Editor::fold_at); register_action(view, cx, Editor::fold_recursive); From c19c89e6df1e1b0af6a12f9fd995b453a944b4d3 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 25 Oct 2024 16:29:20 -0400 Subject: [PATCH 43/76] collab: Include `checkout_complete` query parameter after checking out (#19763) This PR updates the checkout flow to include 
the `?checkout_complete=1` query parameter after successfully checking out. We'll use this on the account page to adapt the UI accordingly. Release Notes: - N/A --- crates/collab/src/api/billing.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/collab/src/api/billing.rs b/crates/collab/src/api/billing.rs index 5b65249e083f156227e1d737696c2cd0f327699a..d431e4c043543f399f3402cef1668245c838290d 100644 --- a/crates/collab/src/api/billing.rs +++ b/crates/collab/src/api/billing.rs @@ -252,7 +252,10 @@ async fn create_billing_subscription( let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?; let stripe_model = stripe_billing.register_model(default_model).await?; - let success_url = format!("{}/account", app.config.zed_dot_dev_url()); + let success_url = format!( + "{}/account?checkout_complete=1", + app.config.zed_dot_dev_url() + ); let checkout_session_url = stripe_billing .checkout(customer_id, &user.github_login, &stripe_model, &success_url) .await?; From 43258190751251b4fa03ce721d74acd1d414a241 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 25 Oct 2024 14:30:34 -0700 Subject: [PATCH 44/76] Fix more failure cases of assistant edits (#19653) * Make `description` optional (since we describe it as optional in the prompt, and we're currently not showing it) * Fix fuzzy location bug that neglected the cost of deleting prefixes of the query. * Make auto-indent work for single-line edits. Previously, auto-indent would not occur when overwriting a single line (without inserting or deleting a newline) Release Notes: - N/A --- assets/prompts/edit_workflow.hbs | 10 +- crates/assistant/src/context/context_tests.rs | 8 +- crates/assistant/src/patch.rs | 344 ++++++++++-------- crates/language/src/buffer.rs | 29 +- crates/language/src/buffer_tests.rs | 67 ++++ crates/languages/Cargo.toml | 7 +- crates/languages/src/lib.rs | 9 - 7 files changed, 302 insertions(+), 172 deletions(-) diff --git a/assets/prompts/edit_workflow.hbs b/assets/prompts/edit_workflow.hbs index 99a594cdd88e4c41a482bb102ef3702158af334a..9a5fba43d514d2bd588d4418fa84dba9794ef158 100644 --- a/assets/prompts/edit_workflow.hbs +++ b/assets/prompts/edit_workflow.hbs @@ -88,7 +88,6 @@ origin: (f64, f64), src/shapes/rectangle.rs -Update the Rectangle's new function to take an origin parameter update fn new(width: f64, height: f64) -> Self { @@ -117,7 +116,6 @@ pub struct Circle { src/shapes/circle.rs -Update the Circle's new function to take an origin parameter update fn new(radius: f64) -> Self { @@ -134,7 +132,6 @@ fn new(origin: (f64, f64), radius: f64) -> Self { src/shapes/rectangle.rs -Add an import for the std::fmt module insert_before struct Rectangle { @@ -147,7 +144,10 @@ use std::fmt; src/shapes/rectangle.rs -Add a Display implementation for Rectangle + +Add a manual Display implementation for Rectangle. +Currently, this is the same as a derived Display implementation. 
+ insert_after Rectangle { width, height } @@ -169,7 +169,6 @@ impl fmt::Display for Rectangle { src/shapes/circle.rs -Add an import for the `std::fmt` module insert_before struct Circle { @@ -181,7 +180,6 @@ use std::fmt; src/shapes/circle.rs -Add a Display implementation for Circle insert_after Circle { radius } diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index e1b74487386930c0fdeef152304a6cec8d055df6..ecbe27269350314144ed58940baf857dc24ba666 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -636,7 +636,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { kind: AssistantEditKind::InsertAfter { old_text: "fn one".into(), new_text: "fn two() {}".into(), - description: "add a `two` function".into(), + description: Some("add a `two` function".into()), }, }]], cx, @@ -690,7 +690,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { kind: AssistantEditKind::InsertAfter { old_text: "fn zero".into(), new_text: "fn two() {}".into(), - description: "add a `two` function".into(), + description: Some("add a `two` function".into()), }, }]], cx, @@ -754,7 +754,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { kind: AssistantEditKind::InsertAfter { old_text: "fn zero".into(), new_text: "fn two() {}".into(), - description: "add a `two` function".into(), + description: Some("add a `two` function".into()), }, }]], cx, @@ -798,7 +798,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) { kind: AssistantEditKind::InsertAfter { old_text: "fn zero".into(), new_text: "fn two() {}".into(), - description: "add a `two` function".into(), + description: Some("add a `two` function".into()), }, }]], cx, diff --git a/crates/assistant/src/patch.rs b/crates/assistant/src/patch.rs index 13b719f5c60b27da464e1220fada46b7d2a9b372..ca2df7a0e01bdd3117a3f41d75d5dc1a09cbad20 100644 --- a/crates/assistant/src/patch.rs +++ b/crates/assistant/src/patch.rs @@ -33,21 +33,21 @@ pub enum AssistantEditKind { Update { old_text: String, new_text: String, - description: String, + description: Option, }, Create { new_text: String, - description: String, + description: Option, }, InsertBefore { old_text: String, new_text: String, - description: String, + description: Option, }, InsertAfter { old_text: String, new_text: String, - description: String, + description: Option, }, Delete { old_text: String, @@ -86,19 +86,37 @@ enum SearchDirection { Diagonal, } -// A measure of the currently quality of an in-progress fuzzy search. -// -// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding -// operation in the search. 
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] struct SearchState { - score: u32, + cost: u32, direction: SearchDirection, } impl SearchState { - fn new(score: u32, direction: SearchDirection) -> Self { - Self { score, direction } + fn new(cost: u32, direction: SearchDirection) -> Self { + Self { cost, direction } + } +} + +struct SearchMatrix { + cols: usize, + data: Vec, +} + +impl SearchMatrix { + fn new(rows: usize, cols: usize) -> Self { + SearchMatrix { + cols, + data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols], + } + } + + fn get(&self, row: usize, col: usize) -> SearchState { + self.data[row * self.cols + col] + } + + fn set(&mut self, row: usize, col: usize, cost: SearchState) { + self.data[row * self.cols + col] = cost; } } @@ -187,23 +205,23 @@ impl AssistantEdit { "update" => AssistantEditKind::Update { old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, - description: description.ok_or_else(|| anyhow!("missing description"))?, + description, }, "insert_before" => AssistantEditKind::InsertBefore { old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, - description: description.ok_or_else(|| anyhow!("missing description"))?, + description, }, "insert_after" => AssistantEditKind::InsertAfter { old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, - description: description.ok_or_else(|| anyhow!("missing description"))?, + description, }, "delete" => AssistantEditKind::Delete { old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?, }, "create" => AssistantEditKind::Create { - description: description.ok_or_else(|| anyhow!("missing description"))?, + description, new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?, }, _ => Err(anyhow!("unknown operation {operation:?}"))?, @@ -264,7 +282,7 @@ impl AssistantEditKind { ResolvedEdit { range, new_text, - description: Some(description), + description, } } Self::Create { @@ -272,7 +290,7 @@ impl AssistantEditKind { description, } => ResolvedEdit { range: text::Anchor::MIN..text::Anchor::MAX, - description: Some(description), + description, new_text, }, Self::InsertBefore { @@ -285,7 +303,7 @@ impl AssistantEditKind { ResolvedEdit { range: range.start..range.start, new_text, - description: Some(description), + description, } } Self::InsertAfter { @@ -298,7 +316,7 @@ impl AssistantEditKind { ResolvedEdit { range: range.end..range.end, new_text, - description: Some(description), + description, } } Self::Delete { old_text } => { @@ -314,44 +332,29 @@ impl AssistantEditKind { fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range { const INSERTION_COST: u32 = 3; + const DELETION_COST: u32 = 10; const WHITESPACE_INSERTION_COST: u32 = 1; - const DELETION_COST: u32 = 3; const WHITESPACE_DELETION_COST: u32 = 1; - const EQUALITY_BONUS: u32 = 5; - - struct Matrix { - cols: usize, - data: Vec, - } - - impl Matrix { - fn new(rows: usize, cols: usize) -> Self { - Matrix { - cols, - data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols], - } - } - - fn get(&self, row: usize, col: usize) -> SearchState { - self.data[row * self.cols + col] - } - - fn set(&mut self, row: usize, col: usize, cost: SearchState) { - self.data[row * self.cols + col] = cost; - } - } let buffer_len = buffer.len(); let query_len = search_query.len(); - let mut matrix = 
Matrix::new(query_len + 1, buffer_len + 1); - + let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1); + let mut leading_deletion_cost = 0_u32; for (row, query_byte) in search_query.bytes().enumerate() { + let deletion_cost = if query_byte.is_ascii_whitespace() { + WHITESPACE_DELETION_COST + } else { + DELETION_COST + }; + + leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost); + matrix.set( + row + 1, + 0, + SearchState::new(leading_deletion_cost, SearchDirection::Diagonal), + ); + for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() { - let deletion_cost = if query_byte.is_ascii_whitespace() { - WHITESPACE_DELETION_COST - } else { - DELETION_COST - }; let insertion_cost = if buffer_byte.is_ascii_whitespace() { WHITESPACE_INSERTION_COST } else { @@ -359,38 +362,35 @@ impl AssistantEditKind { }; let up = SearchState::new( - matrix.get(row, col + 1).score.saturating_sub(deletion_cost), + matrix.get(row, col + 1).cost.saturating_add(deletion_cost), SearchDirection::Up, ); let left = SearchState::new( - matrix - .get(row + 1, col) - .score - .saturating_sub(insertion_cost), + matrix.get(row + 1, col).cost.saturating_add(insertion_cost), SearchDirection::Left, ); let diagonal = SearchState::new( if query_byte == *buffer_byte { - matrix.get(row, col).score.saturating_add(EQUALITY_BONUS) + matrix.get(row, col).cost } else { matrix .get(row, col) - .score - .saturating_sub(deletion_cost + insertion_cost) + .cost + .saturating_add(deletion_cost + insertion_cost) }, SearchDirection::Diagonal, ); - matrix.set(row + 1, col + 1, up.max(left).max(diagonal)); + matrix.set(row + 1, col + 1, up.min(left).min(diagonal)); } } // Traceback to find the best match let mut best_buffer_end = buffer_len; - let mut best_score = 0; + let mut best_cost = u32::MAX; for col in 1..=buffer_len { - let score = matrix.get(query_len, col).score; - if score > best_score { - best_score = score; + let cost = matrix.get(query_len, col).cost; + if cost < best_cost { + best_cost = cost; best_buffer_end = col; } } @@ -560,89 +560,84 @@ mod tests { language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher, }; use settings::SettingsStore; - use text::{OffsetRangeExt, Point}; use ui::BorrowAppContext; use unindent::Unindent as _; + use util::test::{generate_marked_text, marked_text_ranges}; #[gpui::test] fn test_resolve_location(cx: &mut AppContext) { - { - let buffer = cx.new_model(|cx| { - Buffer::local( - concat!( - " Lorem\n", - " ipsum\n", - " dolor sit amet\n", - " consecteur", - ), - cx, - ) - }); - let snapshot = buffer.read(cx).snapshot(); - assert_eq!( - AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot), - Point::new(1, 0)..Point::new(2, 18) - ); - } + assert_location_resolution( + concat!( + " Lorem\n", + "« ipsum\n", + " dolor sit amet»\n", + " consecteur", + ), + "ipsum\ndolor", + cx, + ); - { - let buffer = cx.new_model(|cx| { - Buffer::local( - concat!( - "fn foo1(a: usize) -> usize {\n", - " 40\n", - "}\n", - "\n", - "fn foo2(b: usize) -> usize {\n", - " 42\n", - "}\n", - ), - cx, - ) - }); - let snapshot = buffer.read(cx).snapshot(); - assert_eq!( - AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}") - .to_point(&snapshot), - Point::new(0, 0)..Point::new(2, 1) - ); - } + assert_location_resolution( + &" + «fn foo1(a: usize) -> usize { + 40 + }» - { - let buffer = cx.new_model(|cx| { - Buffer::local( - concat!( - "fn main() {\n", - " Foo\n", - " .bar()\n", - " 
.baz()\n", - " .qux()\n", - "}\n", - "\n", - "fn foo2(b: usize) -> usize {\n", - " 42\n", - "}\n", - ), - cx, - ) - }); - let snapshot = buffer.read(cx).snapshot(); - assert_eq!( - AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()") - .to_point(&snapshot), - Point::new(1, 0)..Point::new(4, 14) - ); - } + fn foo2(b: usize) -> usize { + 42 + } + " + .unindent(), + "fn foo1(b: usize) {\n40\n}", + cx, + ); + + assert_location_resolution( + &" + fn main() { + « Foo + .bar() + .baz() + .qux()» + } + + fn foo2(b: usize) -> usize { + 42 + } + " + .unindent(), + "Foo.bar.baz.qux()", + cx, + ); + + assert_location_resolution( + &" + class Something { + one() { return 1; } + « two() { return 2222; } + three() { return 333; } + four() { return 4444; } + five() { return 5555; } + six() { return 6666; } + » seven() { return 7; } + eight() { return 8; } + } + " + .unindent(), + &" + two() { return 2222; } + four() { return 4444; } + five() { return 5555; } + six() { return 6666; } + " + .unindent(), + cx, + ); } #[gpui::test] fn test_resolve_edits(cx: &mut AppContext) { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - language::init(cx); - cx.update_global::(|settings, cx| { - settings.update_user_settings::(cx, |_| {}); - }); + init_test(cx); assert_edits( " @@ -675,7 +670,7 @@ mod tests { last_name: String, " .unindent(), - description: "".into(), + description: None, }, AssistantEditKind::Update { old_text: " @@ -690,7 +685,7 @@ mod tests { } " .unindent(), - description: "".into(), + description: None, }, ], " @@ -734,7 +729,7 @@ mod tests { qux(); }" .unindent(), - description: "implement bar".into(), + description: Some("implement bar".into()), }, AssistantEditKind::Update { old_text: " @@ -747,7 +742,7 @@ mod tests { bar(); }" .unindent(), - description: "call bar in foo".into(), + description: Some("call bar in foo".into()), }, AssistantEditKind::InsertAfter { old_text: " @@ -762,7 +757,7 @@ mod tests { } " .unindent(), - description: "implement qux".into(), + description: Some("implement qux".into()), }, ], " @@ -814,7 +809,7 @@ mod tests { } " .unindent(), - description: "pick better number".into(), + description: None, }, AssistantEditKind::Update { old_text: " @@ -829,7 +824,7 @@ mod tests { } " .unindent(), - description: "pick better number".into(), + description: None, }, AssistantEditKind::Update { old_text: " @@ -844,7 +839,7 @@ mod tests { } " .unindent(), - description: "pick better number".into(), + description: None, }, ], " @@ -865,6 +860,69 @@ mod tests { .unindent(), cx, ); + + assert_edits( + " + impl Person { + fn set_name(&mut self, name: String) { + self.name = name; + } + + fn name(&self) -> String { + return self.name; + } + } + " + .unindent(), + vec![ + AssistantEditKind::Update { + old_text: "self.name = name;".unindent(), + new_text: "self._name = name;".unindent(), + description: None, + }, + AssistantEditKind::Update { + old_text: "return self.name;\n".unindent(), + new_text: "return self._name;\n".unindent(), + description: None, + }, + ], + " + impl Person { + fn set_name(&mut self, name: String) { + self._name = name; + } + + fn name(&self) -> String { + return self._name; + } + } + " + .unindent(), + cx, + ); + } + + fn init_test(cx: &mut AppContext) { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + cx.update_global::(|settings, cx| { + settings.update_user_settings::(cx, |_| {}); + }); + } + + #[track_caller] + fn assert_location_resolution( + 
text_with_expected_range: &str, + query: &str, + cx: &mut AppContext, + ) { + let (text, _) = marked_text_ranges(text_with_expected_range, false); + let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx)); + let snapshot = buffer.read(cx).snapshot(); + let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot); + let text_with_actual_range = generate_marked_text(&text, &[range], false); + pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range); } #[track_caller] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1c64475c9a58c86c892e69bcb3adb88f7bca39a1..62f2f370b01ded316632ef85f573ce7b97c468bb 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1967,18 +1967,27 @@ impl Buffer { let new_text_length = new_text.len(); let old_start = range.start.to_point(&before_edit); let new_start = (delta + range.start as isize) as usize; - delta += new_text_length as isize - (range.end as isize - range.start as isize); + let range_len = range.end - range.start; + delta += new_text_length as isize - range_len as isize; + // Decide what range of the insertion to auto-indent, and whether + // the first line of the insertion should be considered a newly-inserted line + // or an edit to an existing line. let mut range_of_insertion_to_indent = 0..new_text_length; - let mut first_line_is_new = false; - let mut original_indent_column = None; + let mut first_line_is_new = true; + + let old_line_start = before_edit.indent_size_for_line(old_start.row).len; + let old_line_end = before_edit.line_len(old_start.row); + + if old_start.column > old_line_start { + first_line_is_new = false; + } - // When inserting an entire line at the beginning of an existing line, - // treat the insertion as new. - if new_text.contains('\n') - && old_start.column <= before_edit.indent_size_for_line(old_start.row).len + if !new_text.contains('\n') + && (old_start.column + (range_len as u32) < old_line_end + || old_line_end == old_line_start) { - first_line_is_new = true; + first_line_is_new = false; } // When inserting text starting with a newline, avoid auto-indenting the @@ -1988,7 +1997,7 @@ impl Buffer { first_line_is_new = true; } - // Avoid auto-indenting after the insertion. + let mut original_indent_column = None; if let AutoindentMode::Block { original_indent_columns, } = &mode @@ -2000,6 +2009,8 @@ impl Buffer { ) .len })); + + // Avoid auto-indenting the line after the edit. if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') { range_of_insertion_to_indent.end -= 1; } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 9d2385e919139a173791eddb7c5c24f72b8a115a..f32918c4cad6f623f2c9a9584c7d42dc4a367d77 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -1241,11 +1241,43 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC Some(AutoindentMode::EachLine), cx, ); + assert_eq!( + buffer.text(), + " + fn a() { + c + .f + .g(); + d + .f + .g(); + } + " + .unindent() + ); + // Insert a newline after the open brace. 
It is auto-indented + buffer.edit_via_marked_text( + &" + fn a() {« + » + c + .f + .g(); + d + .f + .g(); + } + " + .unindent(), + Some(AutoindentMode::EachLine), + cx, + ); assert_eq!( buffer.text(), " fn a() { + ˇ c .f .g(); @@ -1255,7 +1287,42 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC } " .unindent() + .replace("ˇ", "") ); + + // Manually outdent the line. It stays outdented. + buffer.edit_via_marked_text( + &" + fn a() { + «» + c + .f + .g(); + d + .f + .g(); + } + " + .unindent(), + Some(AutoindentMode::EachLine), + cx, + ); + assert_eq!( + buffer.text(), + " + fn a() { + + c + .f + .g(); + d + .f + .g(); + } + " + .unindent() + ); + buffer }); diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index c4f14d13549eb63f4bdd1927bf0a08e8d4c2e572..d6746575f39b7e6b5dcc992676f5edfe9c7084bf 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -10,7 +10,7 @@ workspace = true [features] test-support = [ - "tree-sitter" + "load-grammars" ] load-grammars = [ "tree-sitter-bash", @@ -82,3 +82,8 @@ text.workspace = true theme = { workspace = true, features = ["test-support"] } unindent.workspace = true workspace = { workspace = true, features = ["test-support"] } +tree-sitter-typescript.workspace = true +tree-sitter-python.workspace = true +tree-sitter-go.workspace = true +tree-sitter-c.workspace = true +tree-sitter-css.workspace = true diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 03c4735d6d645b34d3f72260fd92e85497472910..7e8c09c8ad068aa9384ffedf0aaef331e4acfa47 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -288,15 +288,6 @@ fn load_config(name: &str) -> LanguageConfig { .with_context(|| format!("failed to load config.toml for language {name:?}")) .unwrap(); - #[cfg(not(feature = "load-grammars"))] - { - config = LanguageConfig { - name: config.name, - matcher: config.matcher, - ..Default::default() - } - } - config } From 98d2e5fe7336d87ec1dd23225c34947014b2d2e1 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 25 Oct 2024 16:28:08 -0600 Subject: [PATCH 45/76] Quote fixes (#19765) Closes #19372 Release Notes: - Fixed autoclosing quotes when the string is already open. 
- Added autoclosing of rust multiline strings --------- Co-authored-by: Kurt Wolf --- crates/editor/src/editor.rs | 15 +++++++++++++++ crates/languages/src/rust/config.toml | 3 +++ 2 files changed, 18 insertions(+) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 641ccc27151bd9a5a373266f68a3f29281790a04..d23889b42767991c5021edf3ff69f9dd23c66a82 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3282,10 +3282,25 @@ impl Editor { &bracket_pair.start[..prefix_len], )); + let is_closing_quote = if bracket_pair.end == bracket_pair.start + && bracket_pair.start.len() == 1 + { + let target = bracket_pair.start.chars().next().unwrap(); + let current_line_count = snapshot + .reversed_chars_at(selection.start) + .take_while(|&c| c != '\n') + .filter(|&c| c == target) + .count(); + current_line_count % 2 == 1 + } else { + false + }; + if autoclose && bracket_pair.close && following_text_allows_autoclose && preceding_text_matches_prefix + && !is_closing_quote { let anchor = snapshot.anchor_before(selection.end); new_selections.push((selection.map(|_| anchor), text.len())); diff --git a/crates/languages/src/rust/config.toml b/crates/languages/src/rust/config.toml index d01f62e354ea483c634e16705603acf0c7e644f3..81b9c1e2d94d7f3fc66cbdea3916b30c1dc18488 100644 --- a/crates/languages/src/rust/config.toml +++ b/crates/languages/src/rust/config.toml @@ -5,6 +5,9 @@ line_comments = ["// ", "/// ", "//! "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, + { start = "r#\"", end = "\"#", close = true, newline = true }, + { start = "r##\"", end = "\"##", close = true, newline = true }, + { start = "r###\"", end = "\"###", close = true, newline = true }, { start = "[", end = "]", close = true, newline = true }, { start = "(", end = ")", close = true, newline = true }, { start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] }, From 78ed0c931234af8cfcde7973dbcee15b324e2de9 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 25 Oct 2024 16:47:44 -0600 Subject: [PATCH 46/76] vim: Copy comment to new lines with o/O (#19766) Co-Authored-By: Kurt Wolf Closes: #4691 Closes #ISSUE Release Notes: - vim: o/O now respect `extend_comment_on_newline` --- crates/editor/src/editor.rs | 4 ++ .../src/test/editor_lsp_test_context.rs | 1 + crates/multi_buffer/src/multi_buffer.rs | 24 +++++++++ crates/vim/src/normal.rs | 52 +++++++++++++------ crates/vim/test_data/test_o_comment.json | 8 +++ 5 files changed, 73 insertions(+), 16 deletions(-) create mode 100644 crates/vim/test_data/test_o_comment.json diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d23889b42767991c5021edf3ff69f9dd23c66a82..5bd3accc13fbc49253c3f6b50b56037db5a1147c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -223,6 +223,7 @@ pub fn render_parsed_markdown( } }), ); + // hello let mut links = Vec::new(); let mut link_ranges = Vec::new(); @@ -3784,6 +3785,9 @@ impl Editor { pub fn newline_below(&mut self, _: &NewlineBelow, cx: &mut ViewContext) { let buffer = self.buffer.read(cx); let snapshot = buffer.snapshot(cx); + // + // + // let mut edits = Vec::new(); let mut rows = Vec::new(); diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index e32d590a60d138dc7bd2a439f942e3d179b30f6a..b93b8d3e7e003b35c208e59f5a430c92313ff216 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ 
b/crates/editor/src/test/editor_lsp_test_context.rs @@ -123,6 +123,7 @@ impl EditorLspTestContext { path_suffixes: vec!["rs".to_string()], ..Default::default() }, + line_comments: vec!["// ".into(), "/// ".into(), "//! ".into()], ..Default::default() }, Some(tree_sitter_rust::LANGUAGE.into()), diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 5dad354a39ca995649530f1462823b0b28147e06..f4bdafc9853fec201d59c1bc64c017c10a0406f6 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -2862,6 +2862,30 @@ impl MultiBufferSnapshot { } } + pub fn indent_and_comment_for_line(&self, row: MultiBufferRow, cx: &AppContext) -> String { + let mut indent = self.indent_size_for_line(row).chars().collect::(); + + if self.settings_at(0, cx).extend_comment_on_newline { + if let Some(language_scope) = self.language_scope_at(Point::new(row.0, 0)) { + let delimiters = language_scope.line_comment_prefixes(); + for delimiter in delimiters { + if *self + .chars_at(Point::new(row.0, indent.len() as u32)) + .take(delimiter.chars().count()) + .collect::() + .as_str() + == **delimiter + { + indent.push_str(&delimiter); + break; + } + } + } + } + + indent + } + pub fn prev_non_blank_row(&self, mut row: MultiBufferRow) -> Option { while row.0 > 0 { row.0 -= 1; diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 4a4927a2fc5bfd1e3ce0be8b41b3401a9010805f..aecd0f90b2312eae4b4a934639bfa0ee7428fe3b 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -328,14 +328,18 @@ impl Vim { .into_iter() .map(|selection| selection.start.row) .collect(); - let edits = selection_start_rows.into_iter().map(|row| { - let indent = snapshot - .indent_size_for_line(MultiBufferRow(row)) - .chars() - .collect::(); - let start_of_line = Point::new(row, 0); - (start_of_line..start_of_line, indent + "\n") - }); + let edits = selection_start_rows + .into_iter() + .map(|row| { + let indent = snapshot + .indent_and_comment_for_line(MultiBufferRow(row), cx) + .chars() + .collect::(); + + let start_of_line = Point::new(row, 0); + (start_of_line..start_of_line, indent + "\n") + }) + .collect::>(); editor.edit_with_autoindent(edits, cx); editor.change_selections(Some(Autoscroll::fit()), cx, |s| { s.move_cursors_with(|map, cursor, _| { @@ -361,14 +365,18 @@ impl Vim { .into_iter() .map(|selection| selection.end.row) .collect(); - let edits = selection_end_rows.into_iter().map(|row| { - let indent = snapshot - .indent_size_for_line(MultiBufferRow(row)) - .chars() - .collect::(); - let end_of_line = Point::new(row, snapshot.line_len(MultiBufferRow(row))); - (end_of_line..end_of_line, "\n".to_string() + &indent) - }); + let edits = selection_end_rows + .into_iter() + .map(|row| { + let indent = snapshot + .indent_and_comment_for_line(MultiBufferRow(row), cx) + .chars() + .collect::(); + + let end_of_line = Point::new(row, snapshot.line_len(MultiBufferRow(row))); + (end_of_line..end_of_line, "\n".to_string() + &indent) + }) + .collect::>(); editor.change_selections(Some(Autoscroll::fit()), cx, |s| { s.maybe_move_cursors_with(|map, cursor, goal| { Motion::CurrentLine.move_point( @@ -1414,4 +1422,16 @@ mod test { .await .assert_eq("th th\nth th\nth th\nth th\nth th\nˇth th\n"); } + + #[gpui::test] + async fn test_o_comment(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("filetype=rust").await; + + cx.set_shared_state("// helloˇ\n").await; + 
cx.simulate_shared_keystrokes("o").await; + cx.shared_state().await.assert_eq("// hello\n// ˇ\n"); + cx.simulate_shared_keystrokes("x escape shift-o").await; + cx.shared_state().await.assert_eq("// hello\n// ˇ\n// x\n"); + } } diff --git a/crates/vim/test_data/test_o_comment.json b/crates/vim/test_data/test_o_comment.json new file mode 100644 index 0000000000000000000000000000000000000000..b0b84da0e6d3b73d33d70a6d862228cc2d49d40b --- /dev/null +++ b/crates/vim/test_data/test_o_comment.json @@ -0,0 +1,8 @@ +{"SetOption":{"value":"filetype=rust"}} +{"Put":{"state":"// helloˇ\n"}} +{"Key":"o"} +{"Get":{"state":"// hello\n// ˇ\n","mode":"Insert"}} +{"Key":"x"} +{"Key":"escape"} +{"Key":"shift-o"} +{"Get":{"state":"// hello\n// ˇ\n// x\n","mode":"Insert"}} From 1acebb3c47c2fc035a1cfc45d26761af7cbee1ff Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 26 Oct 2024 01:55:46 +0300 Subject: [PATCH 47/76] Remove another false-positive Danger message (#19769) Follow-up of https://github.com/zed-industries/zed/pull/19151 Ignores any URLs aftrer `Release Notes:` (if present) and after `Follow-up of` and `Part of` words. Release Notes: - N/A --- script/danger/dangerfile.ts | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/script/danger/dangerfile.ts b/script/danger/dangerfile.ts index 4ffe1459df1891bd4da48fe8e6a32278a1ce3894..a23816620308d06b97a5cfa1245ac062fd3b2dd4 100644 --- a/script/danger/dangerfile.ts +++ b/script/danger/dangerfile.ts @@ -8,7 +8,7 @@ prHygiene({ }, }); -const RELEASE_NOTES_PATTERN = new RegExp("Release Notes:\\r?\\n\\s+-", "gm"); +const RELEASE_NOTES_PATTERN = /Release Notes:\r?\n\s+-/gm; const body = danger.github.pr.body; const hasReleaseNotes = RELEASE_NOTES_PATTERN.test(body); @@ -36,28 +36,22 @@ if (!hasReleaseNotes) { ); } -const ISSUE_LINK_PATTERN = new RegExp( - "(? - match - .replace(/^#/, "") - .replace(/https:\/\/github\.com\/zed-industries\/zed\/issues\//, ""), - ) + .map((match) => match.replace(/^#/, "").replace(/https:\/\/github\.com\/zed-industries\/zed\/issues\//, "")) .filter((issue, index, self) => self.indexOf(issue) === index); + const issuesToReport = issues.map((issue) => `#${issue}`).join(", "); message( [ - "This PR includes links to the following GitHub Issues: " + - issues.map((issue) => `#${issue}`).join(", "), + `This PR includes links to the following GitHub Issues: ${issuesToReport}`, "If this PR aims to close an issue, please include a `Closes #ISSUE` line at the top of the PR body.", ].join("\n"), ); From fc8a72cdd8a5b12225344e54169709a98d7d46cd Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Sat, 26 Oct 2024 01:32:54 +0200 Subject: [PATCH 48/76] WIP: ssh remoting: Add `upload_binary` field to SshConnections (#19748) This removes the old `remote_server { "download_binary_on_host": bool }` field and replaces it with a `upload_binary: bool` on every `ssh_connection`. 
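For illustration, a minimal sketch of the new per-connection shape in `settings.json`, matching the `upload_binary_over_ssh` field added to `SshConnection` and the docs change included later in this patch (host and project path below are placeholder values):

```json
{
  "ssh_connections": [
    {
      "host": "192.168.1.10",
      "projects": ["~/code/zed/zed"],
      // Download the server binary locally and upload it over the SSH
      // connection, instead of downloading it on the remote host.
      "upload_binary_over_ssh": true
    }
  ]
}
```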
@ConradIrwin it compiles, it connects, but I haven't tested it really yet Release Notes: - N/A --------- Co-authored-by: Conrad Co-authored-by: Conrad Irwin --- .../src/disconnected_overlay.rs | 13 +- crates/recent_projects/src/recent_projects.rs | 25 +-- crates/recent_projects/src/remote_servers.rs | 25 ++- crates/recent_projects/src/ssh_connections.rs | 152 ++++++++---------- crates/remote/src/ssh_session.rs | 14 +- crates/title_bar/Cargo.toml | 2 - crates/title_bar/src/title_bar.rs | 20 +-- crates/zed/src/main.rs | 40 +---- crates/zed/src/zed.rs | 7 - crates/zed/src/zed/open_listener.rs | 73 ++++----- docs/src/remote-development.md | 60 ++++++- 11 files changed, 200 insertions(+), 231 deletions(-) diff --git a/crates/recent_projects/src/disconnected_overlay.rs b/crates/recent_projects/src/disconnected_overlay.rs index 34a9b895a25de42cc3a5dc7c201cacf7af5ea400..ed81fbb345ddeef036612b1b16fe9c9f012bd2b8 100644 --- a/crates/recent_projects/src/disconnected_overlay.rs +++ b/crates/recent_projects/src/disconnected_overlay.rs @@ -11,7 +11,7 @@ use ui::{ }; use workspace::{notifications::DetachAndPromptErr, ModalView, OpenOptions, Workspace}; -use crate::{open_ssh_project, SshSettings}; +use crate::open_ssh_project; enum Host { RemoteProject, @@ -102,16 +102,6 @@ impl DisconnectedOverlay { let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); cx.spawn(move |_, mut cx| async move { - let nickname = cx - .update(|cx| { - SshSettings::get_global(cx).nickname_for( - &connection_options.host, - connection_options.port, - &connection_options.username, - ) - }) - .ok() - .flatten(); open_ssh_project( connection_options, paths, @@ -120,7 +110,6 @@ impl DisconnectedOverlay { replace_window: Some(window), ..Default::default() }, - nickname, &mut cx, ) .await?; diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 6032e7d9969eb5dda307ea9ed74825090b7ea885..e5d28d16ca30b24f9d6b9c10ff9331ba7c90b2af 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -1,7 +1,6 @@ pub mod disconnected_overlay; mod remote_servers; mod ssh_connections; -use remote::SshConnectionOptions; pub use ssh_connections::open_ssh_project; use disconnected_overlay::DisconnectedOverlay; @@ -331,23 +330,12 @@ impl PickerDelegate for RecentProjectsDelegate { ..Default::default() }; - let args = SshSettings::get_global(cx).args_for( - &ssh_project.host, - ssh_project.port, - &ssh_project.user, - ); - let nickname = SshSettings::get_global(cx).nickname_for( - &ssh_project.host, - ssh_project.port, - &ssh_project.user, - ); - let connection_options = SshConnectionOptions { - host: ssh_project.host.clone(), - username: ssh_project.user.clone(), - port: ssh_project.port, - password: None, - args, - }; + let connection_options = SshSettings::get_global(cx) + .connection_options_for( + ssh_project.host.clone(), + ssh_project.port, + ssh_project.user.clone(), + ); let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); @@ -357,7 +345,6 @@ impl PickerDelegate for RecentProjectsDelegate { paths, app_state, open_options, - nickname, &mut cx, ) .await diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index d7f3beccb21388c2f5ef6c96181a2217ff21fbb3..faf58f312fafa7d7038c3b7c807c0a88ee0d14c6 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -197,11 +197,7 @@ impl ProjectPicker { picker }); let 
connection_string = connection.connection_string().into(); - let nickname = SshSettings::get_global(cx).nickname_for( - &connection.host, - connection.port, - &connection.username, - ); + let nickname = connection.nickname.clone().map(|nick| nick.into()); let _path_task = cx .spawn({ let workspace = workspace.clone(); @@ -414,7 +410,7 @@ impl RemoteServerProjects { return; } }; - let ssh_prompt = cx.new_view(|cx| SshPrompt::new(&connection_options, None, cx)); + let ssh_prompt = cx.new_view(|cx| SshPrompt::new(&connection_options, cx)); let connection = connect_over_ssh( connection_options.remote_server_identifier(), @@ -491,12 +487,11 @@ impl RemoteServerProjects { return; }; - let nickname = ssh_connection.nickname.clone(); let connection_options = ssh_connection.into(); workspace.update(cx, |_, cx| { cx.defer(move |workspace, cx| { workspace.toggle_modal(cx, |cx| { - SshConnectionModal::new(&connection_options, Vec::new(), nickname, cx) + SshConnectionModal::new(&connection_options, Vec::new(), cx) }); let prompt = workspace .active_modal::(cx) @@ -584,9 +579,7 @@ impl RemoteServerProjects { self.create_ssh_server(state.address_editor.clone(), cx); } Mode::EditNickname(state) => { - let text = Some(state.editor.read(cx).text(cx)) - .filter(|text| !text.is_empty()) - .map(SharedString::from); + let text = Some(state.editor.read(cx).text(cx)).filter(|text| !text.is_empty()); let index = state.index; self.update_settings_file(cx, move |setting, _| { if let Some(connections) = setting.ssh_connections.as_mut() { @@ -633,7 +626,7 @@ impl RemoteServerProjects { ) -> impl IntoElement { let (main_label, aux_label) = if let Some(nickname) = ssh_connection.nickname.clone() { let aux_label = SharedString::from(format!("({})", ssh_connection.host)); - (nickname, Some(aux_label)) + (nickname.into(), Some(aux_label)) } else { (ssh_connection.host.clone(), None) }; @@ -746,13 +739,11 @@ impl RemoteServerProjects { let project = project.clone(); let server = server.clone(); cx.spawn(|remote_server_projects, mut cx| async move { - let nickname = server.nickname.clone(); let result = open_ssh_project( server.into(), project.paths.into_iter().map(PathBuf::from).collect(), app_state, OpenOptions::default(), - nickname, &mut cx, ) .await; @@ -861,6 +852,7 @@ impl RemoteServerProjects { projects: vec![], nickname: None, args: connection_options.args.unwrap_or_default(), + upload_binary_over_ssh: None, }) }); } @@ -953,7 +945,7 @@ impl RemoteServerProjects { SshConnectionHeader { connection_string: connection_string.clone(), paths: Default::default(), - nickname: connection.nickname.clone(), + nickname: connection.nickname.clone().map(|s| s.into()), } .render(cx), ) @@ -1135,13 +1127,14 @@ impl RemoteServerProjects { }; let connection_string = connection.host.clone(); + let nickname = connection.nickname.clone().map(|s| s.into()); v_flex() .child( SshConnectionHeader { connection_string, paths: Default::default(), - nickname: connection.nickname.clone(), + nickname, } .render(cx), ) diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 47e4c91dbd528d7f0e1456c7fc222b6aadd7d9a9..55204e14b947ddb96b6a489f3b2bef6eb5f34c69 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -26,15 +26,9 @@ use ui::{ }; use workspace::{AppState, ModalView, Workspace}; -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] -pub struct RemoteServerSettings { - pub download_on_host: Option, -} - 
#[derive(Deserialize)] pub struct SshSettings { pub ssh_connections: Option>, - pub remote_server: Option, } impl SshSettings { @@ -42,39 +36,31 @@ impl SshSettings { self.ssh_connections.clone().into_iter().flatten() } - pub fn args_for( + pub fn connection_options_for( &self, - host: &str, + host: String, port: Option, - user: &Option, - ) -> Option> { - self.ssh_connections() - .filter_map(|conn| { - if conn.host == host && &conn.username == user && conn.port == port { - Some(conn.args) - } else { - None - } - }) - .next() - } - - pub fn nickname_for( - &self, - host: &str, - port: Option, - user: &Option, - ) -> Option { - self.ssh_connections() - .filter_map(|conn| { - if conn.host == host && &conn.username == user && conn.port == port { - Some(conn.nickname) - } else { - None - } - }) - .next() - .flatten() + username: Option, + ) -> SshConnectionOptions { + for conn in self.ssh_connections() { + if conn.host == host && conn.username == username && conn.port == port { + return SshConnectionOptions { + nickname: conn.nickname, + upload_binary_over_ssh: conn.upload_binary_over_ssh.unwrap_or_default(), + args: Some(conn.args), + host, + port, + username, + password: None, + }; + } + } + SshConnectionOptions { + host, + port, + username, + ..Default::default() + } } } @@ -85,13 +71,20 @@ pub struct SshConnection { pub username: Option, #[serde(skip_serializing_if = "Option::is_none")] pub port: Option, - pub projects: Vec, - /// Name to use for this server in UI. - #[serde(skip_serializing_if = "Option::is_none")] - pub nickname: Option, #[serde(skip_serializing_if = "Vec::is_empty")] #[serde(default)] pub args: Vec, + #[serde(default)] + pub projects: Vec, + /// Name to use for this server in UI. + #[serde(skip_serializing_if = "Option::is_none")] + pub nickname: Option, + // By default Zed will download the binary to the host directly. + // If this is set to true, Zed will download the binary to your local machine, + // and then upload it over the SSH connection. Useful if your SSH server has + // limited outbound internet access. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub upload_binary_over_ssh: Option, } impl From for SshConnectionOptions { @@ -102,6 +95,8 @@ impl From for SshConnectionOptions { port: val.port, password: None, args: Some(val.args), + nickname: val.nickname, + upload_binary_over_ssh: val.upload_binary_over_ssh.unwrap_or_default(), } } } @@ -114,7 +109,6 @@ pub struct SshProject { #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)] pub struct RemoteSettingsContent { pub ssh_connections: Option>, - pub remote_server: Option, } impl Settings for SshSettings { @@ -153,10 +147,10 @@ pub struct SshConnectionModal { impl SshPrompt { pub(crate) fn new( connection_options: &SshConnectionOptions, - nickname: Option, cx: &mut ViewContext, ) -> Self { let connection_string = connection_options.connection_string().into(); + let nickname = connection_options.nickname.clone().map(|s| s.into()); Self { connection_string, @@ -276,11 +270,10 @@ impl SshConnectionModal { pub(crate) fn new( connection_options: &SshConnectionOptions, paths: Vec, - nickname: Option, cx: &mut ViewContext, ) -> Self { Self { - prompt: cx.new_view(|cx| SshPrompt::new(connection_options, nickname, cx)), + prompt: cx.new_view(|cx| SshPrompt::new(connection_options, cx)), finished: false, paths, } @@ -451,13 +444,17 @@ impl remote::SshClientDelegate for SshClientDelegate { fn get_server_binary( &self, platform: SshPlatform, + upload_binary_over_ssh: bool, cx: &mut AsyncAppContext, ) -> oneshot::Receiver> { let (tx, rx) = oneshot::channel(); let this = self.clone(); cx.spawn(|mut cx| async move { - tx.send(this.get_server_binary_impl(platform, &mut cx).await) - .ok(); + tx.send( + this.get_server_binary_impl(platform, upload_binary_over_ssh, &mut cx) + .await, + ) + .ok(); }) .detach(); rx @@ -492,19 +489,14 @@ impl SshClientDelegate { async fn get_server_binary_impl( &self, platform: SshPlatform, + upload_binary_via_ssh: bool, cx: &mut AsyncAppContext, ) -> Result<(ServerBinary, SemanticVersion)> { - let (version, release_channel, download_binary_on_host) = cx.update(|cx| { + let (version, release_channel) = cx.update(|cx| { let version = AppVersion::global(cx); let channel = ReleaseChannel::global(cx); - let ssh_settings = SshSettings::get_global(cx); - let download_binary_on_host = ssh_settings - .remote_server - .as_ref() - .and_then(|server| server.download_on_host) - .unwrap_or(false); - (version, channel, download_binary_on_host) + (version, channel) })?; // In dev mode, build the remote server binary from source @@ -529,8 +521,8 @@ impl SshClientDelegate { cx, ); - if download_binary_on_host { - let (request_url, request_body) = AutoUpdater::get_remote_server_release_url( + if upload_binary_via_ssh { + let binary_path = AutoUpdater::download_remote_server_release( platform.os, platform.arch, release_channel, @@ -540,7 +532,7 @@ impl SshClientDelegate { .await .map_err(|e| { anyhow!( - "Failed to get remote server binary download url (version: {}, os: {}, arch: {}): {}", + "Failed to download remote server binary (version: {}, os: {}, arch: {}): {}", version, platform.os, platform.arch, @@ -548,6 +540,26 @@ impl SshClientDelegate { ) })?; + Ok((ServerBinary::LocalBinary(binary_path), version)) + } else { + let (request_url, request_body) = AutoUpdater::get_remote_server_release_url( + platform.os, + platform.arch, + release_channel, + current_version, + cx, + ) + .await + .map_err(|e| { + anyhow!( + "Failed to get remote server binary download url (version: {}, os: {}, arch: {}): {}", + version, + platform.os, + 
platform.arch, + e + ) + })?; + Ok(( ServerBinary::ReleaseUrl { url: request_url, @@ -555,26 +567,6 @@ impl SshClientDelegate { }, version, )) - } else { - let binary_path = AutoUpdater::download_remote_server_release( - platform.os, - platform.arch, - release_channel, - current_version, - cx, - ) - .await - .map_err(|e| { - anyhow!( - "Failed to download remote server binary (version: {}, os: {}, arch: {}): {}", - version, - platform.os, - platform.arch, - e - ) - })?; - - Ok((ServerBinary::LocalBinary(binary_path), version)) } } @@ -715,7 +707,6 @@ pub async fn open_ssh_project( paths: Vec, app_state: Arc, open_options: workspace::OpenOptions, - nickname: Option, cx: &mut AsyncAppContext, ) -> Result<()> { let window = if let Some(window) = open_options.replace_window { @@ -740,12 +731,11 @@ pub async fn open_ssh_project( let (cancel_tx, cancel_rx) = oneshot::channel(); let delegate = window.update(cx, { let connection_options = connection_options.clone(); - let nickname = nickname.clone(); let paths = paths.clone(); move |workspace, cx| { cx.activate_window(); workspace.toggle_modal(cx, |cx| { - SshConnectionModal::new(&connection_options, paths, nickname.clone(), cx) + SshConnectionModal::new(&connection_options, paths, cx) }); let ui = workspace diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 656560f0b6b838715fa9aba330e7e5774af2538e..422937ed234de3813fc92c8e9c121dbf5e876e4e 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -64,6 +64,9 @@ pub struct SshConnectionOptions { pub port: Option, pub password: Option, pub args: Option>, + + pub nickname: Option, + pub upload_binary_over_ssh: bool, } impl SshConnectionOptions { @@ -141,8 +144,10 @@ impl SshConnectionOptions { host: hostname.to_string(), username: username.clone(), port, - password: None, args: Some(args), + password: None, + nickname: None, + upload_binary_over_ssh: false, }) } @@ -236,6 +241,7 @@ pub trait SshClientDelegate: Send + Sync { fn get_server_binary( &self, platform: SshPlatform, + upload_binary_over_ssh: bool, cx: &mut AsyncAppContext, ) -> oneshot::Receiver>; fn set_status(&self, status: Option<&str>, cx: &mut AsyncAppContext); @@ -1705,7 +1711,10 @@ impl SshRemoteConnection { return Ok(()); } - let (binary, version) = delegate.get_server_binary(platform, cx).await??; + let upload_binary_over_ssh = self.socket.connection_options.upload_binary_over_ssh; + let (binary, version) = delegate + .get_server_binary(platform, upload_binary_over_ssh, cx) + .await??; let mut remote_version = None; if cfg!(not(debug_assertions)) { @@ -2336,6 +2345,7 @@ mod fake { fn get_server_binary( &self, _: SshPlatform, + _: bool, _: &mut AsyncAppContext, ) -> oneshot::Receiver> { unreachable!() diff --git a/crates/title_bar/Cargo.toml b/crates/title_bar/Cargo.toml index 93a181e9263674ba11970616c3940471516f9527..df991613ae7c9eb06871275e996a2fff9127640e 100644 --- a/crates/title_bar/Cargo.toml +++ b/crates/title_bar/Cargo.toml @@ -23,7 +23,6 @@ test-support = [ "gpui/test-support", "http_client/test-support", "project/test-support", - "settings/test-support", "util/test-support", "workspace/test-support", ] @@ -43,7 +42,6 @@ recent_projects.workspace = true remote.workspace = true rpc.workspace = true serde.workspace = true -settings.workspace = true smallvec.workspace = true story = { workspace = true, optional = true } theme.workspace = true diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 
8b3fb5739f0733727e0e95a5a70e4f2fae116e68..74c5b2812a5fa34b3db290394f0947b7b1e889fc 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -18,10 +18,8 @@ use gpui::{ StatefulInteractiveElement, Styled, Subscription, View, ViewContext, VisualContext, WeakView, }; use project::{Project, RepositoryEntry}; -use recent_projects::{OpenRemote, RecentProjects, SshSettings}; -use remote::SshConnectionOptions; +use recent_projects::{OpenRemote, RecentProjects}; use rpc::proto; -use settings::Settings; use smallvec::SmallVec; use std::sync::Arc; use theme::ActiveTheme; @@ -29,7 +27,7 @@ use ui::{ h_flex, prelude::*, Avatar, Button, ButtonLike, ButtonStyle, ContextMenu, Icon, IconName, IconSize, IconWithIndicator, Indicator, PopoverMenu, Tooltip, }; -use util::{maybe, ResultExt}; +use util::ResultExt; use vcs_menu::{BranchList, OpenRecent as ToggleVcsMenu}; use workspace::{notifications::NotifyResultExt, Workspace}; @@ -268,15 +266,11 @@ impl TitleBar { let options = self.project.read(cx).ssh_connection_options(cx)?; let host: SharedString = options.connection_string().into(); - let nickname = maybe!({ - SshSettings::get_global(cx) - .ssh_connections - .as_ref()? - .into_iter() - .find(|connection| SshConnectionOptions::from((*connection).clone()) == options) - .and_then(|connection| connection.nickname.clone()) - }) - .unwrap_or_else(|| host.clone()); + let nickname = options + .nickname + .clone() + .map(|nick| nick.into()) + .unwrap_or_else(|| host.clone()); let (indicator_color, meta) = match self.project.read(cx).ssh_connection_state(cx)? { remote::ConnectionState::Connecting => (Color::Info, format!("Connecting to: {host}")), diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 017deca5682d32ebdad747fe886a8d2f3d99ad10..3cb717d24fa8cd1363bd6afb2ad0afbe8468a0b1 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -25,7 +25,6 @@ use gpui::{ use http_client::{read_proxy_from_env, Uri}; use language::LanguageRegistry; use log::LevelFilter; -use remote::SshConnectionOptions; use reqwest_client::ReqwestClient; use assets::Assets; @@ -616,26 +615,15 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut return; } - if let Some(connection_info) = request.ssh_connection { + if let Some(connection_options) = request.ssh_connection { cx.spawn(|mut cx| async move { - let nickname = cx - .update(|cx| { - SshSettings::get_global(cx).nickname_for( - &connection_info.host, - connection_info.port, - &connection_info.username, - ) - }) - .ok() - .flatten(); let paths_with_position = derive_paths_with_position(app_state.fs.as_ref(), request.open_paths).await; open_ssh_project( - connection_info, + connection_options, paths_with_position.into_iter().map(|p| p.path).collect(), app_state, workspace::OpenOptions::default(), - nickname, &mut cx, ) .await @@ -798,25 +786,10 @@ async fn restore_or_create_workspace( task.await?; } SerializedWorkspaceLocation::Ssh(ssh) => { - let args = cx - .update(|cx| { - SshSettings::get_global(cx).args_for(&ssh.host, ssh.port, &ssh.user) - }) - .ok() - .flatten(); - let nickname = cx - .update(|cx| { - SshSettings::get_global(cx).nickname_for(&ssh.host, ssh.port, &ssh.user) - }) - .ok() - .flatten(); - let connection_options = SshConnectionOptions { - args, - host: ssh.host.clone(), - username: ssh.user.clone(), - port: ssh.port, - password: None, - }; + let connection_options = cx.update(|cx| { + SshSettings::get_global(cx) + .connection_options_for(ssh.host, ssh.port, ssh.user) + })?; let app_state = 
app_state.clone(); cx.spawn(move |mut cx| async move { recent_projects::open_ssh_project( @@ -824,7 +797,6 @@ async fn restore_or_create_workspace( ssh.paths.into_iter().map(PathBuf::from).collect(), app_state, workspace::OpenOptions::default(), - nickname, &mut cx, ) .await diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 1efc5b7d385935fd2e181eca4f3678782b4c3b07..8965a1755a1e9c7dff2c81a5ad485d661e91882e 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -881,12 +881,6 @@ pub fn open_new_ssh_project_from_project( return Task::ready(Err(anyhow::anyhow!("Not an ssh project"))); }; let connection_options = ssh_client.read(cx).connection_options(); - let nickname = recent_projects::SshSettings::get_global(cx).nickname_for( - &connection_options.host, - connection_options.port, - &connection_options.username, - ); - cx.spawn(|_, mut cx| async move { open_ssh_project( connection_options, @@ -897,7 +891,6 @@ pub fn open_new_ssh_project_from_project( replace_window: None, env: None, }, - nickname, &mut cx, ) .await diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 5656ffa86c7ea491f3f202a4d68acb7f2fe91075..f1cfc43a6a171cf03fa1a08e8bd698c2b51c965c 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -72,25 +72,24 @@ impl OpenRequest { .ok_or_else(|| anyhow!("missing host in ssh url: {}", file))? .to_string(); let username = Some(url.username().to_string()).filter(|s| !s.is_empty()); - let password = url.password().map(|s| s.to_string()); let port = url.port(); if !self.open_paths.is_empty() { return Err(anyhow!("cannot open both local and ssh paths")); } - let args = SshSettings::get_global(cx).args_for(&host, port, &username); - let connection = SshConnectionOptions { - username, - password, - host, + let mut connection_options = SshSettings::get_global(cx).connection_options_for( + host.clone(), port, - args, - }; + username.clone(), + ); + if let Some(password) = url.password() { + connection_options.password = Some(password.to_string()); + } if let Some(ssh_connection) = &self.ssh_connection { - if *ssh_connection != connection { + if *ssh_connection != connection_options { return Err(anyhow!("cannot open multiple ssh connections")); } } - self.ssh_connection = Some(connection); + self.ssh_connection = Some(connection_options); self.parse_file_path(url.path()); Ok(()) } @@ -374,40 +373,28 @@ async fn open_workspaces( } SerializedWorkspaceLocation::Ssh(ssh) => { let app_state = app_state.clone(); - let args = cx - .update(|cx| { - SshSettings::get_global(cx).args_for(&ssh.host, ssh.port, &ssh.user) - }) - .ok() - .flatten(); - let connection_options = SshConnectionOptions { - args, - host: ssh.host.clone(), - username: ssh.user.clone(), - port: ssh.port, - password: None, - }; - let nickname = cx - .update(|cx| { - SshSettings::get_global(cx).nickname_for(&ssh.host, ssh.port, &ssh.user) + let connection_options = cx.update(|cx| { + SshSettings::get_global(cx) + .connection_options_for(ssh.host, ssh.port, ssh.user) + }); + if let Ok(connection_options) = connection_options { + cx.spawn(|mut cx| async move { + open_ssh_project( + connection_options, + ssh.paths.into_iter().map(PathBuf::from).collect(), + app_state, + OpenOptions::default(), + &mut cx, + ) + .await + .log_err(); }) - .ok() - .flatten(); - cx.spawn(|mut cx| async move { - open_ssh_project( - connection_options, - ssh.paths.into_iter().map(PathBuf::from).collect(), - app_state, - OpenOptions::default(), - nickname, - &mut 
cx, - ) - .await - .log_err(); - }) - .detach(); - // We don't set `errored` here, because for ssh projects, the - // error is displayed in the window. + .detach(); + // We don't set `errored` here if `open_ssh_project` fails, because for ssh projects, the + // error is displayed in the window. + } else { + errored = false; + } } } } diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 07f15379e77d1ada2fe30845d780634ec4f9481c..708d0e0b393eaef216c582b0e39944333e517bd8 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -8,7 +8,7 @@ Remote Development allows you to code at the speed of thought, even when your co Remote development requires two computers, your local machine that runs the Zed UI and the remote server which runs a Zed headless server. The two communicate over SSH, so you will need to be able to SSH from your local machine into the remote server to use this feature. -> **Note:** The original version of remote development sent traffic via Zed's servers. As of Zed v0.157 you can no-longer use this mode. +> **Note:** The original version of remote development sent traffic via Zed's servers. As of Zed v0.157 you can no-longer use that mode. ## Setup @@ -29,7 +29,63 @@ The remote machine must be able to run Zed's server. The following platforms sho - Linux (x86_64 or arm64, we do not yet support 32-bit platforms) - Windows is not yet supported. -## Settings +## Configuration + +The list of remote servers is stored in your settings file {#kb zed::OpenSettings}. You can edit this list using the Remote Projects dialogue {#kb projects::OpenRemote}, which provides some robustness - for example it checks that the connection can be established before writing it to the settings file. + +```json +{ + "ssh_connections": [ + { + "host": "192.168.1.10", + "projects": ["~/code/zed/zed"] + } + ] +} +``` + +Zed shells out to the `ssh` on your path, and so it will inherit any configuration you have in `~/.ssh/config` for the given host. That said, if you need to override anything you can configure the following additional options on each connection: + +```json +{ + "ssh_connections": [ + { + "host": "192.168.1.10", + "projects": ["~/code/zed/zed"], + // any argument to pass to the ssh master process + "args": ["-i", "~/.ssh/work_id_file"], + "port": 22, // defaults to 22 + // defaults to your username on your local machine + "username": "me" + } + ] +} +``` + +There are two additional Zed-specific options per connection, `upload_binary_over_ssh` and `nickname`: + +```json +{ + "ssh_connections": [ + { + "host": "192.168.1.10", + "projects": ["~/code/zed/zed"], + // by default Zed will download the server binary from the internet on the remote. + // When this is true, it'll be downloaded to your laptop and uploaded over SSH. + // This is useful when your remote server has restricted internet access. + "upload_binary_over_ssh": true, + // Shown in the Zed UI to help distinguish multiple hosts. + "nickname": "lil-linux" + } + ] +} +``` + +If you use the command line to open a connection to a host by doing `zed ssh://192.168.1.10/~/.vimrc`, then extra options are read from your settings file by finding the first connection that matches the host/username/port of the URL on the command line. + +Additionally it's worth noting that while you can pass a password on the command line `zed ssh://user:password@host/~`, we do not support writing a password to your settings file. 
If you're connecting repeatedly to the same host, you should configure key-based authentication. + +## Zed settings When opening a remote project there are three relevant settings locations: From d7a277607b17e9875a1aba6351e3acc64db0c1d3 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 26 Oct 2024 03:32:22 +0300 Subject: [PATCH 49/76] Fix a few Windows tests (#19773) --- .../src/test/editor_lsp_test_context.rs | 19 +++++++++++++++--- crates/fs/src/fs.rs | 15 ++++++++++---- crates/lsp/src/lsp.rs | 20 +++++++++++++++---- 3 files changed, 43 insertions(+), 11 deletions(-) diff --git a/crates/editor/src/test/editor_lsp_test_context.rs b/crates/editor/src/test/editor_lsp_test_context.rs index b93b8d3e7e003b35c208e59f5a430c92313ff216..23c5775abd05c93ae963633ad70c3b4e6ca893e4 100644 --- a/crates/editor/src/test/editor_lsp_test_context.rs +++ b/crates/editor/src/test/editor_lsp_test_context.rs @@ -1,6 +1,7 @@ use std::{ borrow::Cow, ops::{Deref, DerefMut, Range}, + path::Path, sync::Arc, }; @@ -66,10 +67,12 @@ impl EditorLspTestContext { ); language_registry.add(Arc::new(language)); + let root = Self::root_path(); + app_state .fs .as_fake() - .insert_tree("/root", json!({ "dir": { file_name.clone(): "" }})) + .insert_tree(root, json!({ "dir": { file_name.clone(): "" }})) .await; let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); @@ -79,7 +82,7 @@ impl EditorLspTestContext { let mut cx = VisualTestContext::from_window(*window.deref(), cx); project .update(&mut cx, |project, cx| { - project.find_or_create_worktree("/root", true, cx) + project.find_or_create_worktree(root, true, cx) }) .await .unwrap(); @@ -108,7 +111,7 @@ impl EditorLspTestContext { }, lsp, workspace, - buffer_lsp_url: lsp::Url::from_file_path(format!("/root/dir/{file_name}")).unwrap(), + buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(), } } @@ -310,6 +313,16 @@ impl EditorLspTestContext { pub fn notify(&self, params: T::Params) { self.lsp.notify::(params); } + + #[cfg(target_os = "windows")] + fn root_path() -> &'static Path { + Path::new("C:\\root") + } + + #[cfg(not(target_os = "windows"))] + fn root_path() -> &'static Path { + Path::new("/root") + } } impl Deref for EditorLspTestContext { diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 7064448e168298e9e01f540bb57e50221579bd72..5ee2947448c90c091dee5d099670c02f654336b5 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -865,14 +865,20 @@ impl FakeFsState { let mut entry_stack = Vec::new(); 'outer: loop { let mut path_components = path.components().peekable(); + let mut prefix = None; while let Some(component) = path_components.next() { match component { - Component::Prefix(_) => panic!("prefix paths aren't supported"), + Component::Prefix(prefix_component) => prefix = Some(prefix_component), Component::RootDir => { entry_stack.clear(); entry_stack.push(self.root.clone()); canonical_path.clear(); - canonical_path.push("/"); + match prefix { + Some(prefix_component) => { + canonical_path.push(prefix_component.as_os_str()); + } + None => canonical_path.push("/"), + } } Component::CurDir => {} Component::ParentDir => { @@ -1384,11 +1390,12 @@ impl Fs for FakeFs { let mut created_dirs = Vec::new(); let mut cur_path = PathBuf::new(); for component in path.components() { - let mut state = self.state.lock(); + let should_skip = matches!(component, Component::Prefix(..) 
| Component::RootDir); cur_path.push(component); - if cur_path == Path::new("/") { + if should_skip { continue; } + let mut state = self.state.lock(); let inode = state.next_inode; let mtime = state.next_mtime; diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index a105c983f9ec362926b33f019218b984e398d669..df2ab35fc43377973998c38a6d78092de5e2e093 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -1177,6 +1177,8 @@ impl FakeLanguageServer { let (stdout_writer, stdout_reader) = async_pipe::pipe(); let (notifications_tx, notifications_rx) = channel::unbounded(); + let root = Self::root_path(); + let mut server = LanguageServer::new_internal( server_id, stdin_writer, @@ -1184,8 +1186,8 @@ impl FakeLanguageServer { None::, Arc::new(Mutex::new(None)), None, - Path::new("/"), - Path::new("/"), + root, + root, None, cx.clone(), |_| {}, @@ -1201,8 +1203,8 @@ impl FakeLanguageServer { None::, Arc::new(Mutex::new(None)), None, - Path::new("/"), - Path::new("/"), + root, + root, None, cx, move |msg| { @@ -1238,6 +1240,16 @@ impl FakeLanguageServer { (server, fake) } + + #[cfg(target_os = "windows")] + fn root_path() -> &'static Path { + Path::new("C:\\") + } + + #[cfg(not(target_os = "windows"))] + fn root_path() -> &'static Path { + Path::new("/") + } } #[cfg(any(test, feature = "test-support"))] From 03a1c8d2b8c1e1a623702b70796d1c5b1d265aff Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Sat, 26 Oct 2024 14:59:46 +0200 Subject: [PATCH 50/76] markdown preview: Fix infinite loop in parser when parsing list items (#19785) Release Notes: - Fixed an issue with the markdown parser when opening a markdown preview file that contained HTML tags inside a list item --- crates/markdown_preview/src/markdown_parser.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 10e910036b1a5ce0c295eba3cae62add9c215e24..0b3c361fd261d1061ac360240d1c2fb21567b2e3 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -626,6 +626,8 @@ impl<'a> MarkdownParser<'a> { // Otherwise we need to insert the block after all the nested items // that have been parsed so far items.extend(block); + } else { + self.cursor += 1; } } } From 2e32f1c8a19525004cb9b3b7ac9cc53c9aa4fdd0 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 26 Oct 2024 21:57:22 +0300 Subject: [PATCH 51/76] Restore horizontal scrollbar checks (#19767) Closes https://github.com/zed-industries/zed/issues/19637 Follow-up of https://github.com/zed-industries/zed/pull/18927 , restores the condition that removed the horizontal scrollbar when panel's items are not long enough. Release Notes: - Fixed horizontal scrollbar not being hidden ([#19637](https://github.com/zed-industries/zed/issues/19637)) --- crates/project_panel/src/project_panel.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 50c9d2d1269c6a2028581fb4129fcfaaacb7afee..355e8780cc91932447daccd8b630fb3d8a8b46c2 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2821,6 +2821,17 @@ impl ProjectPanel { return None; } + let scroll_handle = self.scroll_handle.0.borrow(); + let longest_item_width = scroll_handle + .last_item_size + .filter(|size| size.contents.width > size.item.width)? 
+ .contents + .width + .0 as f64; + if longest_item_width < scroll_handle.base_handle.bounds().size.width.0 as f64 { + return None; + } + Some( div() .occlude() From c12a9f26733e0791ba99b015c3695712ae40322a Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Sat, 26 Oct 2024 21:57:55 -0400 Subject: [PATCH 52/76] Add fold_at_level test (#19800) --- crates/editor/src/editor_tests.rs | 106 ++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 99b5cb663789b28460d5ee0a5aacb25f93f53f40..d56b22b454208590314f367f8cba19e1fcd98090 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -1080,6 +1080,112 @@ fn test_fold_action_multiple_line_breaks(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_fold_at_level(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let view = cx.add_window(|cx| { + let buffer = MultiBuffer::build_simple( + &" + class Foo: + # Hello! + + def a(): + print(1) + + def b(): + print(2) + + + class Bar: + # World! + + def a(): + print(1) + + def b(): + print(2) + + + " + .unindent(), + cx, + ); + build_editor(buffer.clone(), cx) + }); + + _ = view.update(cx, |view, cx| { + view.fold_at_level(&FoldAtLevel { level: 2 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo: + # Hello! + + def a():⋯ + + def b():⋯ + + + class Bar: + # World! + + def a():⋯ + + def b():⋯ + + + " + .unindent(), + ); + + view.fold_at_level(&FoldAtLevel { level: 1 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo:⋯ + + + class Bar:⋯ + + + " + .unindent(), + ); + + view.unfold_all(&UnfoldAll, cx); + view.fold_at_level(&FoldAtLevel { level: 0 }, cx); + assert_eq!( + view.display_text(cx), + " + class Foo: + # Hello! + + def a(): + print(1) + + def b(): + print(2) + + + class Bar: + # World! 
+ + def a(): + print(1) + + def b(): + print(2) + + + " + .unindent(), + ); + + assert_eq!(view.display_text(cx), view.buffer.read(cx).read(cx).text()); + }); +} + #[gpui::test] fn test_move_cursor(cx: &mut TestAppContext) { init_test(cx, |_| {}); From db61711753665a7a9763e033d167cb926e2f7835 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Sun, 27 Oct 2024 13:04:52 +0000 Subject: [PATCH 53/76] ci: Don't run GitHub Actions workflows on forks (#19789) - Closes: https://github.com/zed-industries/zed/issues/19351 Release Notes: - N/A --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3843a3343b4a79a99c06f5fdeb83039bca9823d4..ba475f88abc59d89086a9aa4b2565b6e23420cb9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -91,6 +91,7 @@ jobs: macos_tests: timeout-minutes: 60 name: (macOS) Run Clippy and tests + if: github.repository_owner == 'zed-industries' runs-on: - self-hosted - test @@ -126,6 +127,7 @@ jobs: linux_tests: timeout-minutes: 60 name: (Linux) Run Clippy and tests + if: github.repository_owner == 'zed-industries' runs-on: - buildjet-16vcpu-ubuntu-2204 steps: @@ -158,6 +160,7 @@ jobs: build_remote_server: timeout-minutes: 60 name: (Linux) Build Remote Server + if: github.repository_owner == 'zed-industries' runs-on: - buildjet-16vcpu-ubuntu-2204 steps: @@ -185,6 +188,7 @@ jobs: windows_tests: timeout-minutes: 60 name: (Windows) Run Clippy and tests + if: github.repository_owner == 'zed-industries' runs-on: hosted-windows-1 steps: - name: Checkout repo From b13940720a9091793ebe60bfbe5c12c6114ce2af Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Sun, 27 Oct 2024 14:34:59 +0100 Subject: [PATCH 54/76] markdown preview: Ignore inline HTML tags in text (#19804) Follow up to #19785 This PR ensures that we explicitly ignore inline HTML tags so that we can still extract the text between the tags and show them to the user Release Notes: - N/A --- .../markdown_preview/src/markdown_parser.rs | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 0b3c361fd261d1061ac360240d1c2fb21567b2e3..d514b89e52c948b216553164bc7504bcc3008b4c 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -234,6 +234,10 @@ impl<'a> MarkdownParser<'a> { text.push('\n'); } + // We want to ignore any inline HTML tags in the text but keep + // the text between them + Event::InlineHtml(_) => {} + Event::Text(t) => { text.push_str(t.as_ref()); @@ -849,6 +853,16 @@ mod tests { ); } + #[gpui::test] + async fn test_text_with_inline_html() { + let parsed = parse("This is a paragraph with an inline HTML tag.").await; + + assert_eq!( + parsed.children, + vec![p("This is a paragraph with an inline HTML tag.", 0..63),], + ); + } + #[gpui::test] async fn test_raw_links_detection() { let parsed = parse("Checkout this https://zed.dev link").await; @@ -1092,6 +1106,26 @@ Some other content ); } + #[gpui::test] + async fn test_list_item_with_inline_html() { + let parsed = parse( + "\ +* This is a list item with an inline HTML tag. 
+", + ) + .await; + + assert_eq!( + parsed.children, + vec![list_item( + 0..67, + 1, + Unordered, + vec![p("This is a list item with an inline HTML tag.", 4..44),], + ),], + ); + } + #[gpui::test] async fn test_nested_list_with_paragraph_inside() { let parsed = parse( From 5506669b0654add16dca8447332f4804a4cea0d8 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Mon, 28 Oct 2024 01:15:23 +0800 Subject: [PATCH 55/76] windows: Fix more windows platform test (#19802) Release Notes: - N/A --------- Co-authored-by: Kirill Bulatov --- crates/editor/src/test/editor_test_context.rs | 18 ++++++++++++--- crates/fs/src/fs.rs | 22 ++++++++++++------- 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 7234d97c5b77e4ac52d177445967e89ea1c741ea..de5065d2656d3516563caa768a64870f961a2616 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -17,6 +17,7 @@ use project::{FakeFs, Project}; use std::{ any::TypeId, ops::{Deref, DerefMut, Range}, + path::Path, sync::{ atomic::{AtomicUsize, Ordering}, Arc, @@ -42,17 +43,18 @@ impl EditorTestContext { pub async fn new(cx: &mut gpui::TestAppContext) -> EditorTestContext { let fs = FakeFs::new(cx.executor()); // fs.insert_file("/file", "".to_owned()).await; + let root = Self::root_path(); fs.insert_tree( - "/root", + root, serde_json::json!({ "file": "", }), ) .await; - let project = Project::test(fs, ["/root".as_ref()], cx).await; + let project = Project::test(fs, [root], cx).await; let buffer = project .update(cx, |project, cx| { - project.open_local_buffer("/root/file", cx) + project.open_local_buffer(root.join("file"), cx) }) .await .unwrap(); @@ -71,6 +73,16 @@ impl EditorTestContext { } } + #[cfg(target_os = "windows")] + fn root_path() -> &'static Path { + Path::new("C:\\root") + } + + #[cfg(not(target_os = "windows"))] + fn root_path() -> &'static Path { + Path::new("/root") + } + pub async fn for_editor(editor: WindowHandle, cx: &mut gpui::TestAppContext) -> Self { let editor_view = editor.root_view(cx).unwrap(); Self { diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 5ee2947448c90c091dee5d099670c02f654336b5..8483e5c02a57dd13438baf762ba297457fd39010 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -875,9 +875,11 @@ impl FakeFsState { canonical_path.clear(); match prefix { Some(prefix_component) => { - canonical_path.push(prefix_component.as_os_str()); + canonical_path = PathBuf::from(prefix_component.as_os_str()); + // Prefixes like `C:\\` are represented without their trailing slash, so we have to re-add it. + canonical_path.push(std::path::MAIN_SEPARATOR_STR); } - None => canonical_path.push("/"), + None => canonical_path = PathBuf::from(std::path::MAIN_SEPARATOR_STR), } } Component::CurDir => {} @@ -900,7 +902,7 @@ impl FakeFsState { } } entry_stack.push(entry.clone()); - canonical_path.push(name); + canonical_path = canonical_path.join(name); } else { return None; } @@ -962,6 +964,10 @@ pub static FS_DOT_GIT: std::sync::LazyLock<&'static OsStr> = #[cfg(any(test, feature = "test-support"))] impl FakeFs { + /// We need to use something large enough for Windows and Unix to consider this a new file. 
+ /// https://doc.rust-lang.org/nightly/std/time/struct.SystemTime.html#platform-specific-behavior + const SYSTEMTIME_INTERVAL: u64 = 100; + pub fn new(executor: gpui::BackgroundExecutor) -> Arc { Arc::new(Self { executor, @@ -995,7 +1001,7 @@ impl FakeFs { let new_mtime = state.next_mtime; let new_inode = state.next_inode; state.next_inode += 1; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state .write_path(path, move |entry| { match entry { @@ -1048,7 +1054,7 @@ impl FakeFs { let inode = state.next_inode; let mtime = state.next_mtime; state.next_inode += 1; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, mtime, @@ -1399,7 +1405,7 @@ impl Fs for FakeFs { let inode = state.next_inode; let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state.next_inode += 1; state.write_path(&cur_path, |entry| { entry.or_insert_with(|| { @@ -1425,7 +1431,7 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let inode = state.next_inode; let mtime = state.next_mtime; - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); state.next_inode += 1; let file = Arc::new(Mutex::new(FakeFsEntry::File { inode, @@ -1560,7 +1566,7 @@ impl Fs for FakeFs { let mut state = self.state.lock(); let mtime = state.next_mtime; let inode = util::post_inc(&mut state.next_inode); - state.next_mtime += Duration::from_nanos(1); + state.next_mtime += Duration::from_nanos(Self::SYSTEMTIME_INTERVAL); let source_entry = state.read_path(&source)?; let content = source_entry.lock().file_content(&source)?.clone(); let mut kind = Some(PathEventKind::Created); From c69da2df7071c4c327f0d18af9201f9dd876aea1 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 27 Oct 2024 15:50:54 -0700 Subject: [PATCH 56/76] Add support for git branches on remote projects (#19755) Release Notes: - Fixed a bug where the branch switcher could not be used remotely. 
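As a rough sketch (adapted from the new collaboration tests below, not a verbatim API contract), the branch operations exposed on `Project` can be driven like this from a guest or SSH client, with both calls forwarded to the host or headless server:

```rust
// List the branches of the repository at the worktree root, then switch to
// (or create) a branch by name.
let root_path = ProjectPath::root_path(worktree_id);
let branches = project
    .update(cx, |project, cx| project.branches(root_path.clone(), cx))
    .await?;
project
    .update(cx, |project, cx| {
        project.update_or_create_branch(root_path, "feature-1".to_string(), cx)
    })
    .await?;
```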
--- Cargo.lock | 1 + crates/collab/src/rpc.rs | 2 + crates/collab/src/tests/integration_tests.rs | 92 ++++++ .../remote_editing_collaboration_tests.rs | 132 +++++++- crates/fs/src/fs.rs | 56 +++- crates/git/src/repository.rs | 78 ++++- crates/gpui/src/app.rs | 13 + crates/gpui/src/app/entity_map.rs | 9 + crates/gpui/src/app/test_context.rs | 6 + crates/gpui/src/global.rs | 1 + crates/gpui/src/gpui.rs | 1 + crates/project/src/project.rs | 28 +- crates/project/src/worktree_store.rs | 189 +++++++++++ crates/proto/proto/zed.proto | 28 +- crates/proto/src/proto.rs | 9 +- crates/recent_projects/src/ssh_connections.rs | 2 +- crates/remote_server/src/headless_project.rs | 2 +- .../remote_server/src/remote_editing_tests.rs | 312 +++++++++++++++--- crates/rpc/src/proto_client.rs | 21 +- crates/settings/src/settings_store.rs | 2 + crates/title_bar/src/title_bar.rs | 2 +- crates/util/src/arc_cow.rs | 6 + crates/vcs_menu/Cargo.toml | 1 + crates/vcs_menu/src/lib.rs | 127 ++++--- crates/worktree/src/worktree.rs | 6 + 25 files changed, 996 insertions(+), 130 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 75d058db3848bd366c6ca757ec9fbf2f4822d3a7..7c81f692ee1fe1b90b13f44475585bdb7142e23b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12843,6 +12843,7 @@ dependencies = [ "git", "gpui", "picker", + "project", "ui", "util", "workspace", diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 90277242f1b1c67037253e9d6011cf34f39cfe65..d091f04326872374a7fe52c63242cee0d5cb4e55 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -308,6 +308,8 @@ impl Server { .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler( forward_mutating_project_request::, diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 80cc2500f5f4caf4d30b349b7bb7b1aa13c7e6b3..c905c440cf829b3df10cf18700a4fac6adf6060c 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -6575,3 +6575,95 @@ async fn test_context_collaboration_with_reconnect( assert!(context.buffer().read(cx).read_only()); }); } + +#[gpui::test] +async fn test_remote_git_branches( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree("/project", serde_json::json!({ ".git":{} })) + .await; + let branches = ["main", "dev", "feature-1"]; + client_a + .fs() + .insert_branches(Path::new("/project/.git"), &branches); + + let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let root_path = ProjectPath::root_path(worktree_id); + // Client A sees that a guest has joined. 
+ executor.run_until_parked(); + + let branches_b = cx_b + .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx))) + .await + .unwrap(); + + let new_branch = branches[2]; + + let branches_b = branches_b + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&branches_b, &branches); + + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let host_branch = cx_a.update(|cx| { + project_a.update(cx, |project, cx| { + project.worktree_store().update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(host_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let host_branch = cx_a.update(|cx| { + project_a.update(cx, |project, cx| { + project.worktree_store().update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(host_branch.as_ref(), "totally-new-branch"); +} diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 0e13c88d9464ea53b2d9dc5a0d16067a05611108..9fe546ffcd125061f515676fef47cc972c67bb3a 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -1,7 +1,7 @@ use crate::tests::TestServer; use call::ActiveCall; use fs::{FakeFs, Fs as _}; -use gpui::{Context as _, TestAppContext}; +use gpui::{BackgroundExecutor, Context as _, TestAppContext}; use http_client::BlockedHttpClient; use language::{language_settings::language_settings, LanguageRegistry}; use node_runtime::NodeRuntime; @@ -174,3 +174,133 @@ async fn test_sharing_an_ssh_remote_project( ); }); } + +#[gpui::test] +async fn test_ssh_collaboration_git_branches( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + cx_a.set_name("a"); + cx_b.set_name("b"); + server_cx.set_name("server"); + + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + // Set up project on remote FS + let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + remote_fs + .insert_tree("/project", serde_json::json!({ ".git":{} })) + .await; + + let branches = ["main", "dev", "feature-1"]; + remote_fs.insert_branches(Path::new("/project/.git"), &branches); + + // User A connects to the remote project via SSH. 
+ server_cx.update(HeadlessProject::init); + let remote_http_client = Arc::new(BlockedHttpClient); + let node = NodeRuntime::unavailable(); + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let headless_project = server_cx.new_model(|cx| { + client::init_settings(cx); + HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: remote_http_client, + node_runtime: node, + languages, + }, + cx, + ) + }); + + let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await; + let (project_a, worktree_id) = client_a + .build_ssh_project("/project", client_ssh, cx_a) + .await; + + // While the SSH worktree is being scanned, user A shares the remote project. + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + // User B joins the project. + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + // Give client A sometime to see that B has joined, and that the headless server + // has some git repositories + executor.run_until_parked(); + + let root_path = ProjectPath::root_path(worktree_id); + + let branches_b = cx_b + .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx))) + .await + .unwrap(); + + let new_branch = branches[2]; + + let branches_b = branches_b + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&branches_b, &branches); + + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx_b.update(|cx| { + project_b.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + executor.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), "totally-new-branch"); +} diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 8483e5c02a57dd13438baf762ba297457fd39010..4a84c27dfd09da9c092a4c77eb2b1262e1d636c6 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -813,6 +813,7 @@ struct FakeFsState { root: Arc>, next_inode: u64, next_mtime: SystemTime, + git_event_tx: smol::channel::Sender, event_txs: Vec>>, events_paused: bool, buffered_events: Vec, @@ -969,8 +970,10 @@ impl FakeFs { const SYSTEMTIME_INTERVAL: u64 = 100; pub fn new(executor: gpui::BackgroundExecutor) -> Arc { - Arc::new(Self { - executor, + let (tx, mut rx) = smol::channel::bounded::(10); + + let this = Arc::new(Self { + executor: executor.clone(), state: Mutex::new(FakeFsState { root: Arc::new(Mutex::new(FakeFsEntry::Dir { inode: 0, @@ -979,6 +982,7 @@ impl FakeFs { entries: Default::default(), git_repo_state: None, })), + git_event_tx: tx, next_mtime: SystemTime::UNIX_EPOCH, next_inode: 1, event_txs: 
Default::default(), @@ -987,7 +991,22 @@ impl FakeFs { read_dir_call_count: 0, metadata_call_count: 0, }), - }) + }); + + executor.spawn({ + let this = this.clone(); + async move { + while let Some(git_event) = rx.next().await { + if let Some(mut state) = this.state.try_lock() { + state.emit_event([(git_event, None)]); + } else { + panic!("Failed to lock file system state, this execution would have caused a test hang"); + } + } + } + }).detach(); + + this } pub fn set_next_mtime(&self, next_mtime: SystemTime) { @@ -1181,7 +1200,12 @@ impl FakeFs { let mut entry = entry.lock(); if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry { - let repo_state = git_repo_state.get_or_insert_with(Default::default); + let repo_state = git_repo_state.get_or_insert_with(|| { + Arc::new(Mutex::new(FakeGitRepositoryState::new( + dot_git.to_path_buf(), + state.git_event_tx.clone(), + ))) + }); let mut repo_state = repo_state.lock(); f(&mut repo_state); @@ -1196,7 +1220,22 @@ impl FakeFs { pub fn set_branch_name(&self, dot_git: &Path, branch: Option>) { self.with_git_state(dot_git, true, |state| { - state.branch_name = branch.map(Into::into) + let branch = branch.map(Into::into); + state.branches.extend(branch.clone()); + state.current_branch_name = branch.map(Into::into) + }) + } + + pub fn insert_branches(&self, dot_git: &Path, branches: &[&str]) { + self.with_git_state(dot_git, true, |state| { + if let Some(first) = branches.first() { + if state.current_branch_name.is_none() { + state.current_branch_name = Some(first.to_string()) + } + } + state + .branches + .extend(branches.iter().map(ToString::to_string)); }) } @@ -1836,7 +1875,12 @@ impl Fs for FakeFs { let mut entry = entry.lock(); if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry { let state = git_repo_state - .get_or_insert_with(|| Arc::new(Mutex::new(FakeGitRepositoryState::default()))) + .get_or_insert_with(|| { + Arc::new(Mutex::new(FakeGitRepositoryState::new( + abs_dot_git.to_path_buf(), + state.git_event_tx.clone(), + ))) + }) .clone(); Some(git::repository::FakeGitRepository::open(state)) } else { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 1b3686f0218c9afe203020e9b509aeb98f669d8c..fe65816cc5950bfd3a68284fc4b5de3fe420209e 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1,8 +1,9 @@ use crate::GitHostingProviderRegistry; use crate::{blame::Blame, status::GitStatus}; use anyhow::{Context, Result}; -use collections::HashMap; +use collections::{HashMap, HashSet}; use git2::BranchType; +use gpui::SharedString; use parking_lot::Mutex; use rope::Rope; use serde::{Deserialize, Serialize}; @@ -17,7 +18,7 @@ use util::ResultExt; #[derive(Clone, Debug, Hash, PartialEq)] pub struct Branch { pub is_head: bool, - pub name: Box, + pub name: SharedString, /// Timestamp of most recent commit, normalized to Unix Epoch format. 
pub unix_timestamp: Option, } @@ -41,6 +42,7 @@ pub trait GitRepository: Send + Sync { fn branches(&self) -> Result>; fn change_branch(&self, _: &str) -> Result<()>; fn create_branch(&self, _: &str) -> Result<()>; + fn branch_exits(&self, _: &str) -> Result; fn blame(&self, path: &Path, content: Rope) -> Result; } @@ -132,6 +134,18 @@ impl GitRepository for RealGitRepository { GitStatus::new(&self.git_binary_path, &working_directory, path_prefixes) } + fn branch_exits(&self, name: &str) -> Result { + let repo = self.repository.lock(); + let branch = repo.find_branch(name, BranchType::Local); + match branch { + Ok(_) => Ok(true), + Err(e) => match e.code() { + git2::ErrorCode::NotFound => Ok(false), + _ => Err(anyhow::anyhow!(e)), + }, + } + } + fn branches(&self) -> Result> { let repo = self.repository.lock(); let local_branches = repo.branches(Some(BranchType::Local))?; @@ -139,7 +153,11 @@ impl GitRepository for RealGitRepository { .filter_map(|branch| { branch.ok().and_then(|(branch, _)| { let is_head = branch.is_head(); - let name = branch.name().ok().flatten().map(Box::from)?; + let name = branch + .name() + .ok() + .flatten() + .map(|name| name.to_string().into())?; let timestamp = branch.get().peel_to_commit().ok()?.time(); let unix_timestamp = timestamp.seconds(); let timezone_offset = timestamp.offset_minutes(); @@ -201,17 +219,20 @@ impl GitRepository for RealGitRepository { } } -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone)] pub struct FakeGitRepository { state: Arc>, } -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone)] pub struct FakeGitRepositoryState { + pub path: PathBuf, + pub event_emitter: smol::channel::Sender, pub index_contents: HashMap, pub blames: HashMap, pub worktree_statuses: HashMap, - pub branch_name: Option, + pub current_branch_name: Option, + pub branches: HashSet, } impl FakeGitRepository { @@ -220,6 +241,20 @@ impl FakeGitRepository { } } +impl FakeGitRepositoryState { + pub fn new(path: PathBuf, event_emitter: smol::channel::Sender) -> Self { + FakeGitRepositoryState { + path, + event_emitter, + index_contents: Default::default(), + blames: Default::default(), + worktree_statuses: Default::default(), + current_branch_name: Default::default(), + branches: Default::default(), + } + } +} + impl GitRepository for FakeGitRepository { fn reload_index(&self) {} @@ -234,7 +269,7 @@ impl GitRepository for FakeGitRepository { fn branch_name(&self) -> Option { let state = self.state.lock(); - state.branch_name.clone() + state.current_branch_name.clone() } fn head_sha(&self) -> Option { @@ -264,18 +299,41 @@ impl GitRepository for FakeGitRepository { } fn branches(&self) -> Result> { - Ok(vec![]) + let state = self.state.lock(); + let current_branch = &state.current_branch_name; + Ok(state + .branches + .iter() + .map(|branch_name| Branch { + is_head: Some(branch_name) == current_branch.as_ref(), + name: branch_name.into(), + unix_timestamp: None, + }) + .collect()) + } + + fn branch_exits(&self, name: &str) -> Result { + let state = self.state.lock(); + Ok(state.branches.contains(name)) } fn change_branch(&self, name: &str) -> Result<()> { let mut state = self.state.lock(); - state.branch_name = Some(name.to_owned()); + state.current_branch_name = Some(name.to_owned()); + state + .event_emitter + .try_send(state.path.clone()) + .expect("Dropped repo change event"); Ok(()) } fn create_branch(&self, name: &str) -> Result<()> { let mut state = self.state.lock(); - state.branch_name = Some(name.to_owned()); + state.branches.insert(name.to_owned()); + 
state + .event_emitter + .try_send(state.path.clone()) + .expect("Dropped repo change event"); Ok(()) } diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index f81a2092d56af61ee4655b5c61173d10ac1fe2df..096f495a880e2e9190ae76265116fd64f9aeed20 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -256,6 +256,9 @@ pub struct AppContext { pub(crate) layout_id_buffer: Vec, // We recycle this memory across layout requests. pub(crate) propagate_event: bool, pub(crate) prompt_builder: Option, + + #[cfg(any(test, feature = "test-support", debug_assertions))] + pub(crate) name: Option<&'static str>, } impl AppContext { @@ -309,6 +312,9 @@ impl AppContext { layout_id_buffer: Default::default(), propagate_event: true, prompt_builder: Some(PromptBuilder::Default), + + #[cfg(any(test, feature = "test-support", debug_assertions))] + name: None, }), }); @@ -988,6 +994,7 @@ impl AppContext { } /// Move the global of the given type to the stack. + #[track_caller] pub(crate) fn lease_global(&mut self) -> GlobalLease { GlobalLease::new( self.globals_by_type @@ -1319,6 +1326,12 @@ impl AppContext { (task, is_first) } + + /// Get the name for this App. + #[cfg(any(test, feature = "test-support", debug_assertions))] + pub fn get_name(&self) -> &'static str { + self.name.as_ref().unwrap() + } } impl Context for AppContext { diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index 4d5452acc0e5fe77548444d7cbb5c33680e5d040..07aa466295af2eac951e2608409bdfba6d610ca4 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -536,6 +536,15 @@ impl AnyWeakModel { } } +impl std::fmt::Debug for AnyWeakModel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct(type_name::()) + .field("entity_id", &self.entity_id) + .field("entity_type", &self.entity_type) + .finish() + } +} + impl From> for AnyWeakModel { fn from(model: WeakModel) -> Self { model.any_model diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index f46cdc8e34b1a6a63b5f50faae49187a9067baa9..34449c91ec7328d6c7d6019621ee5203a1e6d32b 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -478,6 +478,12 @@ impl TestAppContext { .await .unwrap(); } + + /// Set a name for this App. 
+ #[cfg(any(test, feature = "test-support"))] + pub fn set_name(&mut self, name: &'static str) { + self.update(|cx| cx.name = Some(name)) + } } impl Model { diff --git a/crates/gpui/src/global.rs b/crates/gpui/src/global.rs index 05f15983644612231ed1cec62fcf980361c12ed0..96f5d5fed5b1973e672d65cbd2ef301471a18f07 100644 --- a/crates/gpui/src/global.rs +++ b/crates/gpui/src/global.rs @@ -57,6 +57,7 @@ pub trait UpdateGlobal { } impl UpdateGlobal for T { + #[track_caller] fn update_global(cx: &mut C, update: F) -> R where C: BorrowAppContext, diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 7ba3ce055ecc67142e34fc6c0bf72a6675a64adc..2952f4af8abfb7efff33bf2f3002d22c9f54e22b 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -306,6 +306,7 @@ where self.borrow_mut().set_global(global) } + #[track_caller] fn update_global(&mut self, f: impl FnOnce(&mut G, &mut Self) -> R) -> R where G: Global, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 167d5c1d49b2da37a35ca69041683baf56d9ada7..b2fc8c53041399846c5935fe3cfc17b9d1b857d8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -288,6 +288,13 @@ impl ProjectPath { path: self.path.to_string_lossy().to_string(), } } + + pub fn root_path(worktree_id: WorktreeId) -> Self { + Self { + worktree_id, + path: Path::new("").into(), + } + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -701,7 +708,7 @@ impl Project { let ssh_proto = ssh.read(cx).proto_client(); let worktree_store = - cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), 0)); + cx.new_model(|_| WorktreeStore::remote(false, ssh_proto.clone(), SSH_PROJECT_ID)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -3370,6 +3377,25 @@ impl Project { worktree.get_local_repo(&root_entry)?.repo().clone().into() } + pub fn branches( + &self, + project_path: ProjectPath, + cx: &AppContext, + ) -> Task>> { + self.worktree_store().read(cx).branches(project_path, cx) + } + + pub fn update_or_create_branch( + &self, + repository: ProjectPath, + new_branch: String, + cx: &AppContext, + ) -> Task> { + self.worktree_store() + .read(cx) + .update_or_create_branch(repository, new_branch, cx) + } + pub fn blame_buffer( &self, buffer: &Model, diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index df190d03f39239e0a8908f5e105cd0449a8d310d..dc67eedbc11bfacdd13ce2aa3205c90bd04c0363 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -73,6 +73,8 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_copy_project_entry); client.add_model_request_handler(Self::handle_delete_project_entry); client.add_model_request_handler(Self::handle_expand_project_entry); + client.add_model_request_handler(Self::handle_git_branches); + client.add_model_request_handler(Self::handle_update_branch); } pub fn local(retain_worktrees: bool, fs: Arc) -> Self { @@ -127,6 +129,13 @@ impl WorktreeStore { .find(|worktree| worktree.read(cx).id() == id) } + pub fn current_branch(&self, repository: ProjectPath, cx: &AppContext) -> Option> { + self.worktree_for_id(repository.worktree_id, cx)? + .read(cx) + .git_entry(repository.path)? 
+ .branch() + } + pub fn worktree_for_entry( &self, entry_id: ProjectEntryId, @@ -836,6 +845,131 @@ impl WorktreeStore { Ok(()) } + pub fn branches( + &self, + project_path: ProjectPath, + cx: &AppContext, + ) -> Task>> { + let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else { + return Task::ready(Err(anyhow!("No worktree found for ProjectPath"))); + }; + + match worktree.read(cx) { + Worktree::Local(local_worktree) => { + let branches = util::maybe!({ + let worktree_error = |error| { + format!( + "{} for worktree {}", + error, + local_worktree.abs_path().to_string_lossy() + ) + }; + + let entry = local_worktree + .git_entry(project_path.path) + .with_context(|| worktree_error("No git entry found"))?; + + let repo = local_worktree + .get_local_repo(&entry) + .with_context(|| worktree_error("No repository found"))? + .repo() + .clone(); + + repo.branches() + }); + + Task::ready(branches) + } + Worktree::Remote(remote_worktree) => { + let request = remote_worktree.client().request(proto::GitBranches { + project_id: remote_worktree.project_id(), + repository: Some(proto::ProjectPath { + worktree_id: project_path.worktree_id.to_proto(), + path: project_path.path.to_string_lossy().to_string(), // Root path + }), + }); + + cx.background_executor().spawn(async move { + let response = request.await?; + + let branches = response + .branches + .into_iter() + .map(|proto_branch| git::repository::Branch { + is_head: proto_branch.is_head, + name: proto_branch.name.into(), + unix_timestamp: proto_branch + .unix_timestamp + .map(|timestamp| timestamp as i64), + }) + .collect(); + + Ok(branches) + }) + } + } + } + + pub fn update_or_create_branch( + &self, + repository: ProjectPath, + new_branch: String, + cx: &AppContext, + ) -> Task> { + let Some(worktree) = self.worktree_for_id(repository.worktree_id, cx) else { + return Task::ready(Err(anyhow!("No worktree found for ProjectPath"))); + }; + + match worktree.read(cx) { + Worktree::Local(local_worktree) => { + let result = util::maybe!({ + let worktree_error = |error| { + format!( + "{} for worktree {}", + error, + local_worktree.abs_path().to_string_lossy() + ) + }; + + let entry = local_worktree + .git_entry(repository.path) + .with_context(|| worktree_error("No git entry found"))?; + + let repo = local_worktree + .get_local_repo(&entry) + .with_context(|| worktree_error("No repository found"))? + .repo() + .clone(); + + if !repo.branch_exits(&new_branch)? 
{ + repo.create_branch(&new_branch)?; + } + + repo.change_branch(&new_branch)?; + + Ok(()) + }); + + Task::ready(result) + } + Worktree::Remote(remote_worktree) => { + let request = remote_worktree.client().request(proto::UpdateGitBranch { + project_id: remote_worktree.project_id(), + repository: Some(proto::ProjectPath { + worktree_id: repository.worktree_id.to_proto(), + path: repository.path.to_string_lossy().to_string(), // Root path + }), + branch_name: new_branch, + }); + + cx.background_executor().spawn(async move { + request.await?; + Ok(()) + }) + } + } + } + async fn filter_paths( fs: &Arc, mut input: Receiver, @@ -917,6 +1051,61 @@ impl WorktreeStore { .ok_or_else(|| anyhow!("invalid request"))?; Worktree::handle_expand_entry(worktree, envelope.payload, cx).await } + + pub async fn handle_git_branches( + this: Model, + branches: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let project_path = branches + .payload + .repository + .clone() + .context("Invalid GitBranches call")?; + let project_path = ProjectPath { + worktree_id: WorktreeId::from_proto(project_path.worktree_id), + path: Path::new(&project_path.path).into(), + }; + + let branches = this + .read_with(&cx, |this, cx| this.branches(project_path, cx))? + .await?; + + Ok(proto::GitBranchesResponse { + branches: branches + .into_iter() + .map(|branch| proto::Branch { + is_head: branch.is_head, + name: branch.name.to_string(), + unix_timestamp: branch.unix_timestamp.map(|timestamp| timestamp as u64), + }) + .collect(), + }) + } + + pub async fn handle_update_branch( + this: Model, + update_branch: TypedEnvelope, + cx: AsyncAppContext, + ) -> Result { + let project_path = update_branch + .payload + .repository + .clone() + .context("Invalid GitBranches call")?; + let project_path = ProjectPath { + worktree_id: WorktreeId::from_proto(project_path.worktree_id), + path: Path::new(&project_path.path).into(), + }; + let new_branch = update_branch.payload.branch_name; + + this.read_with(&cx, |this, cx| { + this.update_or_create_branch(project_path, new_branch, cx) + })? 
+ .await?; + + Ok(proto::Ack {}) + } } #[derive(Clone, Debug)] diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 5635eb880022ea5abf6c4296c01ad9bb97f300ac..c61a14cdbfdf355d34780ea95a581a5c9d2c522c 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -281,7 +281,12 @@ message Envelope { FlushBufferedMessages flush_buffered_messages = 267; LanguageServerPromptRequest language_server_prompt_request = 268; - LanguageServerPromptResponse language_server_prompt_response = 269; // current max + LanguageServerPromptResponse language_server_prompt_response = 269; + + GitBranches git_branches = 270; + GitBranchesResponse git_branches_response = 271; + + UpdateGitBranch update_git_branch = 272; // current max } @@ -2432,3 +2437,24 @@ message LanguageServerPromptRequest { message LanguageServerPromptResponse { optional uint64 action_response = 1; } + +message Branch { + bool is_head = 1; + string name = 2; + optional uint64 unix_timestamp = 3; +} + +message GitBranches { + uint64 project_id = 1; + ProjectPath repository = 2; +} + +message GitBranchesResponse { + repeated Branch branches = 1; +} + +message UpdateGitBranch { + uint64 project_id = 1; + string branch_name = 2; + ProjectPath repository = 3; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 7a31e7cc7a691a5926e621f2c2d0c42aa00a7985..3807e04bd56d013e9e418bbe47399b4c6a9c8aac 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -357,6 +357,9 @@ messages!( (FlushBufferedMessages, Foreground), (LanguageServerPromptRequest, Foreground), (LanguageServerPromptResponse, Foreground), + (GitBranches, Background), + (GitBranchesResponse, Background), + (UpdateGitBranch, Background) ); request_messages!( @@ -473,6 +476,8 @@ request_messages!( (GetPermalinkToLine, GetPermalinkToLineResponse), (FlushBufferedMessages, Ack), (LanguageServerPromptRequest, LanguageServerPromptResponse), + (GitBranches, GitBranchesResponse), + (UpdateGitBranch, Ack) ); entity_messages!( @@ -550,7 +555,9 @@ entity_messages!( HideToast, OpenServerSettings, GetPermalinkToLine, - LanguageServerPromptRequest + LanguageServerPromptRequest, + GitBranches, + UpdateGitBranch ); entity_messages!( diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 55204e14b947ddb96b6a489f3b2bef6eb5f34c69..7dc28536502a7ada1d878377898fc74a9ed98515 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -631,7 +631,7 @@ impl SshClientDelegate { self.update_status( Some(&format!( - "Building remote server binary from source for {}", + "Building remote server binary from source for {} with Docker", &triple )), cx, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 4385dac1fe963d2a6120fd92cd51ba0eb1a70986..81be01b6a640e806985f6afe8252f43d226f9107 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Result}; use fs::Fs; -use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext, PromptLevel}; +use gpui::{AppContext, AsyncAppContext, Context as _, Model, ModelContext, PromptLevel}; use http_client::HttpClient; use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry}; use node_runtime::NodeRuntime; diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs 
index f7420ef5b091b70c8036a77650eb81208edfa2d3..82e3824eb07fef24988ad634709ce095ed1cc43d 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -26,7 +26,29 @@ use std::{ #[gpui::test] async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + "project2": { + "README.md": "# project 2", + }, + }), + ) + .await; + fs.set_index_for_repo( + Path::new("/code/project1/.git"), + &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], + ); + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -128,7 +150,22 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test #[gpui::test] async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, _) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; project .update(cx, |project, cx| { @@ -193,7 +230,22 @@ async fn test_remote_project_search(cx: &mut TestAppContext, server_cx: &mut Tes #[gpui::test] async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; cx.update_global(|settings_store: &mut SettingsStore, cx| { settings_store.set_user_settings( @@ -304,7 +356,22 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo #[gpui::test] async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, headless) = init_test(&fs, cx, server_cx).await; fs.insert_tree( "/code/project1/.zed", @@ -463,7 +530,22 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext #[gpui::test] async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -523,7 +605,22 @@ async fn 
test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont #[gpui::test] async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -566,7 +663,22 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut #[gpui::test(iterations = 10)] async fn test_canceling_buffer_opening(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -597,7 +709,25 @@ async fn test_adding_then_removing_then_adding_worktrees( cx: &mut TestAppContext, server_cx: &mut TestAppContext, ) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + "project2": { + "README.md": "# project 2", + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (_worktree, _) = project .update(cx, |project, cx| { project.find_or_create_worktree("/code/project1", true, cx) @@ -636,9 +766,25 @@ async fn test_adding_then_removing_then_adding_worktrees( #[gpui::test] async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, _fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let buffer = project.update(cx, |project, cx| project.open_server_settings(cx)); cx.executor().run_until_parked(); + let buffer = buffer.await.unwrap(); cx.update(|cx| { @@ -651,7 +797,22 @@ async fn test_open_server_settings(cx: &mut TestAppContext, server_cx: &mut Test #[gpui::test(iterations = 20)] async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let (project, _headless, fs) = init_test(cx, server_cx).await; + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; let (worktree, _) = project .update(cx, |project, cx| { @@ -690,19 +851,8 @@ async fn test_reconnect(cx: &mut TestAppContext, server_cx: &mut TestAppContext) ); } -fn init_logger() { - if std::env::var("RUST_LOG").is_ok() { - 
env_logger::try_init().ok(); - } -} - -async fn init_test( - cx: &mut TestAppContext, - server_cx: &mut TestAppContext, -) -> (Model, Model, Arc) { - init_logger(); - - let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); +#[gpui::test] +async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( "/code", @@ -710,32 +860,109 @@ async fn init_test( "project1": { ".git": {}, "README.md": "# project 1", - "src": { - "lib.rs": "fn one() -> usize { 1 }" - } - }, - "project2": { - "README.md": "# project 2", }, }), ) .await; - fs.set_index_for_repo( - Path::new("/code/project1/.git"), - &[(Path::new("src/lib.rs"), "fn one() -> usize { 0 }".into())], - ); - server_cx.update(HeadlessProject::init); + let (project, headless_project) = init_test(&fs, cx, server_cx).await; + let branches = ["main", "dev", "feature-1"]; + fs.insert_branches(Path::new("/code/project1/.git"), &branches); + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/project1", true, cx) + }) + .await + .unwrap(); + + let worktree_id = cx.update(|cx| worktree.read(cx).id()); + let root_path = ProjectPath::root_path(worktree_id); + // Give the worktree a bit of time to index the file system + cx.run_until_parked(); + + let remote_branches = project + .update(cx, |project, cx| project.branches(root_path.clone(), cx)) + .await + .unwrap(); + + let new_branch = branches[2]; + + let remote_branches = remote_branches + .into_iter() + .map(|branch| branch.name) + .collect::>(); + + assert_eq!(&remote_branches, &branches); + + cx.update(|cx| { + project.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx) + }) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store + .current_branch(root_path.clone(), cx) + .unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), branches[2]); + + // Also try creating a new branch + cx.update(|cx| { + project.update(cx, |project, cx| { + project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx) + }) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + let server_branch = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .worktree_store + .update(cx, |worktree_store, cx| { + worktree_store.current_branch(root_path, cx).unwrap() + }) + }) + }); + + assert_eq!(server_branch.as_ref(), "totally-new-branch"); +} + +pub async fn init_test( + server_fs: &Arc, + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) -> (Model, Model) { + let server_fs = server_fs.clone(); + init_logger(); + + let (opts, ssh_server_client) = SshRemoteClient::fake_server(cx, server_cx); let http_client = Arc::new(BlockedHttpClient); let node_runtime = NodeRuntime::unavailable(); let languages = Arc::new(LanguageRegistry::new(cx.executor())); + server_cx.update(HeadlessProject::init); let headless = server_cx.new_model(|cx| { client::init_settings(cx); HeadlessProject::new( crate::HeadlessAppState { session: ssh_server_client, - fs: fs.clone(), + fs: server_fs.clone(), http_client, node_runtime, languages, @@ -752,13 +979,21 @@ async fn init_test( |_, cx| cx.on_release(|_, _| drop(headless)) }) .detach(); - (project, headless, 
fs) + (project, headless) +} + +fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::try_init().ok(); + } } fn build_project(ssh: Model, cx: &mut TestAppContext) -> Model { cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); + if !cx.has_global::() { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + } }); let client = cx.update(|cx| { @@ -773,6 +1008,7 @@ fn build_project(ssh: Model, cx: &mut TestAppContext) -> Model< let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx)); let languages = Arc::new(LanguageRegistry::test(cx.executor())); let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { Project::init(&client, cx); language::init(cx); diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 56b13688bad2b6bb548e6e077d54473782636080..9288416d5720b551031e54245b15521fd9cb7906 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -123,7 +123,6 @@ impl ProtoMessageHandlerSet { let extract_entity_id = *this.entity_id_extractors.get(&payload_type_id)?; let entity_type_id = *this.entity_types_by_message_type.get(&payload_type_id)?; let entity_id = (extract_entity_id)(message.as_ref()); - match this .entities_by_type_and_remote_id .get_mut(&(entity_type_id, entity_id))? @@ -145,6 +144,26 @@ pub enum EntityMessageSubscriber { Pending(Vec>), } +impl std::fmt::Debug for EntityMessageSubscriber { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + EntityMessageSubscriber::Entity { handle } => f + .debug_struct("EntityMessageSubscriber::Entity") + .field("handle", handle) + .finish(), + EntityMessageSubscriber::Pending(vec) => f + .debug_struct("EntityMessageSubscriber::Pending") + .field( + "envelopes", + &vec.iter() + .map(|envelope| envelope.payload_type_name()) + .collect::>(), + ) + .finish(), + } + } +} + impl From> for AnyProtoClient where T: ProtoClient + 'static, diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 0130adf99cba4199bb94e41c817f84febd78a440..620055a9712d7731261b6fa23edaea6d13cdb97f 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -61,6 +61,7 @@ pub trait Settings: 'static + Send + Sync { anyhow::anyhow!("missing default") } + #[track_caller] fn register(cx: &mut AppContext) where Self: Sized, @@ -271,6 +272,7 @@ impl SettingsStore { pub fn register_setting(&mut self, cx: &mut AppContext) { let setting_type_id = TypeId::of::(); let entry = self.setting_values.entry(setting_type_id); + if matches!(entry, hash_map::Entry::Occupied(_)) { return; } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 74c5b2812a5fa34b3db290394f0947b7b1e889fc..f58eaa89a0f9b69d5dfdb9b966f21e4fc89d7235 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -447,7 +447,7 @@ impl TitleBar { }) .on_click(move |_, cx| { let _ = workspace.update(cx, |this, cx| { - BranchList::open(this, &Default::default(), cx) + BranchList::open(this, &Default::default(), cx); }); }), ) diff --git a/crates/util/src/arc_cow.rs b/crates/util/src/arc_cow.rs index 02ad1fa1f0a17179c9d68450be4f45750d5ac043..06a2fa9cd03cc2857a3bc12fead955bc5b7298cd 100644 --- a/crates/util/src/arc_cow.rs +++ b/crates/util/src/arc_cow.rs @@ -75,6 +75,12 @@ impl From for ArcCow<'_, str> { } } +impl From<&String> for ArcCow<'_, str> { + fn from(value: &String) -> Self { + 
Self::Owned(value.clone().into()) + } +} + impl<'a> From> for ArcCow<'a, str> { fn from(value: Cow<'a, str>) -> Self { match value { diff --git a/crates/vcs_menu/Cargo.toml b/crates/vcs_menu/Cargo.toml index 75dcad83dff3177d8d2ba95bb44e68bf27decd51..11de371868953d8673092106b71850d28d02bbf6 100644 --- a/crates/vcs_menu/Cargo.toml +++ b/crates/vcs_menu/Cargo.toml @@ -14,6 +14,7 @@ fuzzy.workspace = true git.workspace = true gpui.workspace = true picker.workspace = true +project.workspace = true ui.workspace = true util.workspace = true workspace.workspace = true diff --git a/crates/vcs_menu/src/lib.rs b/crates/vcs_menu/src/lib.rs index 720a427ae90efedf3004a2b1c062a3090517e85f..3ee289df0e21aa438c0904bc61306d89a4eee233 100644 --- a/crates/vcs_menu/src/lib.rs +++ b/crates/vcs_menu/src/lib.rs @@ -2,24 +2,23 @@ use anyhow::{Context, Result}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::repository::Branch; use gpui::{ - actions, rems, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, - InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription, - Task, View, ViewContext, VisualContext, WindowContext, + actions, rems, AnyElement, AppContext, AsyncAppContext, DismissEvent, EventEmitter, + FocusHandle, FocusableView, InteractiveElement, IntoElement, ParentElement, Render, + SharedString, Styled, Subscription, Task, View, ViewContext, VisualContext, WindowContext, }; use picker::{Picker, PickerDelegate}; +use project::ProjectPath; use std::{ops::Not, sync::Arc}; use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::ResultExt; -use workspace::notifications::NotificationId; -use workspace::{ModalView, Toast, Workspace}; +use workspace::notifications::DetachAndPromptErr; +use workspace::{ModalView, Workspace}; actions!(branches, [OpenRecent]); pub fn init(cx: &mut AppContext) { cx.observe_new_views(|workspace: &mut Workspace, _| { - workspace.register_action(|workspace, action, cx| { - BranchList::open(workspace, action, cx).log_err(); - }); + workspace.register_action(BranchList::open); }) .detach(); } @@ -31,6 +30,21 @@ pub struct BranchList { } impl BranchList { + pub fn open(_: &mut Workspace, _: &OpenRecent, cx: &mut ViewContext) { + let this = cx.view().clone(); + cx.spawn(|_, mut cx| async move { + // Modal branch picker has a longer trailoff than a popover one. + let delegate = BranchListDelegate::new(this.clone(), 70, &cx).await?; + + this.update(&mut cx, |workspace, cx| { + workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx)) + })?; + + Ok(()) + }) + .detach_and_prompt_err("Failed to read branches", cx, |_, _| None) + } + fn new(delegate: BranchListDelegate, rem_width: f32, cx: &mut ViewContext) -> Self { let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx)); let _subscription = cx.subscribe(&picker, |_, _, _, cx| cx.emit(DismissEvent)); @@ -40,17 +54,6 @@ impl BranchList { _subscription, } } - pub fn open( - workspace: &mut Workspace, - _: &OpenRecent, - cx: &mut ViewContext, - ) -> Result<()> { - // Modal branch picker has a longer trailoff than a popover one. 
- let delegate = BranchListDelegate::new(workspace, cx.view().clone(), 70, cx)?; - workspace.toggle_modal(cx, |cx| BranchList::new(delegate, 34., cx)); - - Ok(()) - } } impl ModalView for BranchList {} impl EventEmitter for BranchList {} @@ -100,36 +103,32 @@ pub struct BranchListDelegate { } impl BranchListDelegate { - fn new( - workspace: &Workspace, - handle: View, + async fn new( + workspace: View, branch_name_trailoff_after: usize, - cx: &AppContext, + cx: &AsyncAppContext, ) -> Result { - let project = workspace.project().read(cx); - let repo = project - .get_first_worktree_root_repo(cx) - .context("failed to get root repository for first worktree")?; + let all_branches_request = cx.update(|cx| { + let project = workspace.read(cx).project().read(cx); + let first_worktree = project + .visible_worktrees(cx) + .next() + .context("No worktrees found")?; + let project_path = ProjectPath::root_path(first_worktree.read(cx).id()); + anyhow::Ok(project.branches(project_path, cx)) + })??; + + let all_branches = all_branches_request.await?; - let all_branches = repo.branches()?; Ok(Self { matches: vec![], - workspace: handle, + workspace, all_branches, selected_index: 0, last_query: Default::default(), branch_name_trailoff_after, }) } - - fn display_error_toast(&self, message: String, cx: &mut WindowContext<'_>) { - self.workspace.update(cx, |model, ctx| { - struct GitCheckoutFailure; - let id = NotificationId::unique::(); - - model.show_toast(Toast::new(id, message), ctx) - }); - } } impl PickerDelegate for BranchListDelegate { @@ -235,40 +234,32 @@ impl PickerDelegate for BranchListDelegate { cx.spawn({ let branch = branch.clone(); |picker, mut cx| async move { - picker - .update(&mut cx, |this, cx| { - let project = this.delegate.workspace.read(cx).project().read(cx); - let repo = project - .get_first_worktree_root_repo(cx) - .context("failed to get root repository for first worktree")?; - - let branch_to_checkout = match branch { - BranchEntry::Branch(branch) => branch.string, - BranchEntry::NewBranch { name: branch_name } => { - let status = repo.create_branch(&branch_name); - if status.is_err() { - this.delegate.display_error_toast(format!("Failed to create branch '{branch_name}', check for conflicts or unstashed files"), cx); - status?; - } - - branch_name - } - }; - - let status = repo.change_branch(&branch_to_checkout); - if status.is_err() { - this.delegate.display_error_toast(format!("Failed to checkout branch '{branch_to_checkout}', check for conflicts or unstashed files"), cx); - status?; - } + let branch_change_task = picker.update(&mut cx, |this, cx| { + let project = this.delegate.workspace.read(cx).project().read(cx); - cx.emit(DismissEvent); + let branch_to_checkout = match branch { + BranchEntry::Branch(branch) => branch.string, + BranchEntry::NewBranch { name: branch_name } => branch_name, + }; + let worktree = project + .worktrees(cx) + .next() + .context("worktree disappeared")?; + let repository = ProjectPath::root_path(worktree.read(cx).id()); - Ok::<(), anyhow::Error>(()) - }) - .log_err(); + anyhow::Ok(project.update_or_create_branch(repository, branch_to_checkout, cx)) + })??; + + branch_change_task.await?; + + picker.update(&mut cx, |_, cx| { + cx.emit(DismissEvent); + + Ok::<(), anyhow::Error>(()) + }) } }) - .detach(); + .detach_and_prompt_err("Failed to change branch", cx, |_, _| None); } fn dismissed(&mut self, cx: &mut ViewContext>) { diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 
722a7b3f0abfd44f2109c81296a4b01c3bf364a4..ba65eae87c272ffd3cb2ba04f9a624ab7bf9ac7c 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2385,6 +2385,12 @@ impl Snapshot { .map(|entry| entry.to_owned()) } + pub fn git_entry(&self, work_directory_path: Arc) -> Option { + self.repository_entries + .get(&RepositoryWorkDirectory(work_directory_path)) + .map(|entry| entry.to_owned()) + } + pub fn git_entries(&self) -> impl Iterator { self.repository_entries.values() } From 2d16d2d0363ad69653ab0c1a3736de74a07f476e Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 28 Oct 2024 02:31:02 +0200 Subject: [PATCH 57/76] Fixed outline panel panicking on filtering (#19811) Closes https://github.com/zed-industries/zed/issues/19732 Release Notes: - Fixed outline panel panicking on filtering ([#19732](https://github.com/zed-industries/zed/issues/19732)) --- crates/outline_panel/src/outline_panel.rs | 313 +++++++++------------- 1 file changed, 132 insertions(+), 181 deletions(-) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 25dd5cba8dcbfa091b0f5dfdddd9ed822d6e1306..72b97c8f69ea797037f3104619d6f9323c2b0394 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -2825,7 +2825,6 @@ impl OutlinePanel { cx.spawn(|outline_panel, mut cx| async move { let mut entries = Vec::new(); let mut match_candidates = Vec::new(); - let mut added_contexts = HashSet::default(); let Ok(()) = outline_panel.update(&mut cx, |outline_panel, cx| { let auto_fold_dirs = OutlinePanelSettings::get_global(cx).auto_fold_dirs; @@ -2947,7 +2946,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, new_folded_dirs, folded_depth, @@ -2986,7 +2984,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3012,7 +3009,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3049,7 +3045,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::Fs(entry.clone()), depth, @@ -3063,7 +3058,6 @@ impl OutlinePanel { outline_panel.add_search_entries( &mut entries, &mut match_candidates, - &mut added_contexts, entry.clone(), depth, query.clone(), @@ -3097,7 +3091,6 @@ impl OutlinePanel { query.as_deref(), &mut entries, &mut match_candidates, - &mut added_contexts, cx, ); } @@ -3113,7 +3106,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::Fs(entry.clone()), 0, @@ -3132,7 +3124,6 @@ impl OutlinePanel { outline_panel.push_entry( &mut entries, &mut match_candidates, - &mut added_contexts, track_matches, PanelEntry::FoldedDirs(worktree_id, folded_dirs), folded_depth, @@ -3144,22 +3135,10 @@ impl OutlinePanel { return Vec::new(); }; - outline_panel - .update(&mut cx, |outline_panel, _| { - if matches!(outline_panel.mode, ItemsDisplayMode::Search(_)) { - cleanup_fs_entries_without_search_children( - &outline_panel.collapsed_entries, - &mut entries, - &mut match_candidates, - &mut added_contexts, - ); - } - }) - .ok(); - let Some(query) = query else { return entries; }; + let mut matched_ids = match_strings( 
&match_candidates, &query, @@ -3195,7 +3174,6 @@ impl OutlinePanel { &self, entries: &mut Vec, match_candidates: &mut Vec, - added_contexts: &mut HashSet, track_matches: bool, entry: PanelEntry, depth: usize, @@ -3221,47 +3199,39 @@ impl OutlinePanel { if let Some(file_name) = self.relative_path(fs_entry, cx).as_deref().map(file_name) { - if added_contexts.insert(file_name.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: file_name.to_string(), - char_bag: file_name.chars().collect(), - }); - } + match_candidates.push(StringMatchCandidate { + id, + string: file_name.to_string(), + char_bag: file_name.chars().collect(), + }); } } PanelEntry::FoldedDirs(worktree_id, entries) => { let dir_names = self.dir_names_string(entries, *worktree_id, cx); { - if added_contexts.insert(dir_names.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: dir_names.clone(), - char_bag: dir_names.chars().collect(), - }); - } + match_candidates.push(StringMatchCandidate { + id, + string: dir_names.clone(), + char_bag: dir_names.chars().collect(), + }); } } PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Outline(_, _, outline) => { - if added_contexts.insert(outline.text.clone()) { - match_candidates.push(StringMatchCandidate { - id, - string: outline.text.clone(), - char_bag: outline.text.chars().collect(), - }); - } - } - OutlineEntry::Excerpt(..) => {} - }, - PanelEntry::Search(new_search_entry) => { - if added_contexts.insert(new_search_entry.render_data.context_text.clone()) { match_candidates.push(StringMatchCandidate { id, - char_bag: new_search_entry.render_data.context_text.chars().collect(), - string: new_search_entry.render_data.context_text.clone(), + string: outline.text.clone(), + char_bag: outline.text.chars().collect(), }); } + OutlineEntry::Excerpt(..) 
=> {} + }, + PanelEntry::Search(new_search_entry) => { + match_candidates.push(StringMatchCandidate { + id, + char_bag: new_search_entry.render_data.context_text.chars().collect(), + string: new_search_entry.render_data.context_text.clone(), + }); } } } @@ -3408,7 +3378,6 @@ impl OutlinePanel { query: Option<&str>, entries: &mut Vec, match_candidates: &mut Vec, - added_contexts: &mut HashSet, cx: &mut ViewContext, ) { if let Some(excerpts) = self.excerpts.get(&buffer_id) { @@ -3420,7 +3389,6 @@ impl OutlinePanel { self.push_entry( entries, match_candidates, - added_contexts, track_matches, PanelEntry::Outline(OutlineEntry::Excerpt( buffer_id, @@ -3448,7 +3416,6 @@ impl OutlinePanel { self.push_entry( entries, match_candidates, - added_contexts, track_matches, PanelEntry::Outline(OutlineEntry::Outline( buffer_id, @@ -3468,7 +3435,6 @@ impl OutlinePanel { &mut self, entries: &mut Vec, match_candidates: &mut Vec, - added_contexts: &mut HashSet, parent_entry: FsEntry, parent_depth: usize, filter_query: Option, @@ -3556,7 +3522,6 @@ impl OutlinePanel { self.push_entry( entries, match_candidates, - added_contexts, filter_query.is_some(), PanelEntry::Search(new_search_entry), depth, @@ -3618,131 +3583,6 @@ impl OutlinePanel { } } -fn cleanup_fs_entries_without_search_children( - collapsed_entries: &HashSet, - entries: &mut Vec, - string_match_candidates: &mut Vec, - added_contexts: &mut HashSet, -) { - let mut match_ids_to_remove = BTreeSet::new(); - let mut previous_entry = None::<&PanelEntry>; - for (id, entry) in entries.iter().enumerate().rev() { - let has_search_items = match (previous_entry, &entry.entry) { - (Some(PanelEntry::Outline(_)), _) => unreachable!(), - (_, PanelEntry::Outline(_)) => false, - (_, PanelEntry::Search(_)) => true, - (None, PanelEntry::FoldedDirs(_, _) | PanelEntry::Fs(_)) => false, - ( - Some(PanelEntry::Search(_)), - PanelEntry::FoldedDirs(_, _) | PanelEntry::Fs(FsEntry::Directory(..)), - ) => false, - (Some(PanelEntry::FoldedDirs(..)), PanelEntry::FoldedDirs(..)) => true, - ( - Some(PanelEntry::Search(_)), - PanelEntry::Fs(FsEntry::File(..) | FsEntry::ExternalFile(..)), - ) => true, - ( - Some(PanelEntry::Fs(previous_fs)), - PanelEntry::FoldedDirs(folded_worktree, folded_dirs), - ) => { - let expected_parent = folded_dirs.last().map(|dir_entry| dir_entry.path.as_ref()); - match previous_fs { - FsEntry::ExternalFile(..) => false, - FsEntry::File(file_worktree, file_entry, ..) => { - file_worktree == folded_worktree - && file_entry.path.parent() == expected_parent - } - FsEntry::Directory(directory_wortree, directory_entry) => { - directory_wortree == folded_worktree - && directory_entry.path.parent() == expected_parent - } - } - } - ( - Some(PanelEntry::FoldedDirs(folded_worktree, folded_dirs)), - PanelEntry::Fs(fs_entry), - ) => match fs_entry { - FsEntry::File(..) | FsEntry::ExternalFile(..) => false, - FsEntry::Directory(directory_wortree, maybe_parent_directory) => { - directory_wortree == folded_worktree - && Some(maybe_parent_directory.path.as_ref()) - == folded_dirs - .first() - .and_then(|dir_entry| dir_entry.path.parent()) - } - }, - (Some(PanelEntry::Fs(previous_entry)), PanelEntry::Fs(maybe_parent_entry)) => { - match (previous_entry, maybe_parent_entry) { - (FsEntry::ExternalFile(..), _) | (_, FsEntry::ExternalFile(..)) => false, - (FsEntry::Directory(..) 
| FsEntry::File(..), FsEntry::File(..)) => false, - ( - FsEntry::Directory(previous_worktree, previous_directory), - FsEntry::Directory(new_worktree, maybe_parent_directory), - ) => { - previous_worktree == new_worktree - && previous_directory.path.parent() - == Some(maybe_parent_directory.path.as_ref()) - } - ( - FsEntry::File(previous_worktree, previous_file, ..), - FsEntry::Directory(new_worktree, maybe_parent_directory), - ) => { - previous_worktree == new_worktree - && previous_file.path.parent() - == Some(maybe_parent_directory.path.as_ref()) - } - } - } - }; - - if has_search_items { - previous_entry = Some(&entry.entry); - } else { - let collapsed_entries_to_check = match &entry.entry { - PanelEntry::FoldedDirs(worktree_id, entries) => entries - .iter() - .map(|entry| CollapsedEntry::Dir(*worktree_id, entry.id)) - .collect(), - PanelEntry::Fs(FsEntry::Directory(worktree_id, entry)) => { - vec![CollapsedEntry::Dir(*worktree_id, entry.id)] - } - PanelEntry::Fs(FsEntry::ExternalFile(buffer_id, _)) => { - vec![CollapsedEntry::ExternalFile(*buffer_id)] - } - PanelEntry::Fs(FsEntry::File(worktree_id, _, buffer_id, _)) => { - vec![CollapsedEntry::File(*worktree_id, *buffer_id)] - } - PanelEntry::Search(_) | PanelEntry::Outline(_) => Vec::new(), - }; - if !collapsed_entries_to_check.is_empty() - && collapsed_entries_to_check - .iter() - .any(|collapsed_entry| collapsed_entries.contains(collapsed_entry)) - { - previous_entry = Some(&entry.entry); - continue; - } - match_ids_to_remove.insert(id); - previous_entry = None; - } - } - - if match_ids_to_remove.is_empty() { - return; - } - - string_match_candidates.retain(|candidate| { - let retain = !match_ids_to_remove.contains(&candidate.id); - if !retain { - added_contexts.remove(&candidate.string); - } - retain - }); - match_ids_to_remove.into_iter().rev().for_each(|id| { - entries.remove(id); - }); -} - fn workspace_active_editor( workspace: &Workspace, cx: &AppContext, @@ -4374,6 +4214,117 @@ mod tests { }); } + #[gpui::test] + async fn test_item_filtering(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + populate_with_test_ra_project(&fs, "/rust-analyzer").await; + let project = Project::test(fs.clone(), ["/rust-analyzer".as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(Arc::new(rust_lang())) + }); + let workspace = add_outline_panel(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let outline_panel = outline_panel(&workspace, cx); + outline_panel.update(cx, |outline_panel, cx| outline_panel.set_active(true, cx)); + + workspace + .update(cx, |workspace, cx| { + ProjectSearchView::deploy_search(workspace, &workspace::DeploySearch::default(), cx) + }) + .unwrap(); + let search_view = workspace + .update(cx, |workspace, cx| { + workspace + .active_pane() + .read(cx) + .items() + .find_map(|item| item.downcast::()) + .expect("Project search view expected to appear after new search event trigger") + }) + .unwrap(); + + let query = "param_names_for_lifetime_elision_hints"; + perform_project_search(&search_view, query, cx); + search_view.update(cx, |search_view, cx| { + search_view + .results_editor() + .update(cx, |results_editor, cx| { + assert_eq!( + results_editor.display_text(cx).match_indices(query).count(), + 9 + ); + }); + }); + let all_matches = r#"/ + crates/ + ide/src/ + inlay_hints/ + fn_lifetime_fn.rs + search: match config.param_names_for_lifetime_elision_hints { + search: allocated_lifetimes.push(if 
config.param_names_for_lifetime_elision_hints { + search: Some(it) if config.param_names_for_lifetime_elision_hints => { + search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + inlay_hints.rs + search: pub param_names_for_lifetime_elision_hints: bool, + search: param_names_for_lifetime_elision_hints: self + static_index.rs + search: param_names_for_lifetime_elision_hints: false, + rust-analyzer/src/ + cli/ + analysis_stats.rs + search: param_names_for_lifetime_elision_hints: true, + config.rs + search: param_names_for_lifetime_elision_hints: self"#; + + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None,), + all_matches, + ); + }); + + let filter_text = "a"; + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.filter_editor.update(cx, |filter_editor, cx| { + filter_editor.set_text(filter_text, cx); + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None), + all_matches + .lines() + .filter(|item| item.contains(filter_text)) + .collect::>() + .join("\n"), + ); + }); + + outline_panel.update(cx, |outline_panel, cx| { + outline_panel.filter_editor.update(cx, |filter_editor, cx| { + filter_editor.set_text("", cx); + }); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, _| { + assert_eq!( + display_entries(&outline_panel.cached_entries, None,), + all_matches, + ); + }); + } + #[gpui::test] async fn test_frontend_repo_structure(cx: &mut TestAppContext) { init_test(cx); From ffe36c9beb40caf128535a536619b12e6153aa6a Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Sun, 27 Oct 2024 19:44:21 -0700 Subject: [PATCH 58/76] Remove hosted projects (#19754) Release Notes: - N/A --- crates/channel/src/channel_store.rs | 76 +---------------- crates/collab/src/db.rs | 1 - crates/collab/src/db/queries.rs | 1 - crates/collab/src/db/queries/channels.rs | 5 -- .../collab/src/db/queries/hosted_projects.rs | 85 ------------------- crates/collab/src/db/queries/projects.rs | 34 -------- crates/collab/src/db/tables.rs | 1 - crates/collab/src/db/tables/hosted_project.rs | 27 ------ crates/collab/src/db/tables/project.rs | 15 +--- crates/collab/src/rpc.rs | 30 ------- crates/collab_ui/src/collab_panel.rs | 60 +------------ crates/project/src/project.rs | 53 +----------- crates/proto/proto/zed.proto | 23 +---- crates/proto/src/proto.rs | 2 - crates/workspace/src/workspace.rs | 54 +----------- 15 files changed, 8 insertions(+), 459 deletions(-) delete mode 100644 crates/collab/src/db/queries/hosted_projects.rs delete mode 100644 crates/collab/src/db/tables/hosted_project.rs diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index fc5b12cfae1c39b26896126a516ba5a2ea7997f3..d627d8fe15a988fb85e0434be00189d7c1eaa804 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -3,7 +3,7 @@ mod channel_index; use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage}; use anyhow::{anyhow, Result}; use channel_index::ChannelIndex; -use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore}; +use 
client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore}; use collections::{hash_map, HashMap, HashSet}; use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt}; use gpui::{ @@ -33,30 +33,11 @@ struct NotesVersion { version: clock::Global, } -#[derive(Debug, Clone)] -pub struct HostedProject { - project_id: ProjectId, - channel_id: ChannelId, - name: SharedString, - _visibility: proto::ChannelVisibility, -} -impl From for HostedProject { - fn from(project: proto::HostedProject) -> Self { - Self { - project_id: ProjectId(project.project_id), - channel_id: ChannelId(project.channel_id), - _visibility: project.visibility(), - name: project.name.into(), - } - } -} pub struct ChannelStore { pub channel_index: ChannelIndex, channel_invitations: Vec>, channel_participants: HashMap>>, channel_states: HashMap, - hosted_projects: HashMap, - outgoing_invites: HashSet<(ChannelId, UserId)>, update_channels_tx: mpsc::UnboundedSender, opened_buffers: HashMap>, @@ -85,7 +66,6 @@ pub struct ChannelState { observed_notes_version: NotesVersion, observed_chat_message: Option, role: Option, - projects: HashSet, } impl Channel { @@ -216,7 +196,6 @@ impl ChannelStore { channel_invitations: Vec::default(), channel_index: ChannelIndex::default(), channel_participants: Default::default(), - hosted_projects: Default::default(), outgoing_invites: Default::default(), opened_buffers: Default::default(), opened_chats: Default::default(), @@ -316,19 +295,6 @@ impl ChannelStore { self.channel_index.by_id().get(&channel_id) } - pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> { - let mut projects: Vec<(SharedString, ProjectId)> = self - .channel_states - .get(&channel_id) - .map(|state| state.projects.clone()) - .unwrap_or_default() - .into_iter() - .flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id))) - .collect(); - projects.sort(); - projects - } - pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool { if let Some(buffer) = self.opened_buffers.get(&channel_id) { if let OpenedModelHandle::Open(buffer) = buffer { @@ -1102,9 +1068,7 @@ impl ChannelStore { let channels_changed = !payload.channels.is_empty() || !payload.delete_channels.is_empty() || !payload.latest_channel_message_ids.is_empty() - || !payload.latest_channel_buffer_versions.is_empty() - || !payload.hosted_projects.is_empty() - || !payload.deleted_hosted_projects.is_empty(); + || !payload.latest_channel_buffer_versions.is_empty(); if channels_changed { if !payload.delete_channels.is_empty() { @@ -1161,34 +1125,6 @@ impl ChannelStore { .or_default() .update_latest_message_id(latest_channel_message.message_id); } - - for hosted_project in payload.hosted_projects { - let hosted_project: HostedProject = hosted_project.into(); - if let Some(old_project) = self - .hosted_projects - .insert(hosted_project.project_id, hosted_project.clone()) - { - self.channel_states - .entry(old_project.channel_id) - .or_default() - .remove_hosted_project(old_project.project_id); - } - self.channel_states - .entry(hosted_project.channel_id) - .or_default() - .add_hosted_project(hosted_project.project_id); - } - - for hosted_project_id in payload.deleted_hosted_projects { - let hosted_project_id = ProjectId(hosted_project_id); - - if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) { - self.channel_states - .entry(old_project.channel_id) - .or_default() - .remove_hosted_project(old_project.project_id); - } - } } cx.notify(); @@ 
-1295,12 +1231,4 @@ impl ChannelState { }; } } - - fn add_hosted_project(&mut self, project_id: ProjectId) { - self.projects.insert(project_id); - } - - fn remove_hosted_project(&mut self, project_id: ProjectId) { - self.projects.remove(&project_id); - } } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 9c02e0c801c826ef160d91448aa87389488e122b..ef85f91fe15a7aca1b38202c069af6b1f4d276ee 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -617,7 +617,6 @@ pub struct ChannelsForUser { pub channels: Vec, pub channel_memberships: Vec, pub channel_participants: HashMap>, - pub hosted_projects: Vec, pub invited_channels: Vec, pub observed_buffer_versions: Vec, diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 79523444ab276063c4c73a76f72e76cc3605c315..bfcd111e3f48616de35b13d47d6e7e3c530f7a3a 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -10,7 +10,6 @@ pub mod contacts; pub mod contributors; pub mod embeddings; pub mod extensions; -pub mod hosted_projects; pub mod messages; pub mod notifications; pub mod processed_stripe_events; diff --git a/crates/collab/src/db/queries/channels.rs b/crates/collab/src/db/queries/channels.rs index f9da0187fec7a098adc292768617b7d14080bd9c..10120ea8143010b46c25c7ec6eb13f6ac6f9f260 100644 --- a/crates/collab/src/db/queries/channels.rs +++ b/crates/collab/src/db/queries/channels.rs @@ -615,15 +615,10 @@ impl Database { .observed_channel_messages(&channel_ids, user_id, tx) .await?; - let hosted_projects = self - .get_hosted_projects(&channel_ids, &roles_by_channel_id, tx) - .await?; - Ok(ChannelsForUser { channel_memberships, channels, invited_channels, - hosted_projects, channel_participants, latest_buffer_versions, latest_channel_messages, diff --git a/crates/collab/src/db/queries/hosted_projects.rs b/crates/collab/src/db/queries/hosted_projects.rs deleted file mode 100644 index eb38eaa9ccac9bd239c19e1c477a7adce5aeb3b9..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/queries/hosted_projects.rs +++ /dev/null @@ -1,85 +0,0 @@ -use rpc::{proto, ErrorCode}; - -use super::*; - -impl Database { - pub async fn get_hosted_projects( - &self, - channel_ids: &[ChannelId], - roles: &HashMap, - tx: &DatabaseTransaction, - ) -> Result> { - let projects = hosted_project::Entity::find() - .find_also_related(project::Entity) - .filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0))) - .all(tx) - .await? - .into_iter() - .flat_map(|(hosted_project, project)| { - if hosted_project.deleted_at.is_some() { - return None; - } - match hosted_project.visibility { - ChannelVisibility::Public => {} - ChannelVisibility::Members => { - let is_visible = roles - .get(&hosted_project.channel_id) - .map(|role| role.can_see_all_descendants()) - .unwrap_or(false); - if !is_visible { - return None; - } - } - }; - Some(proto::HostedProject { - project_id: project?.id.to_proto(), - channel_id: hosted_project.channel_id.to_proto(), - name: hosted_project.name.clone(), - visibility: hosted_project.visibility.into(), - }) - }) - .collect(); - - Ok(projects) - } - - pub async fn get_hosted_project( - &self, - hosted_project_id: HostedProjectId, - user_id: UserId, - tx: &DatabaseTransaction, - ) -> Result<(hosted_project::Model, ChannelRole)> { - let project = hosted_project::Entity::find_by_id(hosted_project_id) - .one(tx) - .await? 
- .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?; - let channel = channel::Entity::find_by_id(project.channel_id) - .one(tx) - .await? - .ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?; - - let role = match project.visibility { - ChannelVisibility::Public => { - self.check_user_is_channel_participant(&channel, user_id, tx) - .await? - } - ChannelVisibility::Members => { - self.check_user_is_channel_member(&channel, user_id, tx) - .await? - } - }; - - Ok((project, role)) - } - - pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result { - self.transaction(|tx| async move { - Ok(project::Entity::find_by_id(project_id) - .one(&*tx) - .await? - .map(|project| project.hosted_project_id.is_some()) - .ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?) - }) - .await - } -} diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 27bec21ca1cddd27b2e0056f6a21c34098fc78bc..9ea42dd9bfb812e29f7f8bcef1a89b6cd5877511 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -68,7 +68,6 @@ impl Database { connection.owner_id as i32, ))), id: ActiveValue::NotSet, - hosted_project_id: ActiveValue::Set(None), } .insert(&*tx) .await?; @@ -536,39 +535,6 @@ impl Database { .await } - /// Adds the given connection to the specified hosted project - pub async fn join_hosted_project( - &self, - id: ProjectId, - user_id: UserId, - connection: ConnectionId, - ) -> Result<(Project, ReplicaId)> { - self.transaction(|tx| async move { - let (project, hosted_project) = project::Entity::find_by_id(id) - .find_also_related(hosted_project::Entity) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("hosted project is no longer shared"))?; - - let Some(hosted_project) = hosted_project else { - return Err(anyhow!("project is not hosted"))?; - }; - - let channel = channel::Entity::find_by_id(hosted_project.channel_id) - .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("no such channel"))?; - - let role = self - .check_user_is_channel_participant(&channel, user_id, &tx) - .await?; - - self.join_project_internal(project, user_id, connection, role, &tx) - .await - }) - .await - } - pub async fn get_project(&self, id: ProjectId) -> Result { self.transaction(|tx| async move { Ok(project::Entity::find_by_id(id) diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index 23dced800b56ba36f73ffbf039cd934536fbbc80..8a4ec29998ac8693186d22c0745c8277caa62502 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -18,7 +18,6 @@ pub mod extension; pub mod extension_version; pub mod feature_flag; pub mod follower; -pub mod hosted_project; pub mod language_server; pub mod notification; pub mod notification_kind; diff --git a/crates/collab/src/db/tables/hosted_project.rs b/crates/collab/src/db/tables/hosted_project.rs deleted file mode 100644 index dd7cb1b5b107f90d9158afc47051da88aab033e8..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/hosted_project.rs +++ /dev/null @@ -1,27 +0,0 @@ -use crate::db::{ChannelId, ChannelVisibility, HostedProjectId}; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "hosted_projects")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: HostedProjectId, - pub channel_id: ChannelId, - pub name: String, - pub visibility: ChannelVisibility, - pub deleted_at: Option, -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm(has_one = "super::project::Entity")] - Project, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Project.def() - } -} diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs index a357634aff614ccb6d4377c6ff1a42522e862a5d..10e3da50e1dd09932913bf45c0792decf871de50 100644 --- a/crates/collab/src/db/tables/project.rs +++ b/crates/collab/src/db/tables/project.rs @@ -1,4 +1,4 @@ -use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId}; +use crate::db::{ProjectId, Result, RoomId, ServerId, UserId}; use anyhow::anyhow; use rpc::ConnectionId; use sea_orm::entity::prelude::*; @@ -12,7 +12,6 @@ pub struct Model { pub host_user_id: Option, pub host_connection_id: Option, pub host_connection_server_id: Option, - pub hosted_project_id: Option, } impl Model { @@ -50,12 +49,6 @@ pub enum Relation { Collaborators, #[sea_orm(has_many = "super::language_server::Entity")] LanguageServers, - #[sea_orm( - belongs_to = "super::hosted_project::Entity", - from = "Column::HostedProjectId", - to = "super::hosted_project::Column::Id" - )] - HostedProject, } impl Related for Entity { @@ -88,10 +81,4 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::HostedProject.def() - } -} - impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index d091f04326872374a7fe52c63242cee0d5cb4e55..0b90bfa0c9e2c458bfb34a39d7c57b1c34aac86a 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -287,7 +287,6 @@ impl Server { .add_request_handler(share_project) .add_message_handler(unshare_project) .add_request_handler(join_project) - .add_request_handler(join_hosted_project) .add_message_handler(leave_project) .add_request_handler(update_project) .add_request_handler(update_worktree) @@ -1795,11 +1794,6 @@ impl 
JoinProjectInternalResponse for Response { Response::::send(self, result) } } -impl JoinProjectInternalResponse for Response { - fn send(self, result: proto::JoinProjectResponse) -> Result<()> { - Response::::send(self, result) - } -} fn join_project_internal( response: impl JoinProjectInternalResponse, @@ -1923,11 +1917,6 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result let sender_id = session.connection_id; let project_id = ProjectId::from_proto(request.project_id); let db = session.db().await; - if db.is_hosted_project(project_id).await? { - let project = db.leave_hosted_project(project_id, sender_id).await?; - project_left(&project, &session); - return Ok(()); - } let (room, project) = &*db.leave_project(project_id, sender_id).await?; tracing::info!( @@ -1943,24 +1932,6 @@ async fn leave_project(request: proto::LeaveProject, session: Session) -> Result Ok(()) } -async fn join_hosted_project( - request: proto::JoinHostedProject, - response: Response, - session: Session, -) -> Result<()> { - let (mut project, replica_id) = session - .db() - .await - .join_hosted_project( - ProjectId(request.project_id as i32), - session.user_id(), - session.connection_id, - ) - .await?; - - join_project_internal(response, session, &mut project, &replica_id) -} - /// Updates other participants with changes to the project async fn update_project( request: proto::UpdateProject, @@ -4202,7 +4173,6 @@ fn build_channels_update(channels: ChannelsForUser) -> proto::UpdateChannels { update.channel_invitations.push(channel.to_proto()); } - update.hosted_projects = channels.hosted_projects; update } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 59f83e06548a6b48af52f563d318773e05808fec..f188aaf921af0d4dc8e6f87f83bcc7c084e9cd48 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -5,7 +5,7 @@ use self::channel_modal::ChannelModal; use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings}; use call::ActiveCall; use channel::{Channel, ChannelEvent, ChannelStore}; -use client::{ChannelId, Client, Contact, ProjectId, User, UserStore}; +use client::{ChannelId, Client, Contact, User, UserStore}; use contact_finder::ContactFinder; use db::kvp::KEY_VALUE_STORE; use editor::{Editor, EditorElement, EditorStyle}; @@ -182,10 +182,6 @@ enum ListEntry { ChannelEditor { depth: usize, }, - HostedProject { - id: ProjectId, - name: SharedString, - }, Contact { contact: Arc, calling: bool, @@ -566,7 +562,6 @@ impl CollabPanel { } } - let hosted_projects = channel_store.projects_for_id(channel.id); let has_children = channel_store .channel_at_index(mat.candidate_id + 1) .map_or(false, |next_channel| { @@ -600,10 +595,6 @@ impl CollabPanel { }); } } - - for (name, id) in hosted_projects { - self.entries.push(ListEntry::HostedProject { id, name }); - } } } @@ -1029,40 +1020,6 @@ impl CollabPanel { .tooltip(move |cx| Tooltip::text("Open Chat", cx)) } - fn render_channel_project( - &self, - id: ProjectId, - name: &SharedString, - is_selected: bool, - cx: &mut ViewContext, - ) -> impl IntoElement { - ListItem::new(ElementId::NamedInteger( - "channel-project".into(), - id.0 as usize, - )) - .indent_level(2) - .indent_step_size(px(20.)) - .selected(is_selected) - .on_click(cx.listener(move |this, _, cx| { - if let Some(workspace) = this.workspace.upgrade() { - let app_state = workspace.read(cx).app_state().clone(); - workspace::join_hosted_project(id, app_state, 
cx).detach_and_prompt_err( - "Failed to open project", - cx, - |_, _| None, - ) - } - })) - .start_slot( - h_flex() - .relative() - .gap_1() - .child(IconButton::new(0, IconName::FileTree)), - ) - .child(Label::new(name.clone())) - .tooltip(move |cx| Tooltip::text("Open Project", cx)) - } - fn has_subchannels(&self, ix: usize) -> bool { self.entries.get(ix).map_or(false, |entry| { if let ListEntry::Channel { has_children, .. } = entry { @@ -1538,12 +1495,6 @@ impl CollabPanel { ListEntry::ChannelChat { channel_id } => { self.join_channel_chat(*channel_id, cx) } - ListEntry::HostedProject { - id: _id, - name: _name, - } => { - // todo() - } ListEntry::OutgoingRequest(_) => {} ListEntry::ChannelEditor { .. } => {} } @@ -2157,10 +2108,6 @@ impl CollabPanel { ListEntry::ChannelChat { channel_id } => self .render_channel_chat(*channel_id, is_selected, cx) .into_any_element(), - - ListEntry::HostedProject { id, name } => self - .render_channel_project(*id, name, is_selected, cx) - .into_any_element(), } } @@ -2898,11 +2845,6 @@ impl PartialEq for ListEntry { return channel_1.id == channel_2.id; } } - ListEntry::HostedProject { id, .. } => { - if let ListEntry::HostedProject { id: other_id, .. } = other { - return id == other_id; - } - } ListEntry::ChannelNotes { channel_id } => { if let ListEntry::ChannelNotes { channel_id: other_id, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b2fc8c53041399846c5935fe3cfc17b9d1b857d8..49f4b7c6f3b23c6230a98fe4f87983ad68336c18 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -24,9 +24,7 @@ mod yarn; use anyhow::{anyhow, Context as _, Result}; use buffer_store::{BufferStore, BufferStoreEvent}; -use client::{ - proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore, -}; +use client::{proto, Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore}; use clock::ReplicaId; use collections::{BTreeSet, HashMap, HashSet}; use debounced_delay::DebouncedDelay; @@ -154,7 +152,6 @@ pub struct Project { remotely_created_models: Arc>, terminals: Terminals, node: Option, - hosted_project_id: Option, search_history: SearchHistory, search_included_history: SearchHistory, search_excluded_history: SearchHistory, @@ -678,7 +675,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - hosted_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), @@ -796,7 +792,6 @@ impl Project { local_handles: Vec::new(), }, node: Some(node), - hosted_project_id: None, search_history: Self::new_search_history(), environment, remotely_created_models: Default::default(), @@ -993,7 +988,6 @@ impl Project { local_handles: Vec::new(), }, node: None, - hosted_project_id: None, search_history: Self::new_search_history(), search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), @@ -1045,47 +1039,6 @@ impl Project { Ok(this) } - pub async fn hosted( - remote_id: ProjectId, - user_store: Model, - client: Arc, - languages: Arc, - fs: Arc, - cx: AsyncAppContext, - ) -> Result> { - client.authenticate_and_connect(true, &cx).await?; - - let subscriptions = [ - EntitySubscription::Project(client.subscribe_to_entity::(remote_id.0)?), - EntitySubscription::BufferStore( - client.subscribe_to_entity::(remote_id.0)?, - ), - EntitySubscription::WorktreeStore( - client.subscribe_to_entity::(remote_id.0)?, - ), - 
EntitySubscription::LspStore(client.subscribe_to_entity::(remote_id.0)?), - EntitySubscription::SettingsObserver( - client.subscribe_to_entity::(remote_id.0)?, - ), - ]; - let response = client - .request_envelope(proto::JoinHostedProject { - project_id: remote_id.0, - }) - .await?; - Self::from_join_project_response( - response, - subscriptions, - client, - true, - user_store, - languages, - fs, - cx, - ) - .await - } - fn new_search_history() -> SearchHistory { SearchHistory::new( Some(MAX_PROJECT_SEARCH_HISTORY_SIZE), @@ -1290,10 +1243,6 @@ impl Project { } } - pub fn hosted_project_id(&self) -> Option { - self.hosted_project_id - } - pub fn supports_terminal(&self, _cx: &AppContext) -> bool { if self.is_local() { return true; diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index c61a14cdbfdf355d34780ea95a581a5c9d2c522c..53aaa6ef6d73abc1b6368890c5eb133733977e16 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -196,8 +196,6 @@ message Envelope { GetImplementation get_implementation = 162; GetImplementationResponse get_implementation_response = 163; - JoinHostedProject join_hosted_project = 164; - CountLanguageModelTokens count_language_model_tokens = 230; CountLanguageModelTokensResponse count_language_model_tokens_response = 231; GetCachedEmbeddings get_cached_embeddings = 189; @@ -292,6 +290,7 @@ message Envelope { reserved 87 to 88; reserved 158 to 161; + reserved 164; reserved 166 to 169; reserved 177 to 185; reserved 188; @@ -523,11 +522,6 @@ message JoinProject { uint64 project_id = 1; } -message JoinHostedProject { - uint64 project_id = 1; -} - - message ListRemoteDirectory { uint64 dev_server_id = 1; string path = 2; @@ -1294,13 +1288,7 @@ message UpdateChannels { repeated ChannelMessageId latest_channel_message_ids = 8; repeated ChannelBufferVersion latest_channel_buffer_versions = 9; - repeated HostedProject hosted_projects = 10; - repeated uint64 deleted_hosted_projects = 11; - - reserved 12; - reserved 13; - reserved 14; - reserved 15; + reserved 10 to 15; } message UpdateUserChannels { @@ -1329,13 +1317,6 @@ message ChannelParticipants { repeated uint64 participant_user_ids = 2; } -message HostedProject { - uint64 project_id = 1; - uint64 channel_id = 2; - string name = 3; - ChannelVisibility visibility = 4; -} - message JoinChannel { uint64 channel_id = 1; } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 3807e04bd56d013e9e418bbe47399b4c6a9c8aac..a7140cc7ed5d12245b89469689b842c711574f75 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -228,7 +228,6 @@ messages!( (JoinChannelChat, Foreground), (JoinChannelChatResponse, Foreground), (JoinProject, Foreground), - (JoinHostedProject, Foreground), (JoinProjectResponse, Foreground), (JoinRoom, Foreground), (JoinRoomResponse, Foreground), @@ -411,7 +410,6 @@ request_messages!( (JoinChannel, JoinRoomResponse), (JoinChannelBuffer, JoinChannelBufferResponse), (JoinChannelChat, JoinChannelChatResponse), - (JoinHostedProject, JoinProjectResponse), (JoinProject, JoinProjectResponse), (JoinRoom, JoinRoomResponse), (LeaveChannelBuffer, Ack), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a81174020b0ed0a947952b542d01e80ad20ca657..b92417b2936c31df71ef5e02b7bc83d3b1a3350c 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -16,7 +16,7 @@ use anyhow::{anyhow, Context as _, Result}; use call::{call_settings::CallSettings, ActiveCall}; use client::{ proto::{self, 
ErrorCode, PanelId, PeerId}, - ChannelId, Client, ErrorExt, ProjectId, Status, TypedEnvelope, UserStore, + ChannelId, Client, ErrorExt, Status, TypedEnvelope, UserStore, }; use collections::{hash_map, HashMap, HashSet}; use derive_more::{Deref, DerefMut}; @@ -5469,58 +5469,6 @@ pub fn create_and_open_local_file( }) } -pub fn join_hosted_project( - hosted_project_id: ProjectId, - app_state: Arc, - cx: &mut AppContext, -) -> Task> { - cx.spawn(|mut cx| async move { - let existing_window = cx.update(|cx| { - cx.windows().into_iter().find_map(|window| { - let workspace = window.downcast::()?; - workspace - .read(cx) - .is_ok_and(|workspace| { - workspace.project().read(cx).hosted_project_id() == Some(hosted_project_id) - }) - .then_some(workspace) - }) - })?; - - let workspace = if let Some(existing_window) = existing_window { - existing_window - } else { - let project = Project::hosted( - hosted_project_id, - app_state.user_store.clone(), - app_state.client.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - cx.clone(), - ) - .await?; - - let window_bounds_override = window_bounds_env_override(); - cx.update(|cx| { - let mut options = (app_state.build_window_options)(None, cx); - options.window_bounds = window_bounds_override.map(WindowBounds::Windowed); - cx.open_window(options, |cx| { - cx.new_view(|cx| { - Workspace::new(Default::default(), project, app_state.clone(), cx) - }) - }) - })?? - }; - - workspace.update(&mut cx, |_, cx| { - cx.activate(true); - cx.activate_window(); - })?; - - Ok(()) - }) -} - pub fn open_ssh_project( window: WindowHandle, connection_options: SshConnectionOptions, From e86b096b92a01361b96267c58825aca92fd02477 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 28 Oct 2024 09:45:19 +0100 Subject: [PATCH 59/76] docs: Add `indent_guides` setting to project panel docs (#19819) Follow up to #18260 Release Notes: - N/A --- docs/src/configuring-zed.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 78c7c62c941db7c30898c292d2e4b82f9ca20204..784cb631ca275214380808302d212056aeab29f1 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2042,6 +2042,7 @@ Run the `theme selector: toggle` action in the command palette to see a current "folder_icons": true, "git_status": true, "indent_size": 20, + "indent_guides": true, "auto_reveal_entries": true, "auto_fold_dirs": true, "scrollbar": { @@ -2163,6 +2164,12 @@ Run the `theme selector: toggle` action in the command palette to see a current - Setting: `indent_size` - Default: `20` +### Indent Guides + +- Description: Whether to show indent guides in the project panel. +- Setting: `indent_guides` +- Default: `true` + ### Scrollbar - Description: Scrollbar related settings. Possible values: null, "auto", "system", "always", "never". Inherits editor settings when absent, see its description for more details. 
From 888fec9299b4e36c37f6f6144345def1b4e43425 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 28 Oct 2024 09:54:18 +0100 Subject: [PATCH 60/76] outline panel: Add indent guides (#19719) See #12673 | File | Search | |--------|--------| | image | image | Release Notes: - Added indent guides to the outline panel --- Cargo.lock | 2 + assets/settings/default.json | 2 + crates/gpui/src/elements/uniform_list.rs | 2 + crates/outline_panel/Cargo.toml | 2 + crates/outline_panel/src/outline_panel.rs | 122 +++++++++++- .../src/outline_panel_settings.rs | 5 + crates/ui/src/components/indent_guides.rs | 176 +++++++++++------- docs/src/configuring-zed.md | 1 + 8 files changed, 231 insertions(+), 81 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c81f692ee1fe1b90b13f44475585bdb7142e23b..91b76f33e8d8f715a3aefc9d6bc64f4cfc62b00f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7728,8 +7728,10 @@ dependencies = [ "serde", "serde_json", "settings", + "smallvec", "smol", "theme", + "ui", "util", "workspace", "worktree", diff --git a/assets/settings/default.json b/assets/settings/default.json index 32f46ce714379157dfb54ae06d6d507514421b16..cd4e3db15c2f6742510aa8f07d68b26dd602ff3f 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -388,6 +388,8 @@ "git_status": true, // Amount of indentation for nested items. "indent_size": 20, + // Whether to show indent guides in the outline panel. + "indent_guides": true, // Whether to reveal it in the outline panel automatically, // when a corresponding outline entry becomes active. // Gitignored entries are never auto revealed. diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 9ce85aab232cd7407ced619ba1f3feecdf73c9e6..2379ee9f8123e726ffb01e77858bb4feea5e92ea 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -340,6 +340,7 @@ impl Element for UniformList { visible_range.clone(), bounds, item_height, + self.item_count, cx, ); let available_space = size( @@ -396,6 +397,7 @@ pub trait UniformListDecoration { visible_range: Range, bounds: Bounds, item_height: Pixels, + item_count: usize, cx: &mut WindowContext, ) -> AnyElement; } diff --git a/crates/outline_panel/Cargo.toml b/crates/outline_panel/Cargo.toml index 824ea70735d9ca58e3904d447de919c7857c12eb..be7653db685e969e7183a023ddba63afbbbf5754 100644 --- a/crates/outline_panel/Cargo.toml +++ b/crates/outline_panel/Cargo.toml @@ -30,8 +30,10 @@ search.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +smallvec.workspace = true smol.workspace = true theme.workspace = true +ui.workspace = true util.workspace = true worktree.workspace = true workspace.workspace = true diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 72b97c8f69ea797037f3104619d6f9323c2b0394..6def76bb38d50eb16ff664478035b0cb1cdc3d94 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -24,12 +24,12 @@ use editor::{ use file_icons::FileIcons; use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; use gpui::{ - actions, anchored, deferred, div, impl_actions, px, uniform_list, Action, AnyElement, - AppContext, AssetSource, AsyncWindowContext, ClipboardItem, DismissEvent, Div, ElementId, - EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, IntoElement, - KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, Render, 
- SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, ViewContext, - VisualContext, WeakView, WindowContext, + actions, anchored, deferred, div, impl_actions, point, px, size, uniform_list, Action, + AnyElement, AppContext, AssetSource, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent, + Div, ElementId, EventEmitter, FocusHandle, FocusableView, HighlightStyle, InteractiveElement, + IntoElement, KeyContext, Model, MouseButton, MouseDownEvent, ParentElement, Pixels, Point, + Render, SharedString, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View, + ViewContext, VisualContext, WeakView, WindowContext, }; use itertools::Itertools; use language::{BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem}; @@ -42,6 +42,7 @@ use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; use smol::channel; use theme::{SyntaxTheme, ThemeSettings}; +use ui::{IndentGuideColors, IndentGuideLayout}; use util::{debug_panic, RangeExt, ResultExt, TryFutureExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, @@ -254,14 +255,14 @@ impl SearchState { #[derive(Debug)] enum SelectedEntry { Invalidated(Option), - Valid(PanelEntry), + Valid(PanelEntry, usize), None, } impl SelectedEntry { fn invalidate(&mut self) { match std::mem::replace(self, SelectedEntry::None) { - Self::Valid(entry) => *self = Self::Invalidated(Some(entry)), + Self::Valid(entry, _) => *self = Self::Invalidated(Some(entry)), Self::None => *self = Self::Invalidated(None), other => *self = other, } @@ -3568,7 +3569,7 @@ impl OutlinePanel { fn selected_entry(&self) -> Option<&PanelEntry> { match &self.selected_entry { SelectedEntry::Invalidated(entry) => entry.as_ref(), - SelectedEntry::Valid(entry) => Some(entry), + SelectedEntry::Valid(entry, _) => Some(entry), SelectedEntry::None => None, } } @@ -3577,7 +3578,16 @@ impl OutlinePanel { if focus { self.focus_handle.focus(cx); } - self.selected_entry = SelectedEntry::Valid(entry); + let ix = self + .cached_entries + .iter() + .enumerate() + .find(|(_, cached_entry)| &cached_entry.entry == &entry) + .map(|(i, _)| i) + .unwrap_or_default(); + + self.selected_entry = SelectedEntry::Valid(entry, ix); + self.autoscroll(cx); cx.notify(); } @@ -3736,6 +3746,9 @@ impl Render for OutlinePanel { let project = self.project.read(cx); let query = self.query(cx); let pinned = self.pinned; + let settings = OutlinePanelSettings::get_global(cx); + let indent_size = settings.indent_size; + let show_indent_guides = settings.indent_guides; let outline_panel = v_flex() .id("outline-panel") @@ -3901,6 +3914,61 @@ impl Render for OutlinePanel { }) .size_full() .track_scroll(self.scroll_handle.clone()) + .when(show_indent_guides, |list| { + list.with_decoration( + ui::indent_guides( + cx.view().clone(), + px(indent_size), + IndentGuideColors::panel(cx), + |outline_panel, range, _| { + let entries = outline_panel.cached_entries.get(range); + if let Some(entries) = entries { + entries.into_iter().map(|item| item.depth).collect() + } else { + smallvec::SmallVec::new() + } + }, + ) + .with_render_fn( + cx.view().clone(), + move |outline_panel, params, _| { + const LEFT_OFFSET: f32 = 14.; + + let indent_size = params.indent_size; + let item_height = params.item_height; + let active_indent_guide_ix = find_active_indent_guide_ix( + outline_panel, + ¶ms.indent_guides, + ); + + params + .indent_guides + .into_iter() + .enumerate() + .map(|(ix, layout)| { + let bounds = Bounds::new( + point( + px(layout.offset.x as f32) * indent_size + + 
px(LEFT_OFFSET), + px(layout.offset.y as f32) * item_height, + ), + size( + px(1.), + px(layout.length as f32) * item_height, + ), + ); + ui::RenderedIndentGuide { + bounds, + layout, + is_active: active_indent_guide_ix == Some(ix), + hitbox: None, + } + }) + .collect() + }, + ), + ) + }) }) } .children(self.context_menu.as_ref().map(|(menu, position, _)| { @@ -3945,6 +4013,40 @@ impl Render for OutlinePanel { } } +fn find_active_indent_guide_ix( + outline_panel: &OutlinePanel, + candidates: &[IndentGuideLayout], +) -> Option { + let SelectedEntry::Valid(_, target_ix) = &outline_panel.selected_entry else { + return None; + }; + let target_depth = outline_panel + .cached_entries + .get(*target_ix) + .map(|cached_entry| cached_entry.depth)?; + + let (target_ix, target_depth) = if let Some(target_depth) = outline_panel + .cached_entries + .get(target_ix + 1) + .filter(|cached_entry| cached_entry.depth > target_depth) + .map(|entry| entry.depth) + { + (target_ix + 1, target_depth.saturating_sub(1)) + } else { + (*target_ix, target_depth.saturating_sub(1)) + }; + + candidates + .iter() + .enumerate() + .find(|(_, guide)| { + guide.offset.y <= target_ix + && target_ix < guide.offset.y + guide.length + && guide.offset.x == target_depth + }) + .map(|(ix, _)| ix) +} + fn subscribe_for_editor_events( editor: &View, cx: &mut ViewContext, diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index e19fc3c008494745d417d2b836d6d603b9f84ffa..e165978fc758efe51c0bc9eacd1d7fa37858b6ca 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -19,6 +19,7 @@ pub struct OutlinePanelSettings { pub folder_icons: bool, pub git_status: bool, pub indent_size: f32, + pub indent_guides: bool, pub auto_reveal_entries: bool, pub auto_fold_dirs: bool, } @@ -53,6 +54,10 @@ pub struct OutlinePanelSettingsContent { /// /// Default: 20 pub indent_size: Option, + /// Whether to show indent guides in the outline panel. + /// + /// Default: true + pub indent_guides: Option, /// Whether to reveal it in the outline panel automatically, /// when a corresponding project entry becomes active. /// Gitignored entries are never auto revealed. diff --git a/crates/ui/src/components/indent_guides.rs b/crates/ui/src/components/indent_guides.rs index e45404429ce4f92897491b8981e9d19dafbe5d03..caab92053c58b884ec31edb1e8f0949b8b4b3402 100644 --- a/crates/ui/src/components/indent_guides.rs +++ b/crates/ui/src/components/indent_guides.rs @@ -140,13 +140,18 @@ mod uniform_list { visible_range: Range, bounds: Bounds, item_height: Pixels, + item_count: usize, cx: &mut WindowContext, ) -> AnyElement { let mut visible_range = visible_range.clone(); - visible_range.end += 1; + let includes_trailing_indent = visible_range.end < item_count; + // Check if we have entries after the visible range, + // if so extend the visible range so we can fetch a trailing indent, + // which is needed to compute indent guides correctly. 
+ if includes_trailing_indent { + visible_range.end += 1; + } let visible_entries = &(self.compute_indents_fn)(visible_range.clone(), cx); - // Check if we have an additional indent that is outside of the visible range - let includes_trailing_indent = visible_entries.len() == visible_range.len(); let indent_guides = compute_indent_guides( &visible_entries, visible_range.start, @@ -198,8 +203,12 @@ mod uniform_list { on_hovered_indent_guide_click: Option>, } - struct IndentGuidesElementPrepaintState { - hitboxes: SmallVec<[Hitbox; 12]>, + enum IndentGuidesElementPrepaintState { + Static, + Interactive { + hitboxes: Rc>, + on_hovered_indent_guide_click: Rc, + }, } impl Element for IndentGuidesElement { @@ -225,11 +234,21 @@ mod uniform_list { _request_layout: &mut Self::RequestLayoutState, cx: &mut WindowContext, ) -> Self::PrepaintState { - let mut hitboxes = SmallVec::new(); - for guide in self.indent_guides.as_ref().iter() { - hitboxes.push(cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false)); + if let Some(on_hovered_indent_guide_click) = self.on_hovered_indent_guide_click.clone() + { + let hitboxes = self + .indent_guides + .as_ref() + .iter() + .map(|guide| cx.insert_hitbox(guide.hitbox.unwrap_or(guide.bounds), false)) + .collect(); + Self::PrepaintState::Interactive { + hitboxes: Rc::new(hitboxes), + on_hovered_indent_guide_click, + } + } else { + Self::PrepaintState::Static } - Self::PrepaintState { hitboxes } } fn paint( @@ -240,81 +259,96 @@ mod uniform_list { prepaint: &mut Self::PrepaintState, cx: &mut WindowContext, ) { - let callback = self.on_hovered_indent_guide_click.clone(); - if let Some(callback) = callback { - cx.on_mouse_event({ - let hitboxes = prepaint.hitboxes.clone(); - let indent_guides = self.indent_guides.clone(); - move |event: &MouseDownEvent, phase, cx| { - if phase == DispatchPhase::Bubble && event.button == MouseButton::Left { - let mut active_hitbox_ix = None; - for (i, hitbox) in hitboxes.iter().enumerate() { - if hitbox.is_hovered(cx) { - active_hitbox_ix = Some(i); - break; + match prepaint { + IndentGuidesElementPrepaintState::Static => { + for indent_guide in self.indent_guides.as_ref() { + let fill_color = if indent_guide.is_active { + self.colors.active + } else { + self.colors.default + }; + + cx.paint_quad(fill(indent_guide.bounds, fill_color)); + } + } + IndentGuidesElementPrepaintState::Interactive { + hitboxes, + on_hovered_indent_guide_click, + } => { + cx.on_mouse_event({ + let hitboxes = hitboxes.clone(); + let indent_guides = self.indent_guides.clone(); + let on_hovered_indent_guide_click = on_hovered_indent_guide_click.clone(); + move |event: &MouseDownEvent, phase, cx| { + if phase == DispatchPhase::Bubble && event.button == MouseButton::Left { + let mut active_hitbox_ix = None; + for (i, hitbox) in hitboxes.iter().enumerate() { + if hitbox.is_hovered(cx) { + active_hitbox_ix = Some(i); + break; + } } - } - let Some(active_hitbox_ix) = active_hitbox_ix else { - return; - }; + let Some(active_hitbox_ix) = active_hitbox_ix else { + return; + }; - let active_indent_guide = &indent_guides[active_hitbox_ix].layout; - callback(active_indent_guide, cx); + let active_indent_guide = &indent_guides[active_hitbox_ix].layout; + on_hovered_indent_guide_click(active_indent_guide, cx); - cx.stop_propagation(); - cx.prevent_default(); + cx.stop_propagation(); + cx.prevent_default(); + } } - } - }); - } - - let mut hovered_hitbox_id = None; - for (i, hitbox) in prepaint.hitboxes.iter().enumerate() { - 
cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox); - let indent_guide = &self.indent_guides[i]; - let fill_color = if hitbox.is_hovered(cx) { - hovered_hitbox_id = Some(hitbox.id); - self.colors.hover - } else if indent_guide.is_active { - self.colors.active - } else { - self.colors.default - }; - - cx.paint_quad(fill(indent_guide.bounds, fill_color)); - } - - cx.on_mouse_event({ - let prev_hovered_hitbox_id = hovered_hitbox_id; - let hitboxes = prepaint.hitboxes.clone(); - move |_: &MouseMoveEvent, phase, cx| { + }); let mut hovered_hitbox_id = None; - for hitbox in &hitboxes { - if hitbox.is_hovered(cx) { + for (i, hitbox) in hitboxes.iter().enumerate() { + cx.set_cursor_style(gpui::CursorStyle::PointingHand, hitbox); + let indent_guide = &self.indent_guides[i]; + let fill_color = if hitbox.is_hovered(cx) { hovered_hitbox_id = Some(hitbox.id); - break; - } + self.colors.hover + } else if indent_guide.is_active { + self.colors.active + } else { + self.colors.default + }; + + cx.paint_quad(fill(indent_guide.bounds, fill_color)); } - if phase == DispatchPhase::Capture { - // If the hovered hitbox has changed, we need to re-paint the indent guides. - match (prev_hovered_hitbox_id, hovered_hitbox_id) { - (Some(prev_id), Some(id)) => { - if prev_id != id { - cx.refresh(); + + cx.on_mouse_event({ + let prev_hovered_hitbox_id = hovered_hitbox_id; + let hitboxes = hitboxes.clone(); + move |_: &MouseMoveEvent, phase, cx| { + let mut hovered_hitbox_id = None; + for hitbox in hitboxes.as_ref() { + if hitbox.is_hovered(cx) { + hovered_hitbox_id = Some(hitbox.id); + break; } } - (None, Some(_)) => { - cx.refresh(); - } - (Some(_), None) => { - cx.refresh(); + if phase == DispatchPhase::Capture { + // If the hovered hitbox has changed, we need to re-paint the indent guides. + match (prev_hovered_hitbox_id, hovered_hitbox_id) { + (Some(prev_id), Some(id)) => { + if prev_id != id { + cx.refresh(); + } + } + (None, Some(_)) => { + cx.refresh(); + } + (Some(_), None) => { + cx.refresh(); + } + (None, None) => {} + } } - (None, None) => {} } - } + }); } - }); + } } } diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 784cb631ca275214380808302d212056aeab29f1..f149fa5cf1b59dfcdd5805824846d2a1b9c31501 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2237,6 +2237,7 @@ Run the `theme selector: toggle` action in the command palette to see a current "folder_icons": true, "git_status": true, "indent_size": 20, + "indent_guides": true, "auto_reveal_entries": true, "auto_fold_dirs": true, } From 2ab0b3b81976367961048e75d93a029cebc866d8 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 11:02:46 +0100 Subject: [PATCH 61/76] remote server: Fix language servers not starting (#19821) PR #19653 change the code in this diff, which lead to the remote_server binary trying to load language grammars, which in turn failed, and stopped languages from being loaded correctly. That then lead to language servers not starting up. This change reintroduces what #19653 removed, so that we don't load the grammar on the remote_server, by ignoring the grammar name from the config. The tests still all work. 
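Concretely, the guard this change reintroduces (shown in the diff below) replaces the loaded config with one that keeps only the language name and matcher whenever the `load-grammars` feature is disabled, so the headless server never attempts to resolve a grammar:

```rust
// Only keep the fields the remote server needs; everything else
// (including the grammar name) falls back to Default.
#[cfg(not(feature = "load-grammars"))]
{
    config = LanguageConfig {
        name: config.name,
        matcher: config.matcher,
        ..Default::default()
    }
}
```
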
Release Notes: - N/A Co-authored-by: Bennet --- crates/languages/src/lib.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 7e8c09c8ad068aa9384ffedf0aaef331e4acfa47..03c4735d6d645b34d3f72260fd92e85497472910 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -288,6 +288,15 @@ fn load_config(name: &str) -> LanguageConfig { .with_context(|| format!("failed to load config.toml for language {name:?}")) .unwrap(); + #[cfg(not(feature = "load-grammars"))] + { + config = LanguageConfig { + name: config.name, + matcher: config.matcher, + ..Default::default() + } + } + config } From 177dfdf9002fcfc32e8356102879373e4460a1f5 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 28 Oct 2024 14:53:40 +0200 Subject: [PATCH 62/76] Declare RUSTFLAGS env var for all CI jobs (#19826) Follow-up of https://github.com/zed-industries/zed/pull/19149 Makes RUSTFLAGS propagation uniform, to ensure all `cargo ...` jobs get the same RUSTFLAGS env set. Release Notes: - N/A --- .github/workflows/ci.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ba475f88abc59d89086a9aa4b2565b6e23420cb9..dc38baeae90710864cc7467f5617df0e39ac8194 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,6 +25,7 @@ env: CARGO_TERM_COLOR: always CARGO_INCREMENTAL: 0 RUST_BACKTRACE: 1 + RUSTFLAGS: "-D warnings" jobs: migration_checks: @@ -116,13 +117,13 @@ jobs: uses: ./.github/actions/run_tests - name: Build collab - run: RUSTFLAGS="-D warnings" cargo build -p collab + run: cargo build -p collab - name: Build other binaries and features run: | - RUSTFLAGS="-D warnings" cargo build --workspace --bins --all-features + cargo build --workspace --bins --all-features cargo check -p gpui --features "macos-blade" - RUSTFLAGS="-D warnings" cargo build -p remote_server + cargo build -p remote_server linux_tests: timeout-minutes: 60 @@ -155,7 +156,7 @@ jobs: uses: ./.github/actions/run_tests - name: Build Zed - run: RUSTFLAGS="-D warnings" cargo build -p zed + run: cargo build -p zed build_remote_server: timeout-minutes: 60 @@ -182,7 +183,7 @@ jobs: run: ./script/remote-server && ./script/install-mold 2.34.0 - name: Build Remote Server - run: RUSTFLAGS="-D warnings" cargo build -p remote_server + run: cargo build -p remote_server # todo(windows): Actually run the tests windows_tests: @@ -207,7 +208,7 @@ jobs: run: cargo xtask clippy - name: Build Zed - run: $env:RUSTFLAGS="-D warnings"; cargo build + run: cargo build bundle-mac: timeout-minutes: 60 From 03bd95405b3398691c64d16b02783b6dce214c18 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 14:14:51 +0100 Subject: [PATCH 63/76] docs: Add diagram to remote development docs (#19827) Release Notes: - N/A --- docs/src/remote-development.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/src/remote-development.md b/docs/src/remote-development.md index 708d0e0b393eaef216c582b0e39944333e517bd8..9dc1777f39f1d2f1071fd003e0d85bd8bac915a3 100644 --- a/docs/src/remote-development.md +++ b/docs/src/remote-development.md @@ -8,6 +8,10 @@ Remote Development allows you to code at the speed of thought, even when your co Remote development requires two computers, your local machine that runs the Zed UI and the remote server which runs a Zed headless server. 
The two communicate over SSH, so you will need to be able to SSH from your local machine into the remote server to use this feature. +![Architectural overview of Zed Remote Development](https://zed.dev/img/remote-development/diagram.png) + +On your local machine, Zed runs its UI, talks to language models, uses Tree-sitter to parse and syntax-highlight code, and store unsaved changes and recent projects. The source code, language servers, tasks, and the terminal all run on the remote server. + > **Note:** The original version of remote development sent traffic via Zed's servers. As of Zed v0.157 you can no-longer use that mode. ## Setup @@ -15,7 +19,7 @@ Remote development requires two computers, your local machine that runs the Zed 1. Download and install the latest [Zed Preview](https://zed.dev/releases/preview). You need at least Zed v0.159. 1. Open the remote projects dialogue with cmd-shift-p remote or cmd-control-o. 1. Click "Connect New Server" and enter the command you use to SSH into the server. See [Supported SSH options](#supported-ssh-options) for options you can pass. -1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, it will download the latest version of the Zed server and upload it to the remote over SSH. +1. Your local machine will attempt to connect to the remote server using the `ssh` binary on your path. Assuming the connection is successful, Zed will download the server on the remote host and start it. 1. Once the Zed server is running, you will be prompted to choose a path to open on the remote server. > **Note:** Zed does not currently handle opening very large directories (for example, `/` or `~` that may have >100,000 files) very well. We are working on improving this, but suggest in the meantime opening only specific projects, or subfolders of very large mono-repos. From cdddb4d3603691406ac846f97bc66699c3dc093a Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 28 Oct 2024 15:34:03 +0100 Subject: [PATCH 64/76] Add language toolchains (#19576) This PR adds support for selecting toolchains for a given language (e.g. Rust toolchains or Python virtual environments) with support for SSH projects provided out of the box. For Python we piggy-back off of [PET](https://github.com/microsoft/python-environment-tools), a library maintained by Microsoft. 
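As a rough, illustrative sketch of the shape of this feature (the names below are hypothetical and are not the actual Zed API; the real types live in the new `crates/language/src/toolchain.rs` and `crates/project/src/toolchain_store.rs`), a language-agnostic toolchain lister boils down to something like:

```rust
use std::path::{Path, PathBuf};

/// A concrete toolchain a project can be pointed at, e.g. a Python virtualenv.
/// (Hypothetical type, for illustration only.)
#[derive(Clone, Debug)]
pub struct Toolchain {
    pub name: String,
    pub path: PathBuf,
}

/// Anything that can enumerate the toolchains available to a worktree,
/// e.g. by delegating Python environment discovery to PET.
pub trait ToolchainLister {
    fn list(&self, worktree_root: &Path) -> Vec<Toolchain>;
}

/// Minimal example: report a `.venv` directory at the worktree root, if any.
pub struct VenvLister;

impl ToolchainLister for VenvLister {
    fn list(&self, worktree_root: &Path) -> Vec<Toolchain> {
        let venv = worktree_root.join(".venv");
        if venv.is_dir() {
            vec![Toolchain {
                name: ".venv".to_string(),
                path: venv,
            }]
        } else {
            Vec::new()
        }
    }
}
```

The status bar selector added in this PR is, roughly speaking, the UI over that kind of enumeration.
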
Closes #16421 Closes #7646 Release Notes: - Added toolchain selector to the status bar (with initial support for Python virtual environments) --- .github/workflows/ci.yml | 3 + Cargo.lock | 493 ++++++++++++++++-- Cargo.toml | 7 + assets/settings/default.json | 1 + crates/extension/src/extension_lsp_adapter.rs | 4 +- crates/extension/src/extension_store.rs | 25 +- crates/language/src/language.rs | 16 +- crates/language/src/language_registry.rs | 63 ++- crates/language/src/toolchain.rs | 65 +++ crates/languages/Cargo.toml | 5 + crates/languages/src/json.rs | 5 +- crates/languages/src/lib.rs | 55 +- crates/languages/src/python.rs | 115 +++- crates/languages/src/tailwind.rs | 3 +- crates/languages/src/typescript.rs | 4 +- crates/languages/src/vtsls.rs | 3 +- crates/languages/src/yaml.rs | 4 +- crates/project/src/lsp_store.rs | 135 +++-- crates/project/src/project.rs | 65 ++- crates/project/src/toolchain_store.rs | 416 +++++++++++++++ crates/proto/proto/zed.proto | 49 +- crates/proto/src/proto.rs | 17 +- crates/remote_server/src/headless_project.rs | 7 +- crates/toolchain_selector/Cargo.toml | 24 + crates/toolchain_selector/LICENSE-GPL | 1 + .../src/active_toolchain.rs | 173 ++++++ .../src/toolchain_selector.rs | 343 ++++++++++++ crates/workspace/src/persistence.rs | 96 +++- crates/workspace/src/workspace.rs | 16 + crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + crates/zed/src/zed.rs | 3 + script/licenses/zed-licenses.toml | 138 +++++ 33 files changed, 2222 insertions(+), 134 deletions(-) create mode 100644 crates/language/src/toolchain.rs create mode 100644 crates/project/src/toolchain_store.rs create mode 100644 crates/toolchain_selector/Cargo.toml create mode 120000 crates/toolchain_selector/LICENSE-GPL create mode 100644 crates/toolchain_selector/src/active_toolchain.rs create mode 100644 crates/toolchain_selector/src/toolchain_selector.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dc38baeae90710864cc7467f5617df0e39ac8194..84ed0dd5d44ab8f7213c386043b2ae4c7ba081b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -192,6 +192,9 @@ jobs: if: github.repository_owner == 'zed-industries' runs-on: hosted-windows-1 steps: + # more info here:- https://github.com/rust-lang/cargo/issues/13020 + - name: Enable longer pathnames for git + run: git config --system core.longpaths true - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: diff --git a/Cargo.lock b/Cargo.lock index 91b76f33e8d8f715a3aefc9d6bc64f4cfc62b00f..bd9ad91bf7416a7281396b3de2e9550c6d995a4b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -291,6 +291,12 @@ dependencies = [ "syn 2.0.76", ] +[[package]] +name = "arraydeque" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236" + [[package]] name = "arrayref" version = "0.3.8" @@ -385,7 +391,7 @@ dependencies = [ "ctor", "db", "editor", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "futures 0.3.30", @@ -2551,7 +2557,7 @@ dependencies = [ "dashmap 6.0.1", "derive_more", "editor", - "env_logger", + "env_logger 0.11.5", "envy", "file_finder", "fs", @@ -2706,7 +2712,7 @@ dependencies = [ "command_palette_hooks", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "fuzzy", "go_to_line", "gpui", @@ -3483,7 +3489,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -3671,7 +3677,7 @@ 
dependencies = [ "ctor", "db", "emojis", - "env_logger", + "env_logger 0.11.5", "file_icons", "futures 0.3.30", "fuzzy", @@ -3877,6 +3883,19 @@ dependencies = [ "regex", ] +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + [[package]] name = "env_logger" version = "0.11.5" @@ -3985,7 +4004,7 @@ dependencies = [ "client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "git", @@ -4080,7 +4099,7 @@ dependencies = [ "client", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "gpui", @@ -4122,7 +4141,7 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", - "env_logger", + "env_logger 0.11.5", "extension", "fs", "language", @@ -4281,7 +4300,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "file_icons", "futures 0.3.30", "fuzzy", @@ -5036,7 +5055,7 @@ dependencies = [ "ctor", "derive_more", "embed-resource", - "env_logger", + "env_logger 0.11.5", "etagere", "filedescriptor", "flume", @@ -5226,6 +5245,15 @@ dependencies = [ "serde", ] +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.5", +] + [[package]] name = "hashlink" version = "0.9.1" @@ -6184,7 +6212,7 @@ dependencies = [ "collections", "ctor", "ec4rs", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "fuzzy", "git", @@ -6241,7 +6269,7 @@ dependencies = [ "copilot", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "feature_flags", "futures 0.3.30", "google_ai", @@ -6298,7 +6326,7 @@ dependencies = [ "collections", "copilot", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -6332,6 +6360,11 @@ dependencies = [ "lsp", "node_runtime", "paths", + "pet", + "pet-conda", + "pet-core", + "pet-poetry", + "pet-reporter", "project", "regex", "rope", @@ -6628,7 +6661,7 @@ dependencies = [ "async-pipe", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "log", @@ -6711,7 +6744,7 @@ version = "0.1.0" dependencies = [ "anyhow", "assets", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "language", @@ -6824,7 +6857,7 @@ dependencies = [ "clap", "clap_complete", "elasticlunr-rs", - "env_logger", + "env_logger 0.11.5", "futures-util", "handlebars 5.1.2", "ignore", @@ -7006,6 +7039,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "msvc_spectre_libs" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8661ace213a0a130c7c5b9542df5023aedf092a02008ccf477b39ff108990305" +dependencies = [ + "cc", +] + [[package]] name = "multi_buffer" version = "0.1.0" @@ -7014,7 +7056,7 @@ dependencies = [ "clock", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "itertools 0.13.0", @@ -7974,6 +8016,366 @@ dependencies = [ "sha2", ] +[[package]] +name = "pet" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "clap", + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + 
"pet-conda", + "pet-core", + "pet-env-var-path", + "pet-fs", + "pet-global-virtualenvs", + "pet-homebrew", + "pet-jsonrpc", + "pet-linux-global-python", + "pet-mac-commandlinetools", + "pet-mac-python-org", + "pet-mac-xcode", + "pet-pipenv", + "pet-poetry", + "pet-pyenv", + "pet-python-utils", + "pet-reporter", + "pet-telemetry", + "pet-venv", + "pet-virtualenv", + "pet-virtualenvwrapper", + "pet-windows-registry", + "pet-windows-store", + "serde", + "serde_json", +] + +[[package]] +name = "pet-conda" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "regex", + "serde", + "serde_json", + "yaml-rust2", +] + +[[package]] +name = "pet-core" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "clap", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-fs", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-env-var-path" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", +] + +[[package]] +name = "pet-fs" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", +] + +[[package]] +name = "pet-global-virtualenvs" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-virtualenv", +] + +[[package]] +name = "pet-homebrew" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-jsonrpc" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "log", + "msvc_spectre_libs", + "pet-core", + "serde", + "serde_json", +] + +[[package]] +name = "pet-linux-global-python" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-commandlinetools" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" 
+dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-python-org" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-mac-xcode" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-pipenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-poetry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "base64 0.22.1", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "pet-virtualenv", + "regex", + "serde", + "serde_json", + "sha2", + "toml 0.8.19", +] + +[[package]] +name = "pet-pyenv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-reporter", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "pet-python-utils" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "regex", + "serde", + "serde_json", + "sha2", +] + +[[package]] +name = "pet-reporter" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-jsonrpc", + "serde", + "serde_json", +] + +[[package]] +name = "pet-telemetry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "env_logger 0.10.2", + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "regex", +] + +[[package]] +name = "pet-venv" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-virtualenv" +version = "0.1.0" +source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", +] + +[[package]] +name = "pet-virtualenvwrapper" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", +] + +[[package]] +name = "pet-windows-registry" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-conda", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "pet-windows-store", + "regex", + "winreg 0.52.0", +] + +[[package]] +name = "pet-windows-store" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=ffcbf3f28c46633abd5448a52b1f396c322e0d6c#ffcbf3f28c46633abd5448a52b1f396c322e0d6c" +dependencies = [ + "lazy_static", + "log", + "msvc_spectre_libs", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", + "winreg 0.52.0", +] + [[package]] name = "petgraph" version = "0.6.5" @@ -8062,7 +8464,7 @@ dependencies = [ "anyhow", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "gpui", "menu", "serde", @@ -8408,7 +8810,7 @@ dependencies = [ "client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "fuzzy", @@ -9123,7 +9525,7 @@ dependencies = [ "clap", "client", "clock", - "env_logger", + "env_logger 0.11.5", "fork", "fs", "futures 0.3.30", @@ -9174,7 +9576,7 @@ dependencies = [ "collections", "command_palette_hooks", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "http_client", @@ -9454,7 +9856,7 @@ dependencies = [ "arrayvec", "criterion", "ctor", - "env_logger", + "env_logger 0.11.5", "gpui", "log", "rand 0.8.5", @@ -9485,7 +9887,7 @@ dependencies = [ "base64 0.22.1", "chrono", "collections", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "parking_lot", @@ -10074,7 +10476,7 @@ dependencies = [ "client", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "feature_flags", "fs", "futures 0.3.30", @@ -10767,7 +11169,7 @@ dependencies = [ "futures-io", "futures-util", "hashbrown 0.14.5", - "hashlink", + "hashlink 0.9.1", "hex", "indexmap 2.4.0", "log", @@ -11091,7 +11493,7 @@ version = "0.1.0" dependencies = [ "arrayvec", "ctor", - "env_logger", + "env_logger 0.11.5", "log", "rand 0.8.5", "rayon", @@ -11105,7 +11507,7 @@ dependencies = [ "client", "collections", "editor", - "env_logger", + "env_logger 0.11.5", "futures 0.3.30", "gpui", "http_client", @@ -11404,7 +11806,7 @@ dependencies = [ "collections", "ctor", "editor", - "env_logger", + "env_logger 0.11.5", "gpui", "language", "menu", @@ -11611,7 +12013,7 @@ dependencies = [ "clock", "collections", "ctor", - "env_logger", + "env_logger 0.11.5", "gpui", "http_client", "log", @@ -12100,6 +12502,21 @@ dependencies = [ "winnow 0.6.18", ] +[[package]] +name = "toolchain_selector" +version = "0.1.0" +dependencies = [ + "editor", + "fuzzy", + "gpui", + "language", + "picker", + "project", + "ui", + "util", + "workspace", +] + [[package]] name = "topological-sort" version = 
"0.2.2" @@ -14269,7 +14686,7 @@ dependencies = [ "collections", "db", "derive_more", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "git", @@ -14306,7 +14723,7 @@ dependencies = [ "anyhow", "clock", "collections", - "env_logger", + "env_logger 0.11.5", "fs", "futures 0.3.30", "fuzzy", @@ -14476,6 +14893,17 @@ dependencies = [ "clap", ] +[[package]] +name = "yaml-rust2" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8902160c4e6f2fb145dbe9d6760a75e3c9522d8bf796ed7047c85919ac7115f8" +dependencies = [ + "arraydeque", + "encoding_rs", + "hashlink 0.8.4", +] + [[package]] name = "yansi" version = "1.0.1" @@ -14589,7 +15017,7 @@ dependencies = [ "db", "diagnostics", "editor", - "env_logger", + "env_logger 0.11.5", "extension", "extensions_ui", "feature_flags", @@ -14656,6 +15084,7 @@ dependencies = [ "theme", "theme_selector", "time", + "toolchain_selector", "tree-sitter-md", "tree-sitter-rust", "ui", diff --git a/Cargo.toml b/Cargo.toml index 64a2546020982f95a8e36da84b40cea87e8c98b4..0697cc0c0becc472a71ee9ddc640c04404122b54 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -117,6 +117,7 @@ members = [ "crates/theme_selector", "crates/time_format", "crates/title_bar", + "crates/toolchain_selector", "crates/ui", "crates/ui_input", "crates/ui_macros", @@ -290,6 +291,7 @@ theme_importer = { path = "crates/theme_importer" } theme_selector = { path = "crates/theme_selector" } time_format = { path = "crates/time_format" } title_bar = { path = "crates/title_bar" } +toolchain_selector = { path = "crates/toolchain_selector" } ui = { path = "crates/ui" } ui_input = { path = "crates/ui_input" } ui_macros = { path = "crates/ui_macros" } @@ -376,6 +378,11 @@ ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } parking_lot = "0.12.1" pathdiff = "0.2" +pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } +pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" } postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = "1.3.0" profiling = "1" diff --git a/assets/settings/default.json b/assets/settings/default.json index cd4e3db15c2f6742510aa8f07d68b26dd602ff3f..879f6bb7fac65e5701e4887070819753433a4fb6 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -779,6 +779,7 @@ "tasks": { "variables": {} }, + "toolchain": { "name": "default", "path": "default" }, // An object whose keys are language names, and whose values // are arrays of filenames or extensions of files that should // use those languages. 
diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index 25179acec69ed0fb391f913c5adeb89375a9c828..1557ef215301484a4c6b8b8d05c650818900a65e 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -8,7 +8,8 @@ use collections::HashMap; use futures::{Future, FutureExt}; use gpui::AsyncAppContext; use language::{ - CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate, + CodeLabel, HighlightId, Language, LanguageServerName, LanguageToolchainStore, LspAdapter, + LspAdapterDelegate, }; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use serde::Serialize; @@ -194,6 +195,7 @@ impl LspAdapter for ExtensionLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, _cx: &mut AsyncAppContext, ) -> Result { let delegate = delegate.clone(); diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 535d68326f9c3eda899677b8930cdd353d6cc4a2..0a9299a8be818864754185cee509ad5e382851f2 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -37,7 +37,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use indexed_docs::{IndexedDocsRegistry, ProviderId}; use language::{ LanguageConfig, LanguageMatcher, LanguageName, LanguageQueries, LanguageRegistry, - QUERY_FILENAME_PREFIXES, + LoadedLanguage, QUERY_FILENAME_PREFIXES, }; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -1102,14 +1102,21 @@ impl ExtensionStore { let config = std::fs::read_to_string(language_path.join("config.toml"))?; let config: LanguageConfig = ::toml::from_str(&config)?; let queries = load_plugin_queries(&language_path); - let tasks = std::fs::read_to_string(language_path.join("tasks.json")) - .ok() - .and_then(|contents| { - let definitions = serde_json_lenient::from_str(&contents).log_err()?; - Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>) - }); - - Ok((config, queries, tasks)) + let context_provider = + std::fs::read_to_string(language_path.join("tasks.json")) + .ok() + .and_then(|contents| { + let definitions = + serde_json_lenient::from_str(&contents).log_err()?; + Some(Arc::new(ContextProviderWithTasks::new(definitions)) as Arc<_>) + }); + + Ok(LoadedLanguage { + config, + queries, + context_provider, + toolchain_provider: None, + }) }, ); } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index c1c9cfebbead5ebd0a51e77259dd699fd7dab13b..e52794f81f8320cc62ecc5d088c3bfdcc1768a47 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -15,6 +15,7 @@ mod outline; pub mod proto; mod syntax_map; mod task_context; +mod toolchain; #[cfg(test)] pub mod buffer_tests; @@ -28,7 +29,7 @@ use futures::Future; use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task}; pub use highlight_map::HighlightMap; use http_client::HttpClient; -pub use language_registry::LanguageName; +pub use language_registry::{LanguageName, LoadedLanguage}; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions}; use parking_lot::Mutex; use regex::Regex; @@ -61,6 +62,7 @@ use syntax_map::{QueryCursorHandle, SyntaxSnapshot}; use task::RunnableTag; pub use task_context::{ContextProvider, RunnableRange}; use theme::SyntaxTheme; +pub use toolchain::{LanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister}; use tree_sitter::{self, 
wasmtime, Query, QueryCursor, WasmStore}; use util::serde::default_true; @@ -502,6 +504,7 @@ pub trait LspAdapter: 'static + Send + Sync { async fn workspace_configuration( self: Arc, _: &Arc, + _: Arc, _cx: &mut AsyncAppContext, ) -> Result { Ok(serde_json::json!({})) @@ -855,6 +858,7 @@ pub struct Language { pub(crate) config: LanguageConfig, pub(crate) grammar: Option>, pub(crate) context_provider: Option>, + pub(crate) toolchain: Option>, } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] @@ -983,6 +987,7 @@ impl Language { }) }), context_provider: None, + toolchain: None, } } @@ -991,6 +996,11 @@ impl Language { self } + pub fn with_toolchain_lister(mut self, provider: Option>) -> Self { + self.toolchain = provider; + self + } + pub fn with_queries(mut self, queries: LanguageQueries) -> Result { if let Some(query) = queries.highlights { self = self @@ -1361,6 +1371,10 @@ impl Language { self.context_provider.clone() } + pub fn toolchain_lister(&self) -> Option> { + self.toolchain.clone() + } + pub fn highlight_text<'a>( self: &'a Arc, text: &'a Rope, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 880ae3b6115c37de7f56536dbd28ffd6e31ba06c..caea801ce56387aeefdb5cd8c441f1641ef6f541 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -4,7 +4,7 @@ use crate::{ }, task_context::ContextProvider, with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher, - LanguageServerName, LspAdapter, PLAIN_TEXT, + LanguageServerName, LspAdapter, ToolchainLister, PLAIN_TEXT, }; use anyhow::{anyhow, Context, Result}; use collections::{hash_map, HashMap, HashSet}; @@ -75,6 +75,13 @@ impl<'a> From<&'a str> for LanguageName { } } +impl From for String { + fn from(value: LanguageName) -> Self { + let value: &str = &value.0; + Self::from(value) + } +} + pub struct LanguageRegistry { state: RwLock, language_server_download_dir: Option>, @@ -123,16 +130,7 @@ pub struct AvailableLanguage { name: LanguageName, grammar: Option>, matcher: LanguageMatcher, - load: Arc< - dyn Fn() -> Result<( - LanguageConfig, - LanguageQueries, - Option>, - )> - + 'static - + Send - + Sync, - >, + load: Arc Result + 'static + Send + Sync>, loaded: bool, } @@ -200,6 +198,13 @@ struct LspBinaryStatusSender { txs: Arc>>>, } +pub struct LoadedLanguage { + pub config: LanguageConfig, + pub queries: LanguageQueries, + pub context_provider: Option>, + pub toolchain_provider: Option>, +} + impl LanguageRegistry { pub fn new(executor: BackgroundExecutor) -> Self { let this = Self { @@ -283,7 +288,14 @@ impl LanguageRegistry { config.name.clone(), config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), Default::default(), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: Default::default(), + toolchain_provider: None, + context_provider: None, + }) + }, ) } @@ -424,14 +436,7 @@ impl LanguageRegistry { name: LanguageName, grammar_name: Option>, matcher: LanguageMatcher, - load: impl Fn() -> Result<( - LanguageConfig, - LanguageQueries, - Option>, - )> - + 'static - + Send - + Sync, + load: impl Fn() -> Result + 'static + Send + Sync, ) { let load = Arc::new(load); let state = &mut *self.state.write(); @@ -726,16 +731,18 @@ impl LanguageRegistry { self.executor .spawn(async move { let language = async { - let (config, queries, provider) = (language_load)()?; - - if let Some(grammar) = config.grammar.clone() { + let loaded_language = 
(language_load)()?; + if let Some(grammar) = loaded_language.config.grammar.clone() { let grammar = Some(this.get_or_load_grammar(grammar).await?); - Language::new_with_id(id, config, grammar) - .with_context_provider(provider) - .with_queries(queries) + + Language::new_with_id(id, loaded_language.config, grammar) + .with_context_provider(loaded_language.context_provider) + .with_toolchain_lister(loaded_language.toolchain_provider) + .with_queries(loaded_language.queries) } else { - Ok(Language::new_with_id(id, config, None) - .with_context_provider(provider)) + Ok(Language::new_with_id(id, loaded_language.config, None) + .with_context_provider(loaded_language.context_provider) + .with_toolchain_lister(loaded_language.toolchain_provider)) } } .await; diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs new file mode 100644 index 0000000000000000000000000000000000000000..efb27008d0fe74d244b3010d90368064c6cc2193 --- /dev/null +++ b/crates/language/src/toolchain.rs @@ -0,0 +1,65 @@ +//! Provides support for language toolchains. +//! +//! A language can have associated toolchains, +//! which is a set of tools used to interact with the projects written in said language. +//! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override. + +use std::{path::PathBuf, sync::Arc}; + +use async_trait::async_trait; +use gpui::{AsyncAppContext, SharedString}; +use settings::WorktreeId; + +use crate::LanguageName; + +/// Represents a single toolchain. +#[derive(Clone, Debug, PartialEq)] +pub struct Toolchain { + /// User-facing label + pub name: SharedString, + pub path: SharedString, + pub language_name: LanguageName, +} + +#[async_trait(?Send)] +pub trait ToolchainLister: Send + Sync { + async fn list(&self, _: PathBuf) -> ToolchainList; +} + +#[async_trait(?Send)] +pub trait LanguageToolchainStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option; +} + +type DefaultIndex = usize; +#[derive(Default, Clone)] +pub struct ToolchainList { + pub toolchains: Vec, + pub default: Option, + pub groups: Box<[(usize, SharedString)]>, +} + +impl ToolchainList { + pub fn toolchains(&self) -> &[Toolchain] { + &self.toolchains + } + pub fn default_toolchain(&self) -> Option { + self.default.and_then(|ix| self.toolchains.get(ix)).cloned() + } + pub fn group_for_index(&self, index: usize) -> Option<(usize, SharedString)> { + if index >= self.toolchains.len() { + return None; + } + let first_equal_or_greater = self + .groups + .partition_point(|(group_lower_bound, _)| group_lower_bound <= &index); + self.groups + .get(first_equal_or_greater.checked_sub(1)?) 
+ .cloned() + } +} diff --git a/crates/languages/Cargo.toml b/crates/languages/Cargo.toml index d6746575f39b7e6b5dcc992676f5edfe9c7084bf..29c52ba301694e66f4c2969c72cf019a9c63bfea 100644 --- a/crates/languages/Cargo.toml +++ b/crates/languages/Cargo.toml @@ -47,6 +47,11 @@ log.workspace = true lsp.workspace = true node_runtime.workspace = true paths.workspace = true +pet.workspace = true +pet-core.workspace = true +pet-conda.workspace = true +pet-poetry.workspace = true +pet-reporter.workspace = true project.workspace = true regex.workspace = true rope.workspace = true diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 95c4070b13a331660aeae2706b892311f676fb28..28ee884307f2dd6d7fcab52c1fa4c78177465800 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -7,7 +7,9 @@ use feature_flags::FeatureFlagAppExt; use futures::StreamExt; use gpui::{AppContext, AsyncAppContext}; use http_client::github::{latest_github_release, GitHubLspBinaryVersion}; -use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{ + LanguageRegistry, LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate, +}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::ContextProviderWithTasks; @@ -198,6 +200,7 @@ impl LspAdapter for JsonLspAdapter { async fn workspace_configuration( self: Arc, _: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 03c4735d6d645b34d3f72260fd92e85497472910..2fd8ffa633d9aa3a6d3656b1d0bb53d0f389b7b3 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -3,7 +3,7 @@ use gpui::{AppContext, UpdateGlobal}; use json::json_task_context; pub use language::*; use node_runtime::NodeRuntime; -use python::PythonContextProvider; +use python::{PythonContextProvider, PythonToolchainProvider}; use rust_embed::RustEmbed; use settings::SettingsStore; use smol::stream::StreamExt; @@ -61,7 +61,14 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), load_queries($name), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: None, + toolchain_provider: None, + }) + }, ); }; ($name:literal, $adapters:expr) => { @@ -75,7 +82,14 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.name.clone(), config.grammar.clone(), config.matcher.clone(), - move || Ok((config.clone(), load_queries($name), None)), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: None, + toolchain_provider: None, + }) + }, ); }; ($name:literal, $adapters:expr, $context_provider:expr) => { @@ -90,11 +104,33 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu config.grammar.clone(), config.matcher.clone(), move || { - Ok(( - config.clone(), - load_queries($name), - Some(Arc::new($context_provider)), - )) + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: Some(Arc::new($context_provider)), + toolchain_provider: None, + }) + }, + ); + }; + ($name:literal, $adapters:expr, $context_provider:expr, $toolchain_provider:expr) => { + let config = load_config($name); + // typeck helper + let adapters: Vec> = $adapters; + for adapter in adapters { + 
languages.register_lsp_adapter(config.name.clone(), adapter); + } + languages.register_language( + config.name.clone(), + config.grammar.clone(), + config.matcher.clone(), + move || { + Ok(LoadedLanguage { + config: config.clone(), + queries: load_queries($name), + context_provider: Some(Arc::new($context_provider)), + toolchain_provider: Some($toolchain_provider), + }) }, ); }; @@ -141,7 +177,8 @@ pub fn init(languages: Arc, node_runtime: NodeRuntime, cx: &mu vec![Arc::new(python::PythonLspAdapter::new( node_runtime.clone(), ))], - PythonContextProvider + PythonContextProvider, + Arc::new(PythonToolchainProvider::default()) as Arc ); language!( "rust", diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 4b5fe3d277cd39eb74ba60b6eef09056757fd4a0..e73e3c86829aca027f108215c59ad08722986431 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -3,9 +3,16 @@ use async_trait::async_trait; use collections::HashMap; use gpui::AppContext; use gpui::AsyncAppContext; +use language::LanguageName; +use language::LanguageToolchainStore; +use language::Toolchain; +use language::ToolchainList; +use language::ToolchainLister; use language::{ContextProvider, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; +use pet_core::python_environment::PythonEnvironmentKind; +use pet_core::Configuration; use project::lsp_store::language_server_settings; use serde_json::Value; @@ -200,12 +207,35 @@ impl LspAdapter for PythonLspAdapter { async fn workspace_configuration( self: Arc, adapter: &Arc, + toolchains: Arc, cx: &mut AsyncAppContext, ) -> Result { - cx.update(|cx| { - language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) - .and_then(|s| s.settings.clone()) - .unwrap_or_default() + let toolchain = toolchains + .active_toolchain(adapter.worktree_id(), LanguageName::new("Python"), cx) + .await; + cx.update(move |cx| { + let mut user_settings = + language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) + .and_then(|s| s.settings.clone()) + .unwrap_or_default(); + + // If python.pythonPath is not set in user config, do so using our toolchain picker. + if let Some(toolchain) = toolchain { + if user_settings.is_null() { + user_settings = Value::Object(serde_json::Map::default()); + } + let object = user_settings.as_object_mut().unwrap(); + if let Some(python) = object + .entry("python") + .or_insert(Value::Object(serde_json::Map::default())) + .as_object_mut() + { + python + .entry("pythonPath") + .or_insert(Value::String(toolchain.path.into())); + } + } + user_settings }) } } @@ -320,6 +350,83 @@ fn python_module_name_from_relative_path(relative_path: &str) -> String { .to_string() } +#[derive(Default)] +pub(crate) struct PythonToolchainProvider {} + +static ENV_PRIORITY_LIST: &'static [PythonEnvironmentKind] = &[ + // Prioritize non-Conda environments. + PythonEnvironmentKind::Poetry, + PythonEnvironmentKind::Pipenv, + PythonEnvironmentKind::VirtualEnvWrapper, + PythonEnvironmentKind::Venv, + PythonEnvironmentKind::VirtualEnv, + PythonEnvironmentKind::Conda, + PythonEnvironmentKind::Pyenv, + PythonEnvironmentKind::GlobalPaths, + PythonEnvironmentKind::Homebrew, +]; + +fn env_priority(kind: Option) -> usize { + if let Some(kind) = kind { + ENV_PRIORITY_LIST + .iter() + .position(|blessed_env| blessed_env == &kind) + .unwrap_or(ENV_PRIORITY_LIST.len()) + } else { + // Unknown toolchains are less useful than non-blessed ones. 
+ ENV_PRIORITY_LIST.len() + 1 + } +} + +#[async_trait(?Send)] +impl ToolchainLister for PythonToolchainProvider { + async fn list(&self, worktree_root: PathBuf) -> ToolchainList { + let environment = pet_core::os_environment::EnvironmentApi::new(); + let locators = pet::locators::create_locators( + Arc::new(pet_conda::Conda::from(&environment)), + Arc::new(pet_poetry::Poetry::from(&environment)), + &environment, + ); + let mut config = Configuration::default(); + config.workspace_directories = Some(vec![worktree_root]); + let reporter = pet_reporter::collect::create_reporter(); + pet::find::find_and_report_envs(&reporter, config, &locators, &environment, None); + + let mut toolchains = reporter + .environments + .lock() + .ok() + .map_or(Vec::new(), |mut guard| std::mem::take(&mut guard)); + toolchains.sort_by(|lhs, rhs| { + env_priority(lhs.kind) + .cmp(&env_priority(rhs.kind)) + .then_with(|| lhs.executable.cmp(&rhs.executable)) + }); + let mut toolchains: Vec<_> = toolchains + .into_iter() + .filter_map(|toolchain| { + let name = if let Some(version) = &toolchain.version { + format!("Python {version} ({:?})", toolchain.kind?) + } else { + format!("{:?}", toolchain.kind?) + } + .into(); + Some(Toolchain { + name, + path: toolchain.executable?.to_str()?.to_owned().into(), + language_name: LanguageName::new("Python"), + }) + }) + .collect(); + toolchains.dedup(); + ToolchainList { + toolchains, + default: None, + groups: Default::default(), + } + } +} + #[cfg(test)] mod tests { use gpui::{BorrowAppContext, Context, ModelContext, TestAppContext}; diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 4ed5c742a9fc8da4212cdba5323c4b7a20d00109..6d4416c7d95cc29ec25946a35454c33d2ae91f8c 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -3,7 +3,7 @@ use async_trait::async_trait; use collections::HashMap; use futures::StreamExt; use gpui::AsyncAppContext; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -111,6 +111,7 @@ impl LspAdapter for TailwindLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index cfd7e04bc6417701fbbe51e8328c541fc0ae7a39..345a5f0694447d38839f6d141e991403f1e2c3a0 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -5,7 +5,7 @@ use async_trait::async_trait; use collections::HashMap; use gpui::AsyncAppContext; use http_client::github::{build_asset_url, AssetKind, GitHubLspBinaryVersion}; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -230,6 +230,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { @@ -325,6 +326,7 @@ impl LspAdapter for EsLintLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> 
Result { let workspace_root = delegate.worktree_root_path(); diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index ff8637dc28dbd8d474d28a7a353c660ea00c9777..ae65488a385d239b4db248e53ddb41cd3d2559ff 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -2,7 +2,7 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; use collections::HashMap; use gpui::AsyncAppContext; -use language::{LanguageServerName, LspAdapter, LspAdapterDelegate}; +use language::{LanguageServerName, LanguageToolchainStore, LspAdapter, LspAdapterDelegate}; use lsp::{CodeActionKind, LanguageServerBinary}; use node_runtime::NodeRuntime; use project::lsp_store::language_server_settings; @@ -183,6 +183,7 @@ impl LspAdapter for VtslsLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let tsdk_path = Self::tsdk_path(delegate).await; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 9f1c468b876b4a2cc2b4455f8a149e5e4bb637cc..d8f927b770ce2c5731c55f38493c40b42fa3f57e 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -3,7 +3,8 @@ use async_trait::async_trait; use futures::StreamExt; use gpui::AsyncAppContext; use language::{ - language_settings::AllLanguageSettings, LanguageServerName, LspAdapter, LspAdapterDelegate, + language_settings::AllLanguageSettings, LanguageServerName, LanguageToolchainStore, LspAdapter, + LspAdapterDelegate, }; use lsp::LanguageServerBinary; use node_runtime::NodeRuntime; @@ -92,6 +93,7 @@ impl LspAdapter for YamlLspAdapter { async fn workspace_configuration( self: Arc, delegate: &Arc, + _: Arc, cx: &mut AsyncAppContext, ) -> Result { let location = SettingsLocation { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 8152ddb3c0fbc361e7d9337f35d0a86b0bb2fb41..40e87b55e5b81bd3a74ffd3e8821daca4a1d5706 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7,10 +7,11 @@ use crate::{ prettier_store::{self, PrettierStore, PrettierStoreEvent}, project_settings::{LspSettings, ProjectSettings}, relativize_path, resolve_path, + toolchain_store::{EmptyToolchainStore, ToolchainStoreEvent}, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, CodeAction, Completion, CoreCompletion, Hover, InlayHint, Item as _, ProjectPath, - ProjectTransaction, ResolveState, Symbol, + ProjectTransaction, ResolveState, Symbol, ToolchainStore, }; use anyhow::{anyhow, Context as _, Result}; use async_trait::async_trait; @@ -36,9 +37,9 @@ use language::{ proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation, File as _, Language, LanguageName, - LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LocalFile, LspAdapter, - LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, - Unclipped, + LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName, LanguageToolchainStore, + LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use lsp::{ CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, @@ -707,12 +708,13 @@ pub struct LspStore { nonce: u128, buffer_store: Model, worktree_store: Model, + toolchain_store: Option>, buffer_snapshots: 
HashMap>>, // buffer_id -> server_id -> vec of snapshots pub languages: Arc, language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, pub language_server_statuses: BTreeMap, active_entry: Option, - _maintain_workspace_config: Task>, + _maintain_workspace_config: (Task>, watch::Sender<()>), _maintain_buffer_languages: Task<()>, next_diagnostic_group_id: usize, diagnostic_summaries: @@ -871,6 +873,7 @@ impl LspStore { buffer_store: Model, worktree_store: Model, prettier_store: Model, + toolchain_store: Model, environment: Model, languages: Arc, http_client: Arc, @@ -884,9 +887,15 @@ impl LspStore { .detach(); cx.subscribe(&prettier_store, Self::on_prettier_store_event) .detach(); + cx.subscribe(&toolchain_store, Self::on_toolchain_store_event) + .detach(); cx.observe_global::(Self::on_settings_changed) .detach(); + let _maintain_workspace_config = { + let (sender, receiver) = watch::channel(); + (Self::maintain_workspace_config(receiver, cx), sender) + }; Self { mode: LspStoreMode::Local(LocalLspStore { supplementary_language_servers: Default::default(), @@ -909,6 +918,7 @@ impl LspStore { downstream_client: None, buffer_store, worktree_store, + toolchain_store: Some(toolchain_store), languages: languages.clone(), language_server_ids: Default::default(), language_server_statuses: Default::default(), @@ -919,7 +929,7 @@ impl LspStore { diagnostics: Default::default(), active_entry: None, - _maintain_workspace_config: Self::maintain_workspace_config(cx), + _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } } @@ -942,9 +952,10 @@ impl LspStore { }) } - pub fn new_remote( + pub(super) fn new_remote( buffer_store: Model, worktree_store: Model, + toolchain_store: Option>, languages: Arc, upstream_client: AnyProtoClient, project_id: u64, @@ -954,7 +965,10 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); - + let _maintain_workspace_config = { + let (sender, receiver) = watch::channel(); + (Self::maintain_workspace_config(receiver, cx), sender) + }; Self { mode: LspStoreMode::Remote(RemoteLspStore { upstream_client: Some(upstream_client), @@ -972,7 +986,8 @@ impl LspStore { diagnostic_summaries: Default::default(), diagnostics: Default::default(), active_entry: None, - _maintain_workspace_config: Self::maintain_workspace_config(cx), + toolchain_store, + _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx), } } @@ -1063,6 +1078,22 @@ impl LspStore { } } + fn on_toolchain_store_event( + &mut self, + _: Model, + event: &ToolchainStoreEvent, + _: &mut ModelContext, + ) { + match event { + ToolchainStoreEvent::ToolchainActivated { .. } => { + self.request_workspace_config_refresh() + } + } + } + + fn request_workspace_config_refresh(&mut self) { + *self._maintain_workspace_config.1.borrow_mut() = (); + } // todo! 
pub fn prettier_store(&self) -> Option> { self.as_local().map(|local| local.prettier_store.clone()) @@ -3029,17 +3060,13 @@ impl LspStore { None } - fn maintain_workspace_config(cx: &mut ModelContext) -> Task> { - let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel(); - let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx); - - let settings_observation = cx.observe_global::(move |_, _| { - *settings_changed_tx.borrow_mut() = (); - }); - - cx.spawn(move |this, mut cx| async move { - while let Some(()) = settings_changed_rx.next().await { - let servers = this.update(&mut cx, |this, cx| { + pub(crate) async fn refresh_workspace_configurations( + this: &WeakModel, + mut cx: AsyncAppContext, + ) { + maybe!(async move { + let servers = this + .update(&mut cx, |this, cx| { this.language_server_ids .iter() .filter_map(|((worktree_id, _), server_id)| { @@ -3061,17 +3088,52 @@ impl LspStore { } }) .collect::>() - })?; + }) + .ok()?; + + let toolchain_store = this + .update(&mut cx, |this, cx| this.toolchain_store(cx)) + .ok()?; + for (adapter, server, delegate) in servers { + let settings = adapter + .workspace_configuration(&delegate, toolchain_store.clone(), &mut cx) + .await + .ok()?; - for (adapter, server, delegate) in servers { - let settings = adapter.workspace_configuration(&delegate, &mut cx).await?; + server + .notify::( + lsp::DidChangeConfigurationParams { settings }, + ) + .ok(); + } + Some(()) + }) + .await; + } - server - .notify::( - lsp::DidChangeConfigurationParams { settings }, - ) - .ok(); - } + fn toolchain_store(&self, cx: &AppContext) -> Arc { + if let Some(toolchain_store) = self.toolchain_store.as_ref() { + toolchain_store.read(cx).as_language_toolchain_store() + } else { + Arc::new(EmptyToolchainStore) + } + } + fn maintain_workspace_config( + external_refresh_requests: watch::Receiver<()>, + cx: &mut ModelContext, + ) -> Task> { + let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel(); + let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx); + + let settings_observation = cx.observe_global::(move |_, _| { + *settings_changed_tx.borrow_mut() = (); + }); + + let mut joint_future = + futures::stream::select(settings_changed_rx, external_refresh_requests); + cx.spawn(move |this, cx| async move { + while let Some(()) = joint_future.next().await { + Self::refresh_workspace_configurations(&this, cx.clone()).await; } drop(settings_observation); @@ -5517,6 +5579,9 @@ impl LspStore { let delegate = delegate.clone(); let adapter = adapter.clone(); let this = this.clone(); + let toolchains = this + .update(&mut cx, |this, cx| this.toolchain_store(cx)) + .ok()?; let mut cx = cx.clone(); async move { let language_server = pending_server.await?; @@ -5524,7 +5589,7 @@ impl LspStore { let workspace_config = adapter .adapter .clone() - .workspace_configuration(&delegate, &mut cx) + .workspace_configuration(&delegate, toolchains.clone(), &mut cx) .await?; let mut initialization_options = adapter @@ -5864,17 +5929,21 @@ impl LspStore { } }) .detach(); - language_server .on_request::({ let adapter = adapter.adapter.clone(); let delegate = delegate.clone(); + let this = this.clone(); move |params, mut cx| { let adapter = adapter.clone(); let delegate = delegate.clone(); + let this = this.clone(); async move { - let workspace_config = - adapter.workspace_configuration(&delegate, &mut cx).await?; + let toolchains = + this.update(&mut cx, |this, cx| this.toolchain_store(cx))?; + let workspace_config = adapter + 
.workspace_configuration(&delegate, toolchains, &mut cx) + .await?; Ok(params .items .into_iter() diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 49f4b7c6f3b23c6230a98fe4f87983ad68336c18..7a57e048c8868dd715d9d96bcf2158f9d4141c84 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -11,6 +11,7 @@ pub mod search; mod task_inventory; pub mod task_store; pub mod terminals; +pub mod toolchain_store; pub mod worktree_store; #[cfg(test)] @@ -44,8 +45,8 @@ use itertools::Itertools; use language::{ language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent, CachedLspAdapter, Capability, CodeLabel, DiagnosticEntry, Documentation, File as _, Language, - LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, Transaction, - Unclipped, + LanguageName, LanguageRegistry, LanguageServerName, PointUtf16, ToOffset, ToPointUtf16, + Toolchain, ToolchainList, Transaction, Unclipped, }; use lsp::{ CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServer, LanguageServerId, @@ -101,7 +102,7 @@ pub use lsp_store::{ LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent, SERVER_PROGRESS_THROTTLE_TIMEOUT, }; - +pub use toolchain_store::ToolchainStore; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; const MAX_SEARCH_RESULT_FILES: usize = 5_000; const MAX_SEARCH_RESULT_RANGES: usize = 10_000; @@ -158,6 +159,7 @@ pub struct Project { snippets: Model, environment: Model, settings_observer: Model, + toolchain_store: Option>, } #[derive(Default)] @@ -579,6 +581,7 @@ impl Project { LspStore::init(&client); SettingsObserver::init(&client); TaskStore::init(Some(&client)); + ToolchainStore::init(&client); } pub fn local( @@ -635,12 +638,15 @@ impl Project { }); cx.subscribe(&settings_observer, Self::on_settings_observer_event) .detach(); - + let toolchain_store = cx.new_model(|cx| { + ToolchainStore::local(languages.clone(), worktree_store.clone(), cx) + }); let lsp_store = cx.new_model(|cx| { LspStore::new_local( buffer_store.clone(), worktree_store.clone(), prettier_store.clone(), + toolchain_store.clone(), environment.clone(), languages.clone(), client.http_client(), @@ -681,6 +687,8 @@ impl Project { search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), + + toolchain_store: Some(toolchain_store), } }) } @@ -737,10 +745,14 @@ impl Project { .detach(); let environment = ProjectEnvironment::new(&worktree_store, None, cx); + let toolchain_store = Some(cx.new_model(|cx| { + ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx) + })); let lsp_store = cx.new_model(|cx| { LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), + toolchain_store.clone(), languages.clone(), ssh_proto.clone(), SSH_PROJECT_ID, @@ -798,6 +810,8 @@ impl Project { search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), + + toolchain_store, }; let ssh = ssh.read(cx); @@ -818,6 +832,7 @@ impl Project { LspStore::init(&ssh_proto); SettingsObserver::init(&ssh_proto); TaskStore::init(Some(&ssh_proto)); + ToolchainStore::init(&ssh_proto); this }) @@ -905,6 +920,7 @@ impl Project { let mut lsp_store = LspStore::new_remote( buffer_store.clone(), worktree_store.clone(), + None, languages.clone(), client.clone().into(), remote_id, @@ -993,6 +1009,7 @@ impl Project { search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), 
remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), + toolchain_store: None, }; this.set_role(role, cx); for worktree in worktrees { @@ -2346,6 +2363,46 @@ impl Project { .map_err(|e| anyhow!(e)) } + pub fn available_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + if let Some(toolchain_store) = self.toolchain_store.as_ref() { + toolchain_store + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } else { + Task::ready(None) + } + } + pub fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut AppContext, + ) -> Task> { + let Some(toolchain_store) = self.toolchain_store.clone() else { + return Task::ready(None); + }; + toolchain_store.update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }) + } + pub fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let Some(toolchain_store) = self.toolchain_store.clone() else { + return Task::ready(None); + }; + toolchain_store + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } pub fn language_server_statuses<'a>( &'a self, cx: &'a AppContext, diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs new file mode 100644 index 0000000000000000000000000000000000000000..a3f27d731b6ae412d7a9547f72e8557225c3af2f --- /dev/null +++ b/crates/project/src/toolchain_store.rs @@ -0,0 +1,416 @@ +use std::sync::Arc; + +use anyhow::{bail, Result}; + +use async_trait::async_trait; +use collections::BTreeMap; +use gpui::{ + AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task, + WeakModel, +}; +use language::{LanguageName, LanguageRegistry, LanguageToolchainStore, Toolchain, ToolchainList}; +use rpc::{proto, AnyProtoClient, TypedEnvelope}; +use settings::WorktreeId; +use util::ResultExt as _; + +use crate::worktree_store::WorktreeStore; + +pub struct ToolchainStore(ToolchainStoreInner); +enum ToolchainStoreInner { + Local(Model, #[allow(dead_code)] Subscription), + Remote(Model), +} + +impl EventEmitter for ToolchainStore {} +impl ToolchainStore { + pub fn init(client: &AnyProtoClient) { + client.add_model_request_handler(Self::handle_activate_toolchain); + client.add_model_request_handler(Self::handle_list_toolchains); + client.add_model_request_handler(Self::handle_active_toolchain); + } + + pub fn local( + languages: Arc, + worktree_store: Model, + cx: &mut ModelContext, + ) -> Self { + let model = cx.new_model(|_| LocalToolchainStore { + languages, + worktree_store, + active_toolchains: Default::default(), + }); + let subscription = cx.subscribe(&model, |_, _, e: &ToolchainStoreEvent, cx| { + cx.emit(e.clone()) + }); + Self(ToolchainStoreInner::Local(model, subscription)) + } + pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut AppContext) -> Self { + Self(ToolchainStoreInner::Remote( + cx.new_model(|_| RemoteToolchainStore { client, project_id }), + )) + } + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => local.update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }), + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .activate_toolchain(worktree_id, toolchain, cx) + } + } + } + pub(crate) fn list_toolchains( + &self, + worktree_id: 
WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => { + local + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .list_toolchains(worktree_id, language_name, cx) + } + } + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + match &self.0 { + ToolchainStoreInner::Local(local, _) => { + local + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } + ToolchainStoreInner::Remote(remote) => { + remote + .read(cx) + .active_toolchain(worktree_id, language_name, cx) + } + } + } + async fn handle_activate_toolchain( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + this.update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let Some(toolchain) = envelope.payload.toolchain else { + bail!("Missing `toolchain` in payload"); + }; + let toolchain = Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + language_name, + }; + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + Ok(this.activate_toolchain(worktree_id, toolchain, cx)) + })?? + .await; + Ok(proto::Ack {}) + } + async fn handle_active_toolchain( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let toolchain = this + .update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + this.active_toolchain(worktree_id, language_name, cx) + })? + .await; + + Ok(proto::ActiveToolchainResponse { + toolchain: toolchain.map(|toolchain| proto::Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + }), + }) + } + + async fn handle_list_toolchains( + this: Model, + envelope: TypedEnvelope, + mut cx: AsyncAppContext, + ) -> Result { + let toolchains = this + .update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + this.list_toolchains(worktree_id, language_name, cx) + })? 
+ .await; + let has_values = toolchains.is_some(); + let groups = if let Some(toolchains) = &toolchains { + toolchains + .groups + .iter() + .filter_map(|group| { + Some(proto::ToolchainGroup { + start_index: u64::try_from(group.0).ok()?, + name: String::from(group.1.as_ref()), + }) + }) + .collect() + } else { + vec![] + }; + let toolchains = if let Some(toolchains) = toolchains { + toolchains + .toolchains + .into_iter() + .map(|toolchain| proto::Toolchain { + name: toolchain.name.to_string(), + path: toolchain.path.to_string(), + }) + .collect::>() + } else { + vec![] + }; + + Ok(proto::ListToolchainsResponse { + has_values, + toolchains, + groups, + }) + } + pub(crate) fn as_language_toolchain_store(&self) -> Arc { + match &self.0 { + ToolchainStoreInner::Local(local, _) => Arc::new(LocalStore(local.downgrade())), + ToolchainStoreInner::Remote(remote) => Arc::new(RemoteStore(remote.downgrade())), + } + } +} + +struct LocalToolchainStore { + languages: Arc, + worktree_store: Model, + active_toolchains: BTreeMap<(WorktreeId, LanguageName), Toolchain>, +} + +#[async_trait(?Send)] +impl language::LanguageToolchainStore for LocalStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option { + self.0 + .update(cx, |this, cx| { + this.active_toolchain(worktree_id, language_name, cx) + }) + .ok()? + .await + } +} + +#[async_trait(?Send)] +impl language::LanguageToolchainStore for RemoteStore { + async fn active_toolchain( + self: Arc, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &mut AsyncAppContext, + ) -> Option { + self.0 + .update(cx, |this, cx| { + this.active_toolchain(worktree_id, language_name, cx) + }) + .ok()? + .await + } +} + +pub(crate) struct EmptyToolchainStore; +#[async_trait(?Send)] +impl language::LanguageToolchainStore for EmptyToolchainStore { + async fn active_toolchain( + self: Arc, + _: WorktreeId, + _: LanguageName, + _: &mut AsyncAppContext, + ) -> Option { + None + } +} +struct LocalStore(WeakModel); +struct RemoteStore(WeakModel); + +#[derive(Clone)] +pub(crate) enum ToolchainStoreEvent { + ToolchainActivated, +} + +impl EventEmitter for LocalToolchainStore {} + +impl LocalToolchainStore { + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &mut ModelContext, + ) -> Task> { + cx.spawn(move |this, mut cx| async move { + this.update(&mut cx, |this, cx| { + this.active_toolchains.insert( + (worktree_id, toolchain.language_name.clone()), + toolchain.clone(), + ); + cx.emit(ToolchainStoreEvent::ToolchainActivated); + }) + .ok(); + Some(()) + }) + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let registry = self.languages.clone(); + let Some(root) = self + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + else { + return Task::ready(None); + }; + cx.spawn(|_| async move { + let language = registry.language_for_name(&language_name.0).await.ok()?; + let toolchains = language.toolchain_lister()?.list(root.to_path_buf()).await; + Some(toolchains) + }) + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + _: &AppContext, + ) -> Task> { + Task::ready( + self.active_toolchains + .get(&(worktree_id, language_name)) + .cloned(), + ) + } +} +struct RemoteToolchainStore { + client: AnyProtoClient, + project_id: u64, +} + 
+impl RemoteToolchainStore { + pub(crate) fn activate_toolchain( + &self, + worktree_id: WorktreeId, + toolchain: Toolchain, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let _ = client + .request(proto::ActivateToolchain { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: toolchain.language_name.into(), + toolchain: Some(proto::Toolchain { + name: toolchain.name.into(), + path: toolchain.path.into(), + }), + }) + .await + .log_err()?; + Some(()) + }) + } + pub(crate) fn list_toolchains( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let response = client + .request(proto::ListToolchains { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: language_name.clone().into(), + }) + .await + .log_err()?; + if !response.has_values { + return None; + } + let toolchains = response + .toolchains + .into_iter() + .map(|toolchain| Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + }) + .collect(); + let groups = response + .groups + .into_iter() + .filter_map(|group| { + Some((usize::try_from(group.start_index).ok()?, group.name.into())) + }) + .collect(); + Some(ToolchainList { + toolchains, + default: None, + groups, + }) + }) + } + pub(crate) fn active_toolchain( + &self, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: &AppContext, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.spawn(move |_| async move { + let response = client + .request(proto::ActiveToolchain { + project_id, + worktree_id: worktree_id.to_proto(), + language_name: language_name.clone().into(), + }) + .await + .log_err()?; + + response.toolchain.map(|toolchain| Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + path: toolchain.path.into(), + }) + }) + } +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 53aaa6ef6d73abc1b6368890c5eb133733977e16..95a54c3d5c821fe25796c062583de21fa03bf6ce 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -280,11 +280,15 @@ message Envelope { LanguageServerPromptRequest language_server_prompt_request = 268; LanguageServerPromptResponse language_server_prompt_response = 269; - GitBranches git_branches = 270; GitBranchesResponse git_branches_response = 271; - UpdateGitBranch update_git_branch = 272; // current max + UpdateGitBranch update_git_branch = 272; + ListToolchains list_toolchains = 273; + ListToolchainsResponse list_toolchains_response = 274; + ActivateToolchain activate_toolchain = 275; + ActiveToolchain active_toolchain = 276; + ActiveToolchainResponse active_toolchain_response = 277; // current max } @@ -2393,7 +2397,6 @@ message GetPermalinkToLine { message GetPermalinkToLineResponse { string permalink = 1; } - message FlushBufferedMessages {} message FlushBufferedMessagesResponse {} @@ -2419,6 +2422,45 @@ message LanguageServerPromptResponse { optional uint64 action_response = 1; } +message ListToolchains { + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; +} + +message Toolchain { + string name = 1; + string path = 2; +} + +message ToolchainGroup { + uint64 start_index = 1; + string name = 2; +} + +message ListToolchainsResponse { + repeated Toolchain 
toolchains = 1; + bool has_values = 2; + repeated ToolchainGroup groups = 3; +} + +message ActivateToolchain { + uint64 project_id = 1; + uint64 worktree_id = 2; + Toolchain toolchain = 3; + string language_name = 4; +} + +message ActiveToolchain { + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; +} + +message ActiveToolchainResponse { + optional Toolchain toolchain = 1; +} + message Branch { bool is_head = 1; string name = 2; @@ -2438,4 +2480,5 @@ message UpdateGitBranch { uint64 project_id = 1; string branch_name = 2; ProjectPath repository = 3; + } diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index a7140cc7ed5d12245b89469689b842c711574f75..7fcebf051375d4ff79e28b2a4c982bfcc53a7d90 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -358,7 +358,12 @@ messages!( (LanguageServerPromptResponse, Foreground), (GitBranches, Background), (GitBranchesResponse, Background), - (UpdateGitBranch, Background) + (UpdateGitBranch, Background), + (ListToolchains, Foreground), + (ListToolchainsResponse, Foreground), + (ActivateToolchain, Foreground), + (ActiveToolchain, Foreground), + (ActiveToolchainResponse, Foreground) ); request_messages!( @@ -475,7 +480,10 @@ request_messages!( (FlushBufferedMessages, Ack), (LanguageServerPromptRequest, LanguageServerPromptResponse), (GitBranches, GitBranchesResponse), - (UpdateGitBranch, Ack) + (UpdateGitBranch, Ack), + (ListToolchains, ListToolchainsResponse), + (ActivateToolchain, Ack), + (ActiveToolchain, ActiveToolchainResponse) ); entity_messages!( @@ -555,7 +563,10 @@ entity_messages!( GetPermalinkToLine, LanguageServerPromptRequest, GitBranches, - UpdateGitBranch + UpdateGitBranch, + ListToolchains, + ActivateToolchain, + ActiveToolchain ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 81be01b6a640e806985f6afe8252f43d226f9107..ce34af247f02695b9140fcf64050a299d36e2e03 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -10,7 +10,7 @@ use project::{ search::SearchQuery, task_store::TaskStore, worktree_store::WorktreeStore, - LspStore, LspStoreEvent, PrettierStore, ProjectPath, WorktreeId, + LspStore, LspStoreEvent, PrettierStore, ProjectPath, ToolchainStore, WorktreeId, }; use remote::ssh_session::ChannelClient; use rpc::{ @@ -108,11 +108,14 @@ impl HeadlessProject { observer.shared(SSH_PROJECT_ID, session.clone().into(), cx); observer }); + let toolchain_store = + cx.new_model(|cx| ToolchainStore::local(languages.clone(), worktree_store.clone(), cx)); let lsp_store = cx.new_model(|cx| { let mut lsp_store = LspStore::new_local( buffer_store.clone(), worktree_store.clone(), prettier_store.clone(), + toolchain_store.clone(), environment, languages.clone(), http_client, @@ -143,6 +146,7 @@ impl HeadlessProject { session.subscribe_to_entity(SSH_PROJECT_ID, &cx.handle()); session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store); session.subscribe_to_entity(SSH_PROJECT_ID, &task_store); + session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store); session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer); client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory); @@ -166,6 +170,7 @@ impl HeadlessProject { SettingsObserver::init(&client); LspStore::init(&client); TaskStore::init(Some(&client)); + ToolchainStore::init(&client); HeadlessProject { session: client, diff --git a/crates/toolchain_selector/Cargo.toml 
b/crates/toolchain_selector/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..ed80bd0dc999e42441602710af0f050e6399cc17 --- /dev/null +++ b/crates/toolchain_selector/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "toolchain_selector" +version = "0.1.0" +edition = "2021" +publish = false +license = "GPL-3.0-or-later" + +[dependencies] +editor.workspace = true +fuzzy.workspace = true +gpui.workspace = true +language.workspace = true +picker.workspace = true +project.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true + +[lints] +workspace = true + +[lib] +path = "src/toolchain_selector.rs" +doctest = false diff --git a/crates/toolchain_selector/LICENSE-GPL b/crates/toolchain_selector/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/toolchain_selector/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs new file mode 100644 index 0000000000000000000000000000000000000000..74a6bd7107834f4a92fc430747cfba9a1a253cf3 --- /dev/null +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -0,0 +1,173 @@ +use editor::Editor; +use gpui::{ + div, AsyncWindowContext, EventEmitter, IntoElement, ParentElement, Render, Subscription, Task, + View, ViewContext, WeakModel, WeakView, +}; +use language::{Buffer, BufferEvent, LanguageName, Toolchain}; +use project::WorktreeId; +use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, Tooltip}; +use workspace::{item::ItemHandle, StatusItemView, Workspace}; + +use crate::ToolchainSelector; + +pub struct ActiveToolchain { + active_toolchain: Option, + workspace: WeakView, + active_buffer: Option<(WorktreeId, WeakModel, Subscription)>, + _observe_language_changes: Subscription, + _update_toolchain_task: Task>, +} + +struct LanguageChanged; + +impl EventEmitter for ActiveToolchain {} + +impl ActiveToolchain { + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { + let view = cx.view().clone(); + Self { + active_toolchain: None, + active_buffer: None, + workspace: workspace.weak_handle(), + _observe_language_changes: cx.subscribe(&view, |this, _, _: &LanguageChanged, cx| { + this._update_toolchain_task = Self::spawn_tracker_task(cx); + }), + _update_toolchain_task: Self::spawn_tracker_task(cx), + } + } + fn spawn_tracker_task(cx: &mut ViewContext) -> Task> { + cx.spawn(|this, mut cx| async move { + let active_file = this + .update(&mut cx, |this, _| { + this.active_buffer + .as_ref() + .map(|(_, buffer, _)| buffer.clone()) + }) + .ok() + .flatten()?; + let workspace = this + .update(&mut cx, |this, _| this.workspace.clone()) + .ok()?; + + let language_name = active_file + .update(&mut cx, |this, _| Some(this.language()?.name())) + .ok() + .flatten()?; + + let worktree_id = active_file + .update(&mut cx, |this, cx| Some(this.file()?.worktree_id(cx))) + .ok() + .flatten()?; + let toolchain = + Self::active_toolchain(workspace, worktree_id, language_name, cx.clone()).await?; + let _ = this.update(&mut cx, |this, cx| { + this.active_toolchain = Some(toolchain); + + cx.notify(); + }); + Some(()) + }) + } + + fn update_lister(&mut self, editor: View, cx: &mut ViewContext) { + let editor = editor.read(cx); + if let Some((_, buffer, _)) = editor.active_excerpt(cx) { + if let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) { + 
let subscription = cx.subscribe(&buffer, |_, _, event: &BufferEvent, cx| { + if let BufferEvent::LanguageChanged = event { + cx.emit(LanguageChanged) + } + }); + self.active_buffer = Some((worktree_id, buffer.downgrade(), subscription)); + cx.emit(LanguageChanged); + } + } + + cx.notify(); + } + + fn active_toolchain( + workspace: WeakView, + worktree_id: WorktreeId, + language_name: LanguageName, + cx: AsyncWindowContext, + ) -> Task> { + cx.spawn(move |mut cx| async move { + let workspace_id = workspace + .update(&mut cx, |this, _| this.database_id()) + .ok() + .flatten()?; + let selected_toolchain = workspace + .update(&mut cx, |this, cx| { + this.project() + .read(cx) + .active_toolchain(worktree_id, language_name.clone(), cx) + }) + .ok()? + .await; + if let Some(toolchain) = selected_toolchain { + Some(toolchain) + } else { + let project = workspace + .update(&mut cx, |this, _| this.project().clone()) + .ok()?; + let toolchains = cx + .update(|cx| { + project + .read(cx) + .available_toolchains(worktree_id, language_name, cx) + }) + .ok()? + .await?; + if let Some(toolchain) = toolchains.toolchains.first() { + // Since we don't have a selected toolchain, pick one for user here. + workspace::WORKSPACE_DB + .set_toolchain(workspace_id, worktree_id, toolchain.clone()) + .await + .ok()?; + project + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain.clone(), cx) + }) + .ok()? + .await; + } + + toolchains.toolchains.first().cloned() + } + }) + } +} + +impl Render for ActiveToolchain { + fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { + div().when_some(self.active_toolchain.as_ref(), |el, active_toolchain| { + el.child( + Button::new("change-toolchain", active_toolchain.name.clone()) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, cx| { + if let Some(workspace) = this.workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + ToolchainSelector::toggle(workspace, cx) + }); + } + })) + .tooltip(|cx| Tooltip::text("Select Toolchain", cx)), + ) + }) + } +} + +impl StatusItemView for ActiveToolchain { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + cx: &mut ViewContext, + ) { + if let Some(editor) = active_pane_item.and_then(|item| item.act_as::(cx)) { + self.active_toolchain.take(); + self.update_lister(editor, cx); + } + cx.notify(); + } +} diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs new file mode 100644 index 0000000000000000000000000000000000000000..8a3368f81675d4cc5a4a9c01a83b1107e43a9403 --- /dev/null +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -0,0 +1,343 @@ +mod active_toolchain; + +pub use active_toolchain::ActiveToolchain; +use editor::Editor; +use fuzzy::{match_strings, StringMatch, StringMatchCandidate}; +use gpui::{ + actions, AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model, + ParentElement, Render, Styled, Task, View, ViewContext, VisualContext, WeakView, +}; +use language::{LanguageName, Toolchain, ToolchainList}; +use picker::{Picker, PickerDelegate}; +use project::{Project, WorktreeId}; +use std::{path::Path, sync::Arc}; +use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; +use util::ResultExt; +use workspace::{ModalView, Workspace}; + +actions!(toolchain, [Select]); + +pub fn init(cx: &mut AppContext) { + cx.observe_new_views(ToolchainSelector::register).detach(); +} + +pub struct ToolchainSelector { + picker: View>, +} + +impl 
ToolchainSelector { + fn register(workspace: &mut Workspace, _: &mut ViewContext) { + workspace.register_action(move |workspace, _: &Select, cx| { + Self::toggle(workspace, cx); + }); + } + + fn toggle(workspace: &mut Workspace, cx: &mut ViewContext) -> Option<()> { + let (_, buffer, _) = workspace + .active_item(cx)? + .act_as::(cx)? + .read(cx) + .active_excerpt(cx)?; + let project = workspace.project().clone(); + + let language_name = buffer.read(cx).language()?.name(); + let worktree_id = buffer.read(cx).file()?.worktree_id(cx); + let worktree_root_path = project + .read(cx) + .worktree_for_id(worktree_id, cx)? + .read(cx) + .abs_path(); + let workspace_id = workspace.database_id()?; + let weak = workspace.weak_handle(); + cx.spawn(move |workspace, mut cx| async move { + let active_toolchain = workspace::WORKSPACE_DB + .toolchain(workspace_id, worktree_id, language_name.clone()) + .await + .ok() + .flatten(); + workspace + .update(&mut cx, |this, cx| { + this.toggle_modal(cx, move |cx| { + ToolchainSelector::new( + weak, + project, + active_toolchain, + worktree_id, + worktree_root_path, + language_name, + cx, + ) + }); + }) + .ok(); + }) + .detach(); + + Some(()) + } + + fn new( + workspace: WeakView, + project: Model, + active_toolchain: Option, + worktree_id: WorktreeId, + worktree_root: Arc, + language_name: LanguageName, + cx: &mut ViewContext, + ) -> Self { + let view = cx.view().downgrade(); + let picker = cx.new_view(|cx| { + let delegate = ToolchainSelectorDelegate::new( + active_toolchain, + view, + workspace, + worktree_id, + worktree_root, + project, + language_name, + cx, + ); + Picker::uniform_list(delegate, cx) + }); + Self { picker } + } +} + +impl Render for ToolchainSelector { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +impl FocusableView for ToolchainSelector { + fn focus_handle(&self, cx: &AppContext) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl EventEmitter for ToolchainSelector {} +impl ModalView for ToolchainSelector {} + +pub struct ToolchainSelectorDelegate { + toolchain_selector: WeakView, + candidates: ToolchainList, + matches: Vec, + selected_index: usize, + workspace: WeakView, + worktree_id: WorktreeId, + worktree_abs_path_root: Arc, + _fetch_candidates_task: Task>, +} + +impl ToolchainSelectorDelegate { + #[allow(clippy::too_many_arguments)] + fn new( + active_toolchain: Option, + language_selector: WeakView, + workspace: WeakView, + worktree_id: WorktreeId, + worktree_abs_path_root: Arc, + project: Model, + language_name: LanguageName, + cx: &mut ViewContext>, + ) -> Self { + let _fetch_candidates_task = cx.spawn({ + let project = project.clone(); + move |this, mut cx| async move { + let available_toolchains = project + .update(&mut cx, |this, cx| { + this.available_toolchains(worktree_id, language_name, cx) + }) + .ok()? 
+ .await?; + + let _ = this.update(&mut cx, move |this, cx| { + this.delegate.candidates = available_toolchains; + if let Some(active_toolchain) = active_toolchain { + if let Some(position) = this + .delegate + .candidates + .toolchains + .iter() + .position(|toolchain| *toolchain == active_toolchain) + { + this.delegate.set_selected_index(position, cx); + } + } + this.update_matches(this.query(cx), cx); + }); + + Some(()) + } + }); + + Self { + toolchain_selector: language_selector, + candidates: Default::default(), + matches: vec![], + selected_index: 0, + workspace, + worktree_id, + worktree_abs_path_root, + _fetch_candidates_task, + } + } + fn relativize_path(path: SharedString, worktree_root: &Path) -> SharedString { + Path::new(&path.as_ref()) + .strip_prefix(&worktree_root) + .ok() + .map(|suffix| Path::new(".").join(suffix)) + .and_then(|path| path.to_str().map(String::from).map(SharedString::from)) + .unwrap_or(path) + } +} + +impl PickerDelegate for ToolchainSelectorDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc { + "Select a toolchain...".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn confirm(&mut self, _: bool, cx: &mut ViewContext>) { + if let Some(string_match) = self.matches.get(self.selected_index) { + let toolchain = self.candidates.toolchains[string_match.candidate_id].clone(); + if let Some(workspace_id) = self + .workspace + .update(cx, |this, _| this.database_id()) + .ok() + .flatten() + { + let workspace = self.workspace.clone(); + let worktree_id = self.worktree_id; + cx.spawn(|_, mut cx| async move { + workspace::WORKSPACE_DB + .set_toolchain(workspace_id, worktree_id, toolchain.clone()) + .await + .log_err(); + workspace + .update(&mut cx, |this, cx| { + this.project().update(cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + }) + }) + .ok()? 
+ .await; + Some(()) + }) + .detach(); + } + } + self.dismissed(cx); + } + + fn dismissed(&mut self, cx: &mut ViewContext>) { + self.toolchain_selector + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext>) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + query: String, + cx: &mut ViewContext>, + ) -> gpui::Task<()> { + let background = cx.background_executor().clone(); + let candidates = self.candidates.clone(); + let worktree_root_path = self.worktree_abs_path_root.clone(); + cx.spawn(|this, mut cx| async move { + let matches = if query.is_empty() { + candidates + .toolchains + .into_iter() + .enumerate() + .map(|(index, candidate)| { + let path = Self::relativize_path(candidate.path, &worktree_root_path); + let string = format!("{}{}", candidate.name, path); + StringMatch { + candidate_id: index, + string, + positions: Vec::new(), + score: 0.0, + } + }) + .collect() + } else { + let candidates = candidates + .toolchains + .into_iter() + .enumerate() + .map(|(candidate_id, toolchain)| { + let path = Self::relativize_path(toolchain.path, &worktree_root_path); + let string = format!("{}{}", toolchain.name, path); + StringMatchCandidate::new(candidate_id, string) + }) + .collect::>(); + match_strings( + &candidates, + &query, + false, + 100, + &Default::default(), + background, + ) + .await + }; + + this.update(&mut cx, |this, cx| { + let delegate = &mut this.delegate; + delegate.matches = matches; + delegate.selected_index = delegate + .selected_index + .min(delegate.matches.len().saturating_sub(1)); + cx.notify(); + }) + .log_err(); + }) + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _: &mut ViewContext>, + ) -> Option { + let mat = &self.matches[ix]; + let toolchain = &self.candidates.toolchains[mat.candidate_id]; + + let label = toolchain.name.clone(); + let path = Self::relativize_path(toolchain.path.clone(), &self.worktree_abs_path_root); + let (name_highlights, mut path_highlights) = mat + .positions + .iter() + .cloned() + .partition::, _>(|index| *index < label.len()); + path_highlights.iter_mut().for_each(|index| { + *index -= label.len(); + }); + Some( + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .selected(selected) + .child(HighlightedLabel::new(label, name_highlights)) + .child( + HighlightedLabel::new(path, path_highlights) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + } +} diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 7c4fb93ba1a6702c1a5a6279934f1ccf11a1acd6..925d56a921819fdd56dcdbc5e6f0e64483ca6240 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -7,6 +7,8 @@ use client::DevServerProjectId; use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId}; +use language::{LanguageName, Toolchain}; +use project::WorktreeId; use remote::ssh_session::SshProjectId; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -204,7 +206,8 @@ define_connection! { // preview: bool // Indicates if this item is a preview item // ) pub static ref DB: WorkspaceDb<()> = - &[sql!( + &[ + sql!( CREATE TABLE workspaces( workspace_id INTEGER PRIMARY KEY, workspace_location BLOB UNIQUE, @@ -367,6 +370,16 @@ define_connection! 
{ sql!( ALTER TABLE ssh_projects RENAME COLUMN path TO paths; ), + sql!( + CREATE TABLE toolchains ( + workspace_id INTEGER, + worktree_id INTEGER, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + PRIMARY KEY (workspace_id, worktree_id, language_name) + ); + ), ]; } @@ -528,6 +541,7 @@ impl WorkspaceDb { match workspace.location { SerializedWorkspaceLocation::Local(local_paths, local_paths_order) => { conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE local_paths = ? AND workspace_id != ? ))?((&local_paths, workspace.id)) .context("clearing out old locations")?; @@ -576,6 +590,7 @@ impl WorkspaceDb { } SerializedWorkspaceLocation::Ssh(ssh_project) => { conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? ))?((ssh_project.id.0, workspace.id)) .context("clearing out old locations")?; @@ -737,6 +752,7 @@ impl WorkspaceDb { query! { pub async fn delete_workspace_by_id(id: WorkspaceId) -> Result<()> { + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE workspace_id IS ? } @@ -751,6 +767,7 @@ impl WorkspaceDb { DELETE FROM dev_server_projects WHERE id = ? ))?(id.0)?; conn.exec_bound(sql!( + DELETE FROM toolchains WHERE workspace_id = ?1; DELETE FROM workspaces WHERE dev_server_project_id IS ? ))?(id.0) @@ -1053,6 +1070,83 @@ impl WorkspaceDb { WHERE workspace_id = ?1 } } + + pub async fn toolchain( + &self, + workspace_id: WorkspaceId, + worktree_id: WorktreeId, + language_name: LanguageName, + ) -> Result> { + self.write(move |this| { + let mut select = this + .select_bound(sql!( + SELECT name, path FROM toolchains WHERE workspace_id = ? AND language_name = ? AND worktree_id = ? + )) + .context("Preparing insertion")?; + + let toolchain: Vec<(String, String)> = + select((workspace_id, language_name.0.to_owned(), worktree_id.to_usize()))?; + + Ok(toolchain.into_iter().next().map(|(name, path)| Toolchain { + name: name.into(), + path: path.into(), + language_name, + })) + }) + .await + } + + pub(crate) async fn toolchains( + &self, + workspace_id: WorkspaceId, + ) -> Result> { + self.write(move |this| { + let mut select = this + .select_bound(sql!( + SELECT name, path, worktree_id, language_name FROM toolchains WHERE workspace_id = ? + )) + .context("Preparing insertion")?; + + let toolchain: Vec<(String, String, u64, String)> = + select(workspace_id)?; + + Ok(toolchain.into_iter().map(|(name, path, worktree_id, language_name)| (Toolchain { + name: name.into(), + path: path.into(), + language_name: LanguageName::new(&language_name), + }, WorktreeId::from_proto(worktree_id))).collect()) + }) + .await + } + pub async fn set_toolchain( + &self, + workspace_id: WorkspaceId, + worktree_id: WorktreeId, + toolchain: Toolchain, + ) -> Result<()> { + self.write(move |conn| { + let mut insert = conn + .exec_bound(sql!( + INSERT INTO toolchains(workspace_id, worktree_id, language_name, name, path) VALUES (?, ?, ?, ?, ?) 
+ ON CONFLICT DO + UPDATE SET + name = ?4, + path = ?5 + + )) + .context("Preparing insertion")?; + + insert(( + workspace_id, + worktree_id.to_usize(), + toolchain.language_name.0.as_ref(), + toolchain.name.as_ref(), + toolchain.path.as_ref(), + ))?; + + Ok(()) + }).await + } } #[cfg(test)] diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b92417b2936c31df71ef5e02b7bc83d3b1a3350c..de2c985f340a338a44d7984f7ffafac62b07e920 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1153,6 +1153,14 @@ impl Workspace { DB.next_id().await.unwrap_or_else(|_| Default::default()) }; + let toolchains = DB.toolchains(workspace_id).await?; + for (toolchain, worktree_id) in toolchains { + project_handle + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + })? + .await; + } let window = if let Some(window) = requesting_window { cx.update_window(window.into(), |_, cx| { cx.replace_root_view(|cx| { @@ -5522,6 +5530,14 @@ pub fn open_ssh_project( ) })?; + let toolchains = DB.toolchains(workspace_id).await?; + for (toolchain, worktree_id) in toolchains { + project + .update(&mut cx, |this, cx| { + this.activate_toolchain(worktree_id, toolchain, cx) + })? + .await; + } let mut project_paths_to_open = vec![]; let mut project_path_errors = vec![]; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 58728d504b3ea5019f1d73ce31b83eea4e4f5ba8..e2a3f2be3639af6c59a051f25a1bad7ba9af0897 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -104,6 +104,7 @@ terminal_view.workspace = true theme.workspace = true theme_selector.workspace = true time.workspace = true +toolchain_selector.workspace = true ui.workspace = true reqwest_client.workspace = true url.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 3cb717d24fa8cd1363bd6afb2ad0afbe8468a0b1..89ff72b5a9bcca45370b869c1949362f920f1f59 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -441,6 +441,7 @@ fn main() { terminal_view::init(cx); journal::init(app_state.clone(), cx); language_selector::init(cx); + toolchain_selector::init(cx); theme_selector::init(cx); language_tools::init(cx); call::init(app_state.client.clone(), app_state.user_store.clone(), cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 8965a1755a1e9c7dff2c81a5ad485d661e91882e..7b630489cf1bb29ae281e704de294dfefa47a10f 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -208,6 +208,8 @@ pub fn initialize_workspace( activity_indicator::ActivityIndicator::new(workspace, app_state.languages.clone(), cx); let active_buffer_language = cx.new_view(|_| language_selector::ActiveBufferLanguage::new(workspace)); + let active_toolchain_language = + cx.new_view(|cx| toolchain_selector::ActiveToolchain::new(workspace, cx)); let vim_mode_indicator = cx.new_view(vim::ModeIndicator::new); let cursor_position = cx.new_view(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); @@ -216,6 +218,7 @@ pub fn initialize_workspace( status_bar.add_left_item(activity_indicator, cx); status_bar.add_right_item(inline_completion_button, cx); status_bar.add_right_item(active_buffer_language, cx); + status_bar.add_right_item(active_toolchain_language, cx); status_bar.add_right_item(vim_mode_indicator, cx); status_bar.add_right_item(cursor_position, cx); }); diff --git a/script/licenses/zed-licenses.toml b/script/licenses/zed-licenses.toml index 
3459fee3e507bffc510d07487ad638cb9b0ea56b..15c98c67020076bdfc17b8135ae9424b787c176c 100644 --- a/script/licenses/zed-licenses.toml +++ b/script/licenses/zed-licenses.toml @@ -36,3 +36,141 @@ license = "BSD-3-Clause" [[fuchsia-cprng.clarify.files]] path = 'LICENSE' checksum = '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b' + +[pet.clarify] +license = "MIT" +[[pet.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-conda.clarify] +license = "MIT" +[[pet-conda.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-core.clarify] +license = "MIT" +[[pet-core.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-env-var-path.clarify] +license = "MIT" +[[pet-env-var-path.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-fs.clarify] +license = "MIT" +[[pet-fs.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-global-virtualenvs.clarify] +license = "MIT" +[[pet-global-virtualenvs.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-homebrew.clarify] +license = "MIT" +[[pet-homebrew.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-jsonrpc.clarify] +license = "MIT" +[[pet-jsonrpc.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-linux-global-python.clarify] +license = "MIT" +[[pet-linux-global-python.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-mac-commandlinetools.clarify] +license = "MIT" +[[pet-mac-commandlinetools.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-mac-python-org.clarify] +license = "MIT" +[[pet-mac-python-org.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-mac-xcode.clarify] +license = "MIT" +[[pet-mac-xcode.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-pipenv.clarify] +license = "MIT" +[[pet-pipenv.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-poetry.clarify] +license = "MIT" +[[pet-poetry.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-pyenv.clarify] +license = "MIT" +[[pet-pyenv.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-python-utils.clarify] +license = "MIT" +[[pet-python-utils.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-reporter.clarify] +license = "MIT" +[[pet-reporter.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-telemetry.clarify] +license = "MIT" +[[pet-telemetry.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-venv.clarify] +license = "MIT" +[[pet-venv.clarify.git]] +path = 'LICENSE' +checksum = 
'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-virtualenv.clarify] +license = "MIT" +[[pet-virtualenv.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-virtualenvwrapper.clarify] +license = "MIT" +[[pet-virtualenvwrapper.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-windows-registry.clarify] +license = "MIT" +[[pet-windows-registry.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' + +[pet-windows-store.clarify] +license = "MIT" +[[pet-windows-store.clarify.git]] +path = 'LICENSE' +checksum = 'c2cfccb812fe482101a8f04597dfc5a9991a6b2748266c47ac91b6a5aae15383' From 8a96ea25c465697ec74ce3447bcd2ce9cb25b4f0 Mon Sep 17 00:00:00 2001 From: David Soria Parra <167242713+dsp-ant@users.noreply.github.com> Date: Mon, 28 Oct 2024 14:37:58 +0000 Subject: [PATCH 65/76] context_servers: Support tools (#19548) This PR depends on #19547 This PR adds support for tools from context servers. Context servers are free to expose tools that Zed can pass to models. When called by the model, Zed forwards the request to context servers. This allows for some interesting techniques. Context servers can easily expose tools such as querying local databases, reading or writing local files, reading resources over authenticated APIs (e.g. kubernetes, asana, etc). This is currently experimental. Things to discuss * I want to still add a confirm dialog asking people if a server is allows to use the tool. Should do this or just use the tool and assume trustworthyness of context servers? * Can we add tool use behind a local setting flag? Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/assistant/src/assistant.rs | 85 ++++++++++++++----- crates/assistant/src/tools.rs | 1 + .../src/tools/context_server_tool.rs | 82 ++++++++++++++++++ crates/context_servers/src/protocol.rs | 33 +++++++ crates/context_servers/src/registry.rs | 32 +++++-- crates/context_servers/src/types.rs | 18 ++++ .../language_model/src/provider/anthropic.rs | 12 ++- 7 files changed, 235 insertions(+), 28 deletions(-) create mode 100644 crates/assistant/src/tools/context_server_tool.rs diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index e1e574744fff61a05da0a7ccb6e1ddff9162cb11..a48f6d6c29424a6de87ec038e8b42ba2726f6f79 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -298,25 +298,64 @@ fn register_context_server_handlers(cx: &mut AppContext) { return; }; - if let Some(prompts) = protocol.list_prompts().await.log_err() { - for prompt in prompts - .into_iter() - .filter(context_server_command::acceptable_prompt) - { - log::info!( - "registering context server command: {:?}", - prompt.name - ); - context_server_registry.register_command( - server.id.clone(), - prompt.name.as_str(), - ); - slash_command_registry.register_command( - context_server_command::ContextServerSlashCommand::new( - &server, prompt, - ), - true, - ); + if protocol.capable(context_servers::protocol::ServerCapability::Prompts) { + if let Some(prompts) = protocol.list_prompts().await.log_err() { + for prompt in prompts + .into_iter() + .filter(context_server_command::acceptable_prompt) + { + log::info!( + "registering context server command: {:?}", + prompt.name + ); + context_server_registry.register_command( + server.id.clone(), + prompt.name.as_str(), + ); + 
slash_command_registry.register_command( + context_server_command::ContextServerSlashCommand::new( + &server, prompt, + ), + true, + ); + } + } + } + }) + .detach(); + } + }, + ); + + cx.update_model( + &manager, + |manager: &mut context_servers::manager::ContextServerManager, cx| { + let tool_registry = ToolRegistry::global(cx); + let context_server_registry = ContextServerRegistry::global(cx); + if let Some(server) = manager.get_server(server_id) { + cx.spawn(|_, _| async move { + let Some(protocol) = server.client.read().clone() else { + return; + }; + + if protocol.capable(context_servers::protocol::ServerCapability::Tools) { + if let Some(tools) = protocol.list_tools().await.log_err() { + for tool in tools.tools { + log::info!( + "registering context server tool: {:?}", + tool.name + ); + context_server_registry.register_tool( + server.id.clone(), + tool.name.as_str(), + ); + tool_registry.register_tool( + tools::context_server_tool::ContextServerTool::new( + server.id.clone(), + tool + ), + ); + } } } }) @@ -334,6 +373,14 @@ fn register_context_server_handlers(cx: &mut AppContext) { context_server_registry.unregister_command(&server_id, &command_name); } } + + if let Some(tools) = context_server_registry.get_tools(server_id) { + let tool_registry = ToolRegistry::global(cx); + for tool_name in tools { + tool_registry.unregister_tool_by_name(&tool_name); + context_server_registry.unregister_tool(&server_id, &tool_name); + } + } } }, ) diff --git a/crates/assistant/src/tools.rs b/crates/assistant/src/tools.rs index abde04e760e3ee92e8d6e05fb503637734beadcd..83a396c0203cb24fb6053c857a6065ed500c2542 100644 --- a/crates/assistant/src/tools.rs +++ b/crates/assistant/src/tools.rs @@ -1 +1,2 @@ +pub mod context_server_tool; pub mod now_tool; diff --git a/crates/assistant/src/tools/context_server_tool.rs b/crates/assistant/src/tools/context_server_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..93edb32b75b72586347b8794615868dd881d3881 --- /dev/null +++ b/crates/assistant/src/tools/context_server_tool.rs @@ -0,0 +1,82 @@ +use anyhow::{anyhow, bail}; +use assistant_tool::Tool; +use context_servers::manager::ContextServerManager; +use context_servers::types; +use gpui::Task; + +pub struct ContextServerTool { + server_id: String, + tool: types::Tool, +} + +impl ContextServerTool { + pub fn new(server_id: impl Into, tool: types::Tool) -> Self { + Self { + server_id: server_id.into(), + tool, + } + } +} + +impl Tool for ContextServerTool { + fn name(&self) -> String { + self.tool.name.clone() + } + + fn description(&self) -> String { + self.tool.description.clone().unwrap_or_default() + } + + fn input_schema(&self) -> serde_json::Value { + match &self.tool.input_schema { + serde_json::Value::Null => { + serde_json::json!({ "type": "object", "properties": [] }) + } + serde_json::Value::Object(map) if map.is_empty() => { + serde_json::json!({ "type": "object", "properties": [] }) + } + _ => self.tool.input_schema.clone(), + } + } + + fn run( + self: std::sync::Arc, + input: serde_json::Value, + _workspace: gpui::WeakView, + cx: &mut ui::WindowContext, + ) -> gpui::Task> { + let manager = ContextServerManager::global(cx); + let manager = manager.read(cx); + if let Some(server) = manager.get_server(&self.server_id) { + cx.foreground_executor().spawn({ + let tool_name = self.tool.name.clone(); + async move { + let Some(protocol) = server.client.read().clone() else { + bail!("Context server not initialized"); + }; + + let arguments = if let serde_json::Value::Object(map) = 
input { + Some(map.into_iter().collect()) + } else { + None + }; + + log::trace!( + "Running tool: {} with arguments: {:?}", + tool_name, + arguments + ); + let response = protocol.run_tool(tool_name, arguments).await?; + + let tool_result = match response.tool_result { + serde_json::Value::String(s) => s, + _ => serde_json::to_string(&response.tool_result)?, + }; + Ok(tool_result) + } + }) + } else { + Task::ready(Err(anyhow!("Context server not found"))) + } + } +} diff --git a/crates/context_servers/src/protocol.rs b/crates/context_servers/src/protocol.rs index 80a7a7f991a23f5fe963ae54e836b3240b8844c5..996fc34f462c5f7e5ab3cdfa59fec1990643aa22 100644 --- a/crates/context_servers/src/protocol.rs +++ b/crates/context_servers/src/protocol.rs @@ -180,6 +180,39 @@ impl InitializedContextServerProtocol { Ok(completion) } + + /// List MCP tools. + pub async fn list_tools(&self) -> Result { + self.check_capability(ServerCapability::Tools)?; + + let response = self + .inner + .request::(types::RequestType::ListTools.as_str(), ()) + .await?; + + Ok(response) + } + + /// Executes a tool with the given arguments + pub async fn run_tool>( + &self, + tool: P, + arguments: Option>, + ) -> Result { + self.check_capability(ServerCapability::Tools)?; + + let params = types::CallToolParams { + name: tool.as_ref().to_string(), + arguments, + }; + + let response: types::CallToolResponse = self + .inner + .request(types::RequestType::CallTool.as_str(), params) + .await?; + + Ok(response) + } } impl InitializedContextServerProtocol { diff --git a/crates/context_servers/src/registry.rs b/crates/context_servers/src/registry.rs index 625f308c15228fc5f69795f601e87c30433fdaa5..5490187034972448152c377d369854a43702d29f 100644 --- a/crates/context_servers/src/registry.rs +++ b/crates/context_servers/src/registry.rs @@ -9,7 +9,8 @@ struct GlobalContextServerRegistry(Arc); impl Global for GlobalContextServerRegistry {} pub struct ContextServerRegistry { - registry: RwLock>>>, + command_registry: RwLock>>>, + tool_registry: RwLock>>>, } impl ContextServerRegistry { @@ -20,13 +21,14 @@ impl ContextServerRegistry { pub fn register(cx: &mut AppContext) { cx.set_global(GlobalContextServerRegistry(Arc::new( ContextServerRegistry { - registry: RwLock::new(HashMap::default()), + command_registry: RwLock::new(HashMap::default()), + tool_registry: RwLock::new(HashMap::default()), }, ))) } pub fn register_command(&self, server_id: String, command_name: &str) { - let mut registry = self.registry.write(); + let mut registry = self.command_registry.write(); registry .entry(server_id) .or_default() @@ -34,14 +36,34 @@ impl ContextServerRegistry { } pub fn unregister_command(&self, server_id: &str, command_name: &str) { - let mut registry = self.registry.write(); + let mut registry = self.command_registry.write(); if let Some(commands) = registry.get_mut(server_id) { commands.retain(|name| name.as_ref() != command_name); } } pub fn get_commands(&self, server_id: &str) -> Option>> { - let registry = self.registry.read(); + let registry = self.command_registry.read(); + registry.get(server_id).cloned() + } + + pub fn register_tool(&self, server_id: String, tool_name: &str) { + let mut registry = self.tool_registry.write(); + registry + .entry(server_id) + .or_default() + .push(tool_name.into()); + } + + pub fn unregister_tool(&self, server_id: &str, tool_name: &str) { + let mut registry = self.tool_registry.write(); + if let Some(tools) = registry.get_mut(server_id) { + tools.retain(|name| name.as_ref() != tool_name); + } + } + + pub fn 
get_tools(&self, server_id: &str) -> Option>> { + let registry = self.tool_registry.read(); registry.get(server_id).cloned() } } diff --git a/crates/context_servers/src/types.rs b/crates/context_servers/src/types.rs index 2bca0a021a129029b55d6371f9db98332418a7a5..b6d8a958bb1264c1e323dc9f13450ddf7551ff2f 100644 --- a/crates/context_servers/src/types.rs +++ b/crates/context_servers/src/types.rs @@ -16,6 +16,8 @@ pub enum RequestType { PromptsList, CompletionComplete, Ping, + ListTools, + ListResourceTemplates, } impl RequestType { @@ -32,6 +34,8 @@ impl RequestType { RequestType::PromptsList => "prompts/list", RequestType::CompletionComplete => "completion/complete", RequestType::Ping => "ping", + RequestType::ListTools => "tools/list", + RequestType::ListResourceTemplates => "resources/templates/list", } } } @@ -402,3 +406,17 @@ pub struct Completion { pub values: Vec, pub total: CompletionTotal, } + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CallToolResponse { + pub tool_result: serde_json::Value, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ListToolsResponse { + pub tools: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub next_cursor: Option, +} diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index fe88c73b90deb6ee8a7af07497b0b59cab1fd7a5..b7e65650b55a3075fcb598d06fd027189ea0df31 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -505,10 +505,14 @@ pub fn map_to_language_model_completion_events( LanguageModelToolUse { id: tool_use.id, name: tool_use.name, - input: serde_json::Value::from_str( - &tool_use.input_json, - ) - .map_err(|err| anyhow!(err))?, + input: if tool_use.input_json.is_empty() { + serde_json::Value::Null + } else { + serde_json::Value::from_str( + &tool_use.input_json, + ) + .map_err(|err| anyhow!(err))? 
+ }, }, )) })), From 6686f66949f10c189f27bca1fa3cfc1eddc6bdf0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 15:40:50 +0100 Subject: [PATCH 66/76] ollama: Ensure only single task fetches models (#19830) Before this change, we'd see a ton of requests from the Ollama provider trying to fetch models: ``` [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: https://api.zed.dev/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ [2024-10-28T15:00:52+01:00 DEBUG reqwest::connect] starting new connection: http://localhost:11434/ ``` Turns out we'd send a request on *every* change to settings. Now, with this change, we only send a single request. 
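
To make the fix easier to skim, here is the core of the guard in isolation. This is a simplified excerpt of the pattern, not standalone code: the surrounding provider setup and the `State` / `AllLanguageModelSettings` definitions live in the crate and are omitted here, and the complete change is in the diff below.

```rust
// Cache the last-seen Ollama settings and only restart the fetch-models task
// when the observed global settings actually changed for this provider.
let subscription = cx.observe_global::<SettingsStore>({
    let mut settings = AllLanguageModelSettings::get_global(cx).ollama.clone();
    move |this: &mut State, cx| {
        let new_settings = &AllLanguageModelSettings::get_global(cx).ollama;
        if &settings != new_settings {
            settings = new_settings.clone();
            // Previously fetch_models(cx) ran unconditionally on every settings
            // change; now the fetch task is only restarted on a real change.
            this.restart_fetch_models_task(cx);
            cx.notify();
        }
    }
});
```
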
Release Notes: - N/A Co-authored-by: Bennet --- crates/language_model/src/provider/ollama.rs | 34 +++++++++++++++----- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_model/src/provider/ollama.rs index a29ff3cf6a7a1a34cbe10bec99583a8cee5a5b00..c95bed181aa6f49703e73fc9348d4878c342c179 100644 --- a/crates/language_model/src/provider/ollama.rs +++ b/crates/language_model/src/provider/ollama.rs @@ -54,6 +54,7 @@ pub struct OllamaLanguageModelProvider { pub struct State { http_client: Arc, available_models: Vec, + fetch_model_task: Option>>, _subscription: Subscription, } @@ -89,6 +90,11 @@ impl State { }) } + fn restart_fetch_models_task(&mut self, cx: &mut ModelContext) { + let task = self.fetch_models(cx); + self.fetch_model_task.replace(task); + } + fn authenticate(&mut self, cx: &mut ModelContext) -> Task> { if self.is_authenticated() { Task::ready(Ok(())) @@ -102,17 +108,29 @@ impl OllamaLanguageModelProvider { pub fn new(http_client: Arc, cx: &mut AppContext) -> Self { let this = Self { http_client: http_client.clone(), - state: cx.new_model(|cx| State { - http_client, - available_models: Default::default(), - _subscription: cx.observe_global::(|this: &mut State, cx| { - this.fetch_models(cx).detach(); - cx.notify(); - }), + state: cx.new_model(|cx| { + let subscription = cx.observe_global::({ + let mut settings = AllLanguageModelSettings::get_global(cx).ollama.clone(); + move |this: &mut State, cx| { + let new_settings = &AllLanguageModelSettings::get_global(cx).ollama; + if &settings != new_settings { + settings = new_settings.clone(); + this.restart_fetch_models_task(cx); + cx.notify(); + } + } + }); + + State { + http_client, + available_models: Default::default(), + fetch_model_task: None, + _subscription: subscription, + } }), }; this.state - .update(cx, |state, cx| state.fetch_models(cx).detach()); + .update(cx, |state, cx| state.restart_fetch_models_task(cx)); this } } From ff29a34298614ef65fa5e5cdcab0356e05ecd472 Mon Sep 17 00:00:00 2001 From: xdBronch <51252236+xdBronch@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:49:40 -0400 Subject: [PATCH 67/76] zig: Account for doctests in outline (#19776) zig has a feature called [doctests](https://ziglang.org/documentation/master/#Doctests) where instead of providing a string as the name of a test you use an identifier so that the test is "tied" to it and can be used in documentation. this wasnt accounted for so any tests using this were unnamed in the outline Release Notes: - N/A --- extensions/zig/languages/zig/outline.scm | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/extensions/zig/languages/zig/outline.scm b/extensions/zig/languages/zig/outline.scm index d87cff2058dfc0447b4d6ac134d9fa16f40c5881..7ae683a876e2e7743614fed1f4767875a7d6036c 100644 --- a/extensions/zig/languages/zig/outline.scm +++ b/extensions/zig/languages/zig/outline.scm @@ -19,6 +19,9 @@ ( TestDecl ( "test" @context - (STRINGLITERALSINGLE)? @name + [ + (STRINGLITERALSINGLE) + (IDENTIFIER) + ]? 
@name ) ) @item From e0ea9a9ab55a188e0aa4a1e43b2bad9f4d815e26 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 28 Oct 2024 16:00:38 +0100 Subject: [PATCH 68/76] Remove leftover comments from previous PR (#19820) Co-Authored-by: Thorsten Removes some leftover comments from #19766 Release Notes: - N/A Co-authored-by: Thorsten --- crates/editor/src/editor.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 5bd3accc13fbc49253c3f6b50b56037db5a1147c..d23889b42767991c5021edf3ff69f9dd23c66a82 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -223,7 +223,6 @@ pub fn render_parsed_markdown( } }), ); - // hello let mut links = Vec::new(); let mut link_ranges = Vec::new(); @@ -3785,9 +3784,6 @@ impl Editor { pub fn newline_below(&mut self, _: &NewlineBelow, cx: &mut ViewContext) { let buffer = self.buffer.read(cx); let snapshot = buffer.snapshot(cx); - // - // - // let mut edits = Vec::new(); let mut rows = Vec::new(); From 67eb652bf1c0c8b7183f89043146d665ce0cab1d Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 16:12:37 +0100 Subject: [PATCH 69/76] remote servers: Always dismiss modal (#19831) We display the errors in another window anyway and if the connection takes a while it looks like a bug that the modal stays open. Release Notes: - N/A Co-authored-by: Bennet --- crates/recent_projects/src/remote_servers.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index faf58f312fafa7d7038c3b7c807c0a88ee0d14c6..a7ffee5e573c7429038471910629615c31dc7920 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -738,7 +738,8 @@ impl RemoteServerProjects { }; let project = project.clone(); let server = server.clone(); - cx.spawn(|remote_server_projects, mut cx| async move { + cx.emit(DismissEvent); + cx.spawn(|_, mut cx| async move { let result = open_ssh_project( server.into(), project.paths.into_iter().map(PathBuf::from).collect(), @@ -757,10 +758,6 @@ impl RemoteServerProjects { ) .await .ok(); - } else { - remote_server_projects - .update(&mut cx, |_, cx| cx.emit(DismissEvent)) - .ok(); } }) .detach(); From 5e89fba68116077369252539fc199108cabd0a8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=96mer=20Sinan=20A=C4=9Facan?= Date: Mon, 28 Oct 2024 16:20:04 +0100 Subject: [PATCH 70/76] dart: Add support for documentation comments (#19592) Closes #19590 Release Notes: - N/A --- I'm unable to test this because rebuilding Zed with the changes does not seem to use the changes. If maintainers could let me know how to test these changes I'd like to verify that this really fixes #19590. 
--------- Co-authored-by: Marshall Bowers --- extensions/dart/languages/dart/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extensions/dart/languages/dart/config.toml b/extensions/dart/languages/dart/config.toml index d723d4d6d3713bd2b76c02a8c49c9335d368a1cb..15420c74f0536df89c19e5e00c0b4d0e1baad85e 100644 --- a/extensions/dart/languages/dart/config.toml +++ b/extensions/dart/languages/dart/config.toml @@ -1,7 +1,7 @@ name = "Dart" grammar = "dart" path_suffixes = ["dart"] -line_comments = ["// "] +line_comments = ["// ", "/// "] autoclose_before = ";:.,=}])>" brackets = [ { start = "{", end = "}", close = true, newline = true }, From cc81f19c68260669c275973054ad2d466d54b5e9 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 16:35:37 +0100 Subject: [PATCH 71/76] remote server: Fix error log about inability to open buffer (#19824) Turns out that we used client-side `fs` to check whether something is a directory or not, which obviously doesn't work with SSH projects. Release Notes: - N/A --------- Co-authored-by: Bennet --- crates/editor/src/hover_links.rs | 45 ++++++++++- crates/file_finder/src/file_finder.rs | 4 +- crates/project/src/project.rs | 64 +++++++++++---- crates/proto/proto/zed.proto | 16 ++-- crates/proto/src/proto.rs | 14 ++-- crates/remote_server/src/headless_project.rs | 16 ++-- .../remote_server/src/remote_editing_tests.rs | 77 ++++++++++++++++++- crates/workspace/src/workspace.rs | 36 +++++---- 8 files changed, 213 insertions(+), 59 deletions(-) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 4a636f673abb7d0fcc921a8feaafc776a42861ed..31be9e93a948074f250b42a7c9126d9834c3eb34 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -706,10 +706,11 @@ pub(crate) async fn find_file( ) -> Option { project .update(cx, |project, cx| { - project.resolve_existing_file_path(&candidate_file_path, buffer, cx) + project.resolve_path_in_buffer(&candidate_file_path, buffer, cx) }) .ok()? .await + .filter(|s| s.is_file()) } if let Some(existing_path) = check_path(&candidate_file_path, &project, buffer, cx).await { @@ -1612,4 +1613,46 @@ mod tests { assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs"); }); } + + #[gpui::test] + async fn test_hover_directories(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + ..Default::default() + }, + cx, + ) + .await; + + // Insert a new file + let fs = cx.update_workspace(|workspace, cx| workspace.project().read(cx).fs().clone()); + fs.as_fake() + .insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec()) + .await; + + cx.set_state(indoc! {" + You can't open ../diˇr because it's a directory. + "}); + + // File does not exist + let screen_coord = cx.pixel_position(indoc! {" + You can't open ../diˇr because it's a directory. 
+ "}); + cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key()); + + // No highlight + cx.update_editor(|editor, cx| { + assert!(editor + .snapshot(cx) + .text_highlight_ranges::() + .unwrap_or_default() + .1 + .is_empty()); + }); + + // Does not open the directory + cx.simulate_click(screen_coord, Modifiers::secondary_key()); + cx.update_workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 1)); + } } diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 299b129d82a90dd52c4f8b015eda5932bf0fb783..ce0e3850576443d9e1cc58b828494f52fd5522a8 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -790,9 +790,9 @@ impl FileFinderDelegate { let mut path_matches = Vec::new(); let abs_file_exists = if let Ok(task) = project.update(&mut cx, |this, cx| { - this.abs_file_path_exists(query.path_query(), cx) + this.resolve_abs_file_path(query.path_query(), cx) }) { - task.await + task.await.is_some() } else { false }; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7a57e048c8868dd715d9d96bcf2158f9d4141c84..04ae203b4d2257f6bae6847669ffc63c3c558bf1 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3094,7 +3094,7 @@ impl Project { } /// Returns the resolved version of `path`, that was found in `buffer`, if it exists. - pub fn resolve_existing_file_path( + pub fn resolve_path_in_buffer( &self, path: &str, buffer: &Model, @@ -3102,47 +3102,56 @@ impl Project { ) -> Task> { let path_buf = PathBuf::from(path); if path_buf.is_absolute() || path.starts_with("~") { - self.resolve_abs_file_path(path, cx) + self.resolve_abs_path(path, cx) } else { self.resolve_path_in_worktrees(path_buf, buffer, cx) } } - pub fn abs_file_path_exists(&self, path: &str, cx: &mut ModelContext) -> Task { - let resolve_task = self.resolve_abs_file_path(path, cx); + pub fn resolve_abs_file_path( + &self, + path: &str, + cx: &mut ModelContext, + ) -> Task> { + let resolve_task = self.resolve_abs_path(path, cx); cx.background_executor().spawn(async move { let resolved_path = resolve_task.await; - resolved_path.is_some() + resolved_path.filter(|path| path.is_file()) }) } - fn resolve_abs_file_path( + pub fn resolve_abs_path( &self, path: &str, cx: &mut ModelContext, ) -> Task> { if self.is_local() { let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned()); - let fs = self.fs.clone(); cx.background_executor().spawn(async move { let path = expanded.as_path(); - let exists = fs.is_file(path).await; + let metadata = fs.metadata(path).await.ok().flatten(); - exists.then(|| ResolvedPath::AbsPath(expanded)) + metadata.map(|metadata| ResolvedPath::AbsPath { + path: expanded, + is_dir: metadata.is_dir, + }) }) } else if let Some(ssh_client) = self.ssh_client.as_ref() { let request = ssh_client .read(cx) .proto_client() - .request(proto::CheckFileExists { + .request(proto::GetPathMetadata { project_id: SSH_PROJECT_ID, path: path.to_string(), }); cx.background_executor().spawn(async move { let response = request.await.log_err()?; if response.exists { - Some(ResolvedPath::AbsPath(PathBuf::from(response.path))) + Some(ResolvedPath::AbsPath { + path: PathBuf::from(response.path), + is_dir: response.is_dir, + }) } else { None } @@ -3181,10 +3190,14 @@ impl Project { resolved.strip_prefix(root_entry_path).unwrap_or(&resolved); worktree.entry_for_path(stripped).map(|entry| { - ResolvedPath::ProjectPath(ProjectPath { + let project_path = ProjectPath { worktree_id: 
worktree.id(), path: entry.path.clone(), - }) + }; + ResolvedPath::ProjectPath { + project_path, + is_dir: entry.is_dir(), + } }) }) .ok()?; @@ -4149,24 +4162,41 @@ fn resolve_path(base: &Path, path: &Path) -> PathBuf { /// or an AbsPath and that *exists*. #[derive(Debug, Clone)] pub enum ResolvedPath { - ProjectPath(ProjectPath), - AbsPath(PathBuf), + ProjectPath { + project_path: ProjectPath, + is_dir: bool, + }, + AbsPath { + path: PathBuf, + is_dir: bool, + }, } impl ResolvedPath { pub fn abs_path(&self) -> Option<&Path> { match self { - Self::AbsPath(path) => Some(path.as_path()), + Self::AbsPath { path, .. } => Some(path.as_path()), _ => None, } } pub fn project_path(&self) -> Option<&ProjectPath> { match self { - Self::ProjectPath(path) => Some(&path), + Self::ProjectPath { project_path, .. } => Some(&project_path), _ => None, } } + + pub fn is_file(&self) -> bool { + !self.is_dir() + } + + pub fn is_dir(&self) -> bool { + match self { + Self::ProjectPath { is_dir, .. } => *is_dir, + Self::AbsPath { is_dir, .. } => *is_dir, + } + } } impl Item for Buffer { diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 95a54c3d5c821fe25796c062583de21fa03bf6ce..e9e42dac18fb297061d46b570662d6d8fcc96922 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -259,9 +259,6 @@ message Envelope { CloseBuffer close_buffer = 245; UpdateUserSettings update_user_settings = 246; - CheckFileExists check_file_exists = 255; - CheckFileExistsResponse check_file_exists_response = 256; - ShutdownRemoteServer shutdown_remote_server = 257; RemoveWorktree remove_worktree = 258; @@ -284,13 +281,16 @@ message Envelope { GitBranchesResponse git_branches_response = 271; UpdateGitBranch update_git_branch = 272; + ListToolchains list_toolchains = 273; ListToolchainsResponse list_toolchains_response = 274; ActivateToolchain activate_toolchain = 275; ActiveToolchain active_toolchain = 276; - ActiveToolchainResponse active_toolchain_response = 277; // current max - } + ActiveToolchainResponse active_toolchain_response = 277; + GetPathMetadata get_path_metadata = 278; + GetPathMetadataResponse get_path_metadata_response = 279; // current max + } reserved 87 to 88; reserved 158 to 161; @@ -305,6 +305,7 @@ message Envelope { reserved 221; reserved 224 to 229; reserved 247 to 254; + reserved 255 to 256; } // Messages @@ -2357,14 +2358,15 @@ message UpdateUserSettings { } } -message CheckFileExists { +message GetPathMetadata { uint64 project_id = 1; string path = 2; } -message CheckFileExistsResponse { +message GetPathMetadataResponse { bool exists = 1; string path = 2; + bool is_dir = 3; } message ShutdownRemoteServer {} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 7fcebf051375d4ff79e28b2a4c982bfcc53a7d90..4bae2d993145ff1e856eeddb670dfd27ede7b792 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -343,8 +343,6 @@ messages!( (FindSearchCandidatesResponse, Background), (CloseBuffer, Foreground), (UpdateUserSettings, Foreground), - (CheckFileExists, Background), - (CheckFileExistsResponse, Background), (ShutdownRemoteServer, Foreground), (RemoveWorktree, Foreground), (LanguageServerLog, Foreground), @@ -363,7 +361,9 @@ messages!( (ListToolchainsResponse, Foreground), (ActivateToolchain, Foreground), (ActiveToolchain, Foreground), - (ActiveToolchainResponse, Foreground) + (ActiveToolchainResponse, Foreground), + (GetPathMetadata, Background), + (GetPathMetadataResponse, Background) ); request_messages!( @@ -472,7 +472,6 @@ 
request_messages!( (SynchronizeContexts, SynchronizeContextsResponse), (LspExtSwitchSourceHeader, LspExtSwitchSourceHeaderResponse), (AddWorktree, AddWorktreeResponse), - (CheckFileExists, CheckFileExistsResponse), (ShutdownRemoteServer, Ack), (RemoveWorktree, Ack), (OpenServerSettings, OpenBufferResponse), @@ -483,7 +482,8 @@ request_messages!( (UpdateGitBranch, Ack), (ListToolchains, ListToolchainsResponse), (ActivateToolchain, Ack), - (ActiveToolchain, ActiveToolchainResponse) + (ActiveToolchain, ActiveToolchainResponse), + (GetPathMetadata, GetPathMetadataResponse) ); entity_messages!( @@ -555,7 +555,6 @@ entity_messages!( SynchronizeContexts, LspExtSwitchSourceHeader, UpdateUserSettings, - CheckFileExists, LanguageServerLog, Toast, HideToast, @@ -566,7 +565,8 @@ entity_messages!( UpdateGitBranch, ListToolchains, ActivateToolchain, - ActiveToolchain + ActiveToolchain, + GetPathMetadata ); entity_messages!( diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index ce34af247f02695b9140fcf64050a299d36e2e03..155b141af6940ae7638954cd20c796c6408c2cc8 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -150,7 +150,7 @@ impl HeadlessProject { session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer); client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory); - client.add_request_handler(cx.weak_model(), Self::handle_check_file_exists); + client.add_request_handler(cx.weak_model(), Self::handle_get_path_metadata); client.add_request_handler(cx.weak_model(), Self::handle_shutdown_remote_server); client.add_request_handler(cx.weak_model(), Self::handle_ping); @@ -525,18 +525,20 @@ impl HeadlessProject { Ok(proto::ListRemoteDirectoryResponse { entries }) } - pub async fn handle_check_file_exists( + pub async fn handle_get_path_metadata( this: Model, - envelope: TypedEnvelope, + envelope: TypedEnvelope, cx: AsyncAppContext, - ) -> Result { + ) -> Result { let fs = cx.read_model(&this, |this, _| this.fs.clone())?; let expanded = shellexpand::tilde(&envelope.payload.path).to_string(); - let exists = fs.is_file(&PathBuf::from(expanded.clone())).await; + let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?; + let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false); - Ok(proto::CheckFileExistsResponse { - exists, + Ok(proto::GetPathMetadataResponse { + exists: metadata.is_some(), + is_dir, path: expanded, }) } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 82e3824eb07fef24988ad634709ce095ed1cc43d..c7d3a3c97fbda3dc609a19330e4f726f2e9250f8 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -604,7 +604,10 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont } #[gpui::test] -async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { +async fn test_remote_resolve_path_in_buffer( + cx: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { let fs = FakeFs::new(server_cx.executor()); fs.insert_tree( "/code", @@ -639,10 +642,11 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut let path = project .update(cx, |project, cx| { - project.resolve_existing_file_path("/code/project1/README.md", &buffer, cx) + project.resolve_path_in_buffer("/code/project1/README.md", &buffer, cx) }) .await .unwrap(); + 
assert!(path.is_file()); assert_eq!( path.abs_path().unwrap().to_string_lossy(), "/code/project1/README.md" @@ -650,15 +654,80 @@ async fn test_remote_resolve_file_path(cx: &mut TestAppContext, server_cx: &mut let path = project .update(cx, |project, cx| { - project.resolve_existing_file_path("../README.md", &buffer, cx) + project.resolve_path_in_buffer("../README.md", &buffer, cx) }) .await .unwrap(); - + assert!(path.is_file()); assert_eq!( path.project_path().unwrap().clone(), ProjectPath::from((worktree_id, "README.md")) ); + + let path = project + .update(cx, |project, cx| { + project.resolve_path_in_buffer("../src", &buffer, cx) + }) + .await + .unwrap(); + assert_eq!( + path.project_path().unwrap().clone(), + ProjectPath::from((worktree_id, "src")) + ); + assert!(path.is_dir()); +} + +#[gpui::test] +async fn test_remote_resolve_abs_path(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "project1": { + ".git": {}, + "README.md": "# project 1", + "src": { + "lib.rs": "fn one() -> usize { 1 }" + } + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/README.md", cx) + }) + .await + .unwrap(); + + assert!(path.is_file()); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/README.md" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/src", cx) + }) + .await + .unwrap(); + + assert!(path.is_dir()); + assert_eq!( + path.abs_path().unwrap().to_string_lossy(), + "/code/project1/src" + ); + + let path = project + .update(cx, |project, cx| { + project.resolve_abs_path("/code/project1/DOESNOTEXIST", cx) + }) + .await; + assert!(path.is_none()); } #[gpui::test(iterations = 10)] diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index de2c985f340a338a44d7984f7ffafac62b07e920..f0786aa47949749e87284b5639a293ab6add361e 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1218,7 +1218,7 @@ impl Workspace { notify_if_database_failed(window, &mut cx); let opened_items = window .update(&mut cx, |_workspace, cx| { - open_items(serialized_workspace, project_paths, app_state, cx) + open_items(serialized_workspace, project_paths, cx) })? .await .unwrap_or_default(); @@ -2058,8 +2058,10 @@ impl Workspace { cx: &mut ViewContext, ) -> Task>> { match path { - ResolvedPath::ProjectPath(project_path) => self.open_path(project_path, None, true, cx), - ResolvedPath::AbsPath(path) => self.open_abs_path(path, false, cx), + ResolvedPath::ProjectPath { project_path, .. } => { + self.open_path(project_path, None, true, cx) + } + ResolvedPath::AbsPath { path, .. 
} => self.open_abs_path(path, false, cx), } } @@ -4563,7 +4565,6 @@ fn window_bounds_env_override() -> Option> { fn open_items( serialized_workspace: Option, mut project_paths_to_open: Vec<(PathBuf, Option)>, - app_state: Arc, cx: &mut ViewContext, ) -> impl 'static + Future>>>>> { let restored_items = serialized_workspace.map(|serialized_workspace| { @@ -4619,14 +4620,20 @@ fn open_items( .enumerate() .map(|(ix, (abs_path, project_path))| { let workspace = workspace.clone(); - cx.spawn(|mut cx| { - let fs = app_state.fs.clone(); - async move { - let file_project_path = project_path?; - if fs.is_dir(&abs_path).await { - None - } else { - Some(( + cx.spawn(|mut cx| async move { + let file_project_path = project_path?; + let abs_path_task = workspace.update(&mut cx, |workspace, cx| { + workspace.project().update(cx, |project, cx| { + project.resolve_abs_path(abs_path.to_string_lossy().as_ref(), cx) + }) + }); + + // We only want to open file paths here. If one of the items + // here is a directory, it was already opened further above + // with a `find_or_create_worktree`. + if let Ok(task) = abs_path_task { + if task.await.map_or(true, |p| p.is_file()) { + return Some(( ix, workspace .update(&mut cx, |workspace, cx| { @@ -4634,9 +4641,10 @@ fn open_items( }) .log_err()? .await, - )) + )); } } + None }) }); @@ -5580,7 +5588,7 @@ pub fn open_ssh_project( .update(&mut cx, |_, cx| { cx.activate_window(); - open_items(serialized_workspace, project_paths_to_open, app_state, cx) + open_items(serialized_workspace, project_paths_to_open, cx) })? .await?; From 5e9ff3e3131e766a01e161353a2ba1596de261b7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 28 Oct 2024 11:36:44 -0400 Subject: [PATCH 72/76] dart: Bump to v0.1.2 (#19835) This PR bumps the Dart extension to v0.1.2. Changes: - https://github.com/zed-industries/zed/pull/19592 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/dart/Cargo.toml | 2 +- extensions/dart/extension.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bd9ad91bf7416a7281396b3de2e9550c6d995a4b..921ec3a4f06cfcb4af215757ddd43f87e67409a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15132,7 +15132,7 @@ dependencies = [ [[package]] name = "zed_dart" -version = "0.1.1" +version = "0.1.2" dependencies = [ "zed_extension_api 0.1.0", ] diff --git a/extensions/dart/Cargo.toml b/extensions/dart/Cargo.toml index 3d79e104c105c68d7707fb8fc81d1266a4567195..8d50e620cc3c03d87eca7132b531376e0ec067e1 100644 --- a/extensions/dart/Cargo.toml +++ b/extensions/dart/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_dart" -version = "0.1.1" +version = "0.1.2" edition = "2021" publish = false license = "Apache-2.0" diff --git a/extensions/dart/extension.toml b/extensions/dart/extension.toml index 5ea8c37c2f917fc9724b477a266374a5c002f0f0..684580e7c05666dddb28f34e097f046d27efca53 100644 --- a/extensions/dart/extension.toml +++ b/extensions/dart/extension.toml @@ -1,7 +1,7 @@ id = "dart" name = "Dart" description = "Dart support." -version = "0.1.1" +version = "0.1.2" schema_version = 1 authors = ["Abdullah Alsigar ", "Flo ", "ybbond "] repository = "https://github.com/zed-industries/zed" From a451bcc3c47e044fa7adadc0a636b1a8e0e32a87 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 28 Oct 2024 11:45:18 -0400 Subject: [PATCH 73/76] collab: Exempt staff from LLM usage limits (#19836) This PR updates the usage limit check to exempt Zed staff members from usage limits. 
We previously had some affordances for the rate limits, but hadn't yet updated it for the usage-based billing. Release Notes: - N/A --- crates/collab/src/llm.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index cb3478879e14907900015106b43433d018ab71de..654327c4637ad25df58529ee0cf15bd7b00690fd 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -449,6 +449,10 @@ async fn check_usage_limit( model_name: &str, claims: &LlmTokenClaims, ) -> Result<()> { + if claims.is_staff { + return Ok(()); + } + let model = state.db.model(provider, model_name)?; let usage = state .db @@ -513,11 +517,6 @@ async fn check_usage_limit( ]; for (used, limit, usage_measure) in checks { - // Temporarily bypass rate-limiting for staff members. - if claims.is_staff { - continue; - } - if used > limit { let resource = match usage_measure { UsageMeasure::RequestsPerMinute => "requests_per_minute", From fab2f22a89443f2845ed625e3961c3cb23c00d01 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Mon, 28 Oct 2024 17:07:30 +0100 Subject: [PATCH 74/76] remote project: Fix project reference leak when waiting for prompt reply (#19838) When the language server gave us a prompt and we'd close the window, we wouldn't release the `project` until the next `flush_effects` call that came in when opening a window. With this change, we no longer hold a strong reference to the project in the future. Fixes the leak and makes sure we clean up the SSH connection when closing a window. Release Notes: - N/A Co-authored-by: Bennet --- crates/project/src/project.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 04ae203b4d2257f6bae6847669ffc63c3c558bf1..eb5edabc8e6cfdbbc438f19e5a98045edb02d7fa 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3603,6 +3603,13 @@ impl Project { anyhow::Ok(()) })??; + // We drop `this` to avoid holding a reference in this future for too + // long. + // If we keep the reference, we might not drop the `Project` early + // enough when closing a window and it will only get releases on the + // next `flush_effects()` call. + drop(this); + let answer = rx.next().await; Ok(LanguageServerPromptResponse { From f5d5fab2c8fc2066426b7fc80ccb964bef1ef534 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 28 Oct 2024 13:37:28 -0400 Subject: [PATCH 75/76] Improve `fold_at_level` performance (#19845) Just spotted a tiny error that was causing us to continue looking for nested folds 1 layer deeper than any fold already found at the target level. We shouldn't continue to seek for a deeper fold after the fold at the target level is found. 
Tested on a debug build and used `editor.rs` as the source material: ``` Old Level 1 fold: [crates/editor/src/editor.rs:10777:9] counter = 2806 [crates/editor/src/editor.rs:10778:9] time_elapsed = 320.570792ms Level 2 fold: [crates/editor/src/editor.rs:10777:9] counter = 5615 [crates/editor/src/editor.rs:10778:9] time_elapsed = 497.4305ms Level 3 fold: [crates/editor/src/editor.rs:10777:9] counter = 7528 [crates/editor/src/editor.rs:10778:9] time_elapsed = 619.818334ms New Level 1 fold: [crates/editor/src/editor.rs:10776:9] counter = 543 [crates/editor/src/editor.rs:10777:9] time_elapsed = 139.115625ms Level 2 fold: [crates/editor/src/editor.rs:10776:9] counter = 2806 [crates/editor/src/editor.rs:10777:9] time_elapsed = 312.560416ms Level 3 fold: [crates/editor/src/editor.rs:10776:9] counter = 5615 [crates/editor/src/editor.rs:10777:9] time_elapsed = 498.873292ms ``` Release Notes: - N/A --- crates/editor/src/editor.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d23889b42767991c5021edf3ff69f9dd23c66a82..df13f748066a3b98da2b69bf32d262c3aadc625b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10756,12 +10756,10 @@ impl Editor { let nested_start_row = foldable_range.0.start.row + 1; let nested_end_row = foldable_range.0.end.row; - if current_level == fold_at_level { - fold_ranges.push(foldable_range); - } - - if current_level <= fold_at_level { + if current_level < fold_at_level { stack.push((nested_start_row, nested_end_row, current_level + 1)); + } else if current_level == fold_at_level { + fold_ranges.push(foldable_range); } start_row = nested_end_row + 1; From 826d83edfee474ede86377666c3c13dd7422d453 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Mon, 28 Oct 2024 12:28:42 -0700 Subject: [PATCH 76/76] Fix backtrace spam on remote server (#19850) Release Notes: - N/A Co-authored-by: conrad --- crates/remote/src/ssh_session.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 422937ed234de3813fc92c8e9c121dbf5e876e4e..857b139736b8adf11ba0a67f4ccda6005e803c70 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1221,9 +1221,11 @@ impl RemoteConnection for SshRemoteConnection { delegate.set_status(Some("Starting proxy"), cx); let mut start_proxy_command = format!( - "RUST_LOG={} RUST_BACKTRACE={} {:?} proxy --identifier {}", + "RUST_LOG={} {} {:?} proxy --identifier {}", std::env::var("RUST_LOG").unwrap_or_default(), - std::env::var("RUST_BACKTRACE").unwrap_or_default(), + std::env::var("RUST_BACKTRACE") + .map(|b| { format!("RUST_BACKTRACE={}", b) }) + .unwrap_or_default(), remote_binary_path, unique_identifier, );