From 1ae326432e7bda2e89fd63e59a56c082277106b4 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Thu, 4 Sep 2025 09:14:53 -0600 Subject: [PATCH 001/109] Extract a scheduler crate from GPUI to enable unified integration testing of client and server code (#37326) Extracts and cleans up GPUI's scheduler code into a new `scheduler` crate, making it pluggable by external runtimes. This will enable deterministic integration testing with cloud components by providing a unified test scheduler across Zed and backend code. In Zed, it will replace the existing GPUI scheduler for consistent async task management across platforms. ## Changes - **Core Implementation**: `TestScheduler` with seed-based randomization, session tracking (`SessionId`), and foreground/background task separation for reproducible testing. - **Executors**: `ForegroundExecutor` (!Send, thread-local) and `BackgroundExecutor` (Send, with blocking/timeout support) as GPUI-compatible wrappers. - **Clock and Timer**: Controllable `TestClock` and future-based `Timer` for time-sensitive tests. - **Testing APIs**: `once()`, `with_seed()`, and `many()` methods for configurable test runs. - **Dependencies**: Added `async-task`, `chrono`, `futures`, etc., with updates to `Cargo.toml` and lock file. ## Benefits - **Integration Testing**: Facilitates reliable async tests involving cloud sessions, reducing flakiness via deterministic execution. - **Pluggability**: Trait-based design (`Scheduler`) allows easy integration into non-GPUI runtimes while maintaining GPUI compatibility. - **Cleanup**: Refactors GPUI scheduler logic for clarity, correctness (no `unwrap()`, proper error handling), and extensibility. Follows Rust guidelines; run `./script/clippy` for verification. 
- [x] Define and test a core scheduler that we think can power our cloud code and GPUI - [ ] Replace GPUI's scheduler Release Notes: - N/A --------- Co-authored-by: Antonio Scandurra --- .rules | 13 + Cargo.lock | 73 ++-- Cargo.toml | 6 +- crates/acp_thread/src/acp_thread.rs | 6 +- crates/action_log/src/action_log.rs | 6 +- crates/agent_ui/src/buffer_codegen.rs | 6 +- .../src/assistant_context_tests.rs | 14 +- crates/assistant_tools/src/edit_agent.rs | 8 +- .../src/edit_agent/create_file_parser.rs | 2 +- .../src/edit_agent/edit_parser.rs | 2 +- .../assistant_tools/src/edit_agent/evals.rs | 4 +- .../src/edit_agent/streaming_fuzzy_matcher.rs | 2 +- crates/buffer_diff/src/buffer_diff.rs | 17 +- crates/channel/src/channel_chat.rs | 6 +- crates/client/src/client.rs | 7 +- crates/collab/src/auth.rs | 37 +- crates/collab/src/db.rs | 2 +- crates/collab/src/db/tests.rs | 4 +- crates/collab/src/tests/integration_tests.rs | 2 +- .../src/tests/random_channel_buffer_tests.rs | 2 +- .../random_project_collaboration_tests.rs | 83 +++-- .../src/tests/randomized_test_helpers.rs | 18 +- crates/diagnostics/src/diagnostics_tests.rs | 45 +-- crates/editor/src/display_map.rs | 59 ++- crates/editor/src/display_map/block_map.rs | 56 +-- crates/editor/src/display_map/fold_map.rs | 48 +-- crates/editor/src/display_map/inlay_map.rs | 36 +- crates/editor/src/display_map/tab_map.rs | 14 +- crates/editor/src/display_map/wrap_map.rs | 24 +- crates/editor/src/editor.rs | 4 +- crates/editor/src/git/blame.rs | 8 +- crates/gpui/examples/data_table.rs | 54 +-- crates/gpui/src/app/test_context.rs | 2 +- crates/gpui/src/bounds_tree.rs | 10 +- crates/gpui/src/platform/test/dispatcher.rs | 10 +- crates/gpui/src/platform/windows/platform.rs | 6 +- crates/language/src/buffer.rs | 4 +- crates/language/src/buffer_tests.rs | 32 +- crates/multi_buffer/src/multi_buffer.rs | 39 +- crates/multi_buffer/src/multi_buffer_tests.rs | 48 +-- crates/project/src/lsp_store.rs | 4 +- crates/project/src/project_tests.rs | 6 
+- crates/rope/benches/rope_benchmark.rs | 6 +- crates/rope/src/chunk.rs | 8 +- crates/rope/src/rope.rs | 22 +- crates/rpc/src/auth.rs | 40 +- crates/scheduler/Cargo.toml | 25 ++ crates/scheduler/LICENSE-APACHE | 1 + crates/scheduler/src/clock.rs | 34 ++ crates/scheduler/src/executor.rs | 137 +++++++ crates/scheduler/src/scheduler.rs | 63 ++++ crates/scheduler/src/test_scheduler.rs | 352 ++++++++++++++++++ crates/scheduler/src/tests.rs | 348 +++++++++++++++++ crates/streaming_diff/src/streaming_diff.rs | 12 +- crates/sum_tree/src/sum_tree.rs | 46 ++- crates/terminal/src/terminal.rs | 17 +- crates/text/src/locator.rs | 8 +- crates/text/src/network.rs | 6 +- crates/text/src/patch.rs | 12 +- crates/text/src/tests.rs | 21 +- crates/text/src/text.rs | 8 +- crates/util/src/util.rs | 13 +- crates/worktree/src/worktree_tests.rs | 30 +- crates/zeta/src/input_excerpt.rs | 4 +- 64 files changed, 1569 insertions(+), 473 deletions(-) create mode 100644 crates/scheduler/Cargo.toml create mode 120000 crates/scheduler/LICENSE-APACHE create mode 100644 crates/scheduler/src/clock.rs create mode 100644 crates/scheduler/src/executor.rs create mode 100644 crates/scheduler/src/scheduler.rs create mode 100644 crates/scheduler/src/test_scheduler.rs create mode 100644 crates/scheduler/src/tests.rs diff --git a/.rules b/.rules index da009f1877b4c6ef2f0613995391852d4bf1dc8a..2f2b9cd705d95775bedf092bc4e6254136da6117 100644 --- a/.rules +++ b/.rules @@ -12,6 +12,19 @@ - Example: avoid `let _ = client.request(...).await?;` - use `client.request(...).await?;` instead * When implementing async operations that may fail, ensure errors propagate to the UI layer so users get meaningful feedback. * Never create files with `mod.rs` paths - prefer `src/some_module.rs` instead of `src/some_module/mod.rs`. 
+* When creating new crates, prefer specifying the library root path in `Cargo.toml` using `[lib] path = "...rs"` instead of the default `lib.rs`, to maintain consistent and descriptive naming (e.g., `gpui.rs` or `main.rs`). +* Avoid creative additions unless explicitly requested +* Use full words for variable names (no abbreviations like "q" for "queue") +* Use variable shadowing to scope clones in async contexts for clarity, minimizing the lifetime of borrowed references. + Example: + ```rust + executor.spawn({ + let task_ran = task_ran.clone(); + async move { + *task_ran.borrow_mut() = true; + } + }); + ``` # GPUI diff --git a/Cargo.lock b/Cargo.lock index 58d01da63372431e107ea9c0b17fde0700f9050f..ee80d59006f50c321e80bbe6fca9288b345524be 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -26,7 +26,7 @@ dependencies = [ "portable-pty", "project", "prompt_store", - "rand 0.8.5", + "rand 0.9.1", "serde", "serde_json", "settings", @@ -79,7 +79,7 @@ dependencies = [ "log", "pretty_assertions", "project", - "rand 0.8.5", + "rand 0.9.1", "serde_json", "settings", "text", @@ -172,7 +172,7 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", - "rand 0.8.5", + "rand 0.9.1", "ref-cast", "rope", "schemars", @@ -408,7 +408,7 @@ dependencies = [ "project", "prompt_store", "proto", - "rand 0.8.5", + "rand 0.9.1", "release_channel", "rope", "rules_library", @@ -834,7 +834,7 @@ dependencies = [ "project", "prompt_store", "proto", - "rand 0.8.5", + "rand 0.9.1", "regex", "rpc", "serde", @@ -933,7 +933,7 @@ dependencies = [ "parking_lot", "pretty_assertions", "project", - "rand 0.8.5", + "rand 0.9.1", "regex", "serde", "serde_json", @@ -985,7 +985,7 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", - "rand 0.8.5", + "rand 0.9.1", "regex", "reqwest_client", "rust-embed", @@ -2478,7 +2478,7 @@ dependencies = [ "language", "log", "pretty_assertions", - "rand 0.8.5", + "rand 0.9.1", "rope", "serde_json", "sum_tree", @@ -2899,7 +2899,7 @@ dependencies = [ 
"language", "log", "postage", - "rand 0.8.5", + "rand 0.9.1", "release_channel", "rpc", "settings", @@ -3086,7 +3086,7 @@ dependencies = [ "parking_lot", "paths", "postage", - "rand 0.8.5", + "rand 0.9.1", "regex", "release_channel", "rpc", @@ -3335,7 +3335,7 @@ dependencies = [ "prometheus", "prompt_store", "prost 0.9.0", - "rand 0.8.5", + "rand 0.9.1", "recent_projects", "release_channel", "remote", @@ -4697,7 +4697,7 @@ dependencies = [ "markdown", "pretty_assertions", "project", - "rand 0.8.5", + "rand 0.9.1", "serde", "serde_json", "settings", @@ -5068,7 +5068,7 @@ dependencies = [ "parking_lot", "pretty_assertions", "project", - "rand 0.8.5", + "rand 0.9.1", "regex", "release_channel", "rpc", @@ -5563,7 +5563,7 @@ dependencies = [ "parking_lot", "paths", "project", - "rand 0.8.5", + "rand 0.9.1", "release_channel", "remote", "reqwest_client", @@ -6412,7 +6412,7 @@ dependencies = [ "log", "parking_lot", "pretty_assertions", - "rand 0.8.5", + "rand 0.9.1", "regex", "rope", "schemars", @@ -7465,7 +7465,7 @@ dependencies = [ "pathfinder_geometry", "postage", "profiling", - "rand 0.8.5", + "rand 0.9.1", "raw-window-handle", "refineable", "reqwest_client", @@ -9078,7 +9078,7 @@ dependencies = [ "parking_lot", "postage", "pretty_assertions", - "rand 0.8.5", + "rand 0.9.1", "regex", "rpc", "schemars", @@ -10392,7 +10392,7 @@ dependencies = [ "parking_lot", "pretty_assertions", "project", - "rand 0.8.5", + "rand 0.9.1", "rope", "serde", "settings", @@ -12618,7 +12618,7 @@ dependencies = [ "postage", "prettier", "pretty_assertions", - "rand 0.8.5", + "rand 0.9.1", "regex", "release_channel", "remote", @@ -13892,7 +13892,7 @@ dependencies = [ "ctor", "gpui", "log", - "rand 0.8.5", + "rand 0.9.1", "rayon", "smallvec", "sum_tree", @@ -13921,7 +13921,7 @@ dependencies = [ "gpui", "parking_lot", "proto", - "rand 0.8.5", + "rand 0.9.1", "rsa", "serde", "serde_json", @@ -14356,6 +14356,19 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "scheduler" +version = 
"0.1.0" +dependencies = [ + "async-task", + "chrono", + "futures 0.3.31", + "parking", + "parking_lot", + "rand 0.9.1", + "workspace-hack", +] + [[package]] name = "schema_generator" version = "0.1.0" @@ -15655,7 +15668,7 @@ name = "streaming_diff" version = "0.1.0" dependencies = [ "ordered-float 2.10.1", - "rand 0.8.5", + "rand 0.9.1", "rope", "util", "workspace-hack", @@ -15769,7 +15782,7 @@ dependencies = [ "arrayvec", "ctor", "log", - "rand 0.8.5", + "rand 0.9.1", "rayon", "workspace-hack", "zlog", @@ -16360,7 +16373,7 @@ dependencies = [ "futures 0.3.31", "gpui", "libc", - "rand 0.8.5", + "rand 0.9.1", "regex", "release_channel", "schemars", @@ -16408,7 +16421,7 @@ dependencies = [ "language", "log", "project", - "rand 0.8.5", + "rand 0.9.1", "regex", "schemars", "search", @@ -16440,7 +16453,7 @@ dependencies = [ "log", "parking_lot", "postage", - "rand 0.8.5", + "rand 0.9.1", "regex", "rope", "smallvec", @@ -17797,7 +17810,7 @@ dependencies = [ "libc", "log", "nix 0.29.0", - "rand 0.8.5", + "rand 0.9.1", "regex", "rust-embed", "schemars", @@ -18588,7 +18601,7 @@ dependencies = [ "futures 0.3.31", "gpui", "parking_lot", - "rand 0.8.5", + "rand 0.9.1", "workspace-hack", "zlog", ] @@ -20047,7 +20060,7 @@ dependencies = [ "paths", "postage", "pretty_assertions", - "rand 0.8.5", + "rand 0.9.1", "rpc", "schemars", "serde", @@ -20812,7 +20825,7 @@ dependencies = [ "menu", "postage", "project", - "rand 0.8.5", + "rand 0.9.1", "regex", "release_channel", "reqwest_client", diff --git a/Cargo.toml b/Cargo.toml index 941c364e0dd85def66ebbc4e310ef0a90458fe44..8a487b612a18dc837d3cd75697f13bf92b5b28b7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -131,6 +131,7 @@ members = [ "crates/refineable", "crates/refineable/derive_refineable", "crates/release_channel", + "crates/scheduler", "crates/remote", "crates/remote_server", "crates/repl", @@ -360,6 +361,7 @@ proto = { path = "crates/proto" } recent_projects = { path = "crates/recent_projects" } refineable = { path = 
"crates/refineable" } release_channel = { path = "crates/release_channel" } +scheduler = { path = "crates/scheduler" } remote = { path = "crates/remote" } remote_server = { path = "crates/remote_server" } repl = { path = "crates/repl" } @@ -444,6 +446,7 @@ async-fs = "2.1" async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" } async-recursion = "1.0.0" async-tar = "0.5.0" +async-task = "4.7" async-trait = "0.1" async-tungstenite = "0.29.1" async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } @@ -538,6 +541,7 @@ objc = "0.2" open = "5.0.0" ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } +parking = "2.0" parking_lot = "0.12.1" partial-json-fixer = "0.5.3" parse_int = "0.9" @@ -560,7 +564,7 @@ prost-build = "0.9" prost-types = "0.9" pulldown-cmark = { version = "0.12.0", default-features = false } quote = "1.0.9" -rand = "0.8.5" +rand = "0.9" rayon = "1.8" ref-cast = "1.0.24" regex = "1.5" diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index dc295369cce2b8fda596e3917724187bd35b7377..a3a8e31230b749b7b774a380030aab4600d78a07 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -2114,7 +2114,7 @@ mod tests { use gpui::{App, AsyncApp, TestAppContext, WeakEntity}; use indoc::indoc; use project::{FakeFs, Fs}; - use rand::Rng as _; + use rand::{distr, prelude::*}; use serde_json::json; use settings::SettingsStore; use smol::stream::StreamExt as _; @@ -3057,8 +3057,8 @@ mod tests { cx: &mut App, ) -> Task>> { let session_id = acp::SessionId( - rand::thread_rng() - .sample_iter(&rand::distributions::Alphanumeric) + rand::rng() + .sample_iter(&distr::Alphanumeric) .take(7) .map(char::from) .collect::() diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 
9ec10f4dbb0e670bf20d9c033db9cec02e5fda67..11ba596ac5a0ecd4ed49744d0eafa9defcde20c1 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -2218,7 +2218,7 @@ mod tests { action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); for _ in 0..operations { - match rng.gen_range(0..100) { + match rng.random_range(0..100) { 0..25 => { action_log.update(cx, |log, cx| { let range = buffer.read(cx).random_byte_range(0, &mut rng); @@ -2237,7 +2237,7 @@ mod tests { .unwrap(); } _ => { - let is_agent_edit = rng.gen_bool(0.5); + let is_agent_edit = rng.random_bool(0.5); if is_agent_edit { log::info!("agent edit"); } else { @@ -2252,7 +2252,7 @@ mod tests { } } - if rng.gen_bool(0.2) { + if rng.random_bool(0.2) { quiesce(&action_log, &buffer, cx); } } diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 04eb41793f2257a9dccfdd089594d2f90d0ce513..2309aad754aee55af5ad040c39d22304486446a4 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -1139,7 +1139,7 @@ mod tests { ); while !new_text.is_empty() { let max_len = cmp::min(new_text.len(), 10); - let len = rng.gen_range(1..=max_len); + let len = rng.random_range(1..=max_len); let (chunk, suffix) = new_text.split_at(len); chunks_tx.unbounded_send(chunk.to_string()).unwrap(); new_text = suffix; @@ -1208,7 +1208,7 @@ mod tests { ); while !new_text.is_empty() { let max_len = cmp::min(new_text.len(), 10); - let len = rng.gen_range(1..=max_len); + let len = rng.random_range(1..=max_len); let (chunk, suffix) = new_text.split_at(len); chunks_tx.unbounded_send(chunk.to_string()).unwrap(); new_text = suffix; @@ -1277,7 +1277,7 @@ mod tests { ); while !new_text.is_empty() { let max_len = cmp::min(new_text.len(), 10); - let len = rng.gen_range(1..=max_len); + let len = rng.random_range(1..=max_len); let (chunk, suffix) = new_text.split_at(len); chunks_tx.unbounded_send(chunk.to_string()).unwrap(); new_text 
= suffix; diff --git a/crates/assistant_context/src/assistant_context_tests.rs b/crates/assistant_context/src/assistant_context_tests.rs index 61d748cbddb0858dda2f181ea6c943426393e087..8b182685cfeb4e3ae1b9df8c532b8f0c5ad91235 100644 --- a/crates/assistant_context/src/assistant_context_tests.rs +++ b/crates/assistant_context/src/assistant_context_tests.rs @@ -764,7 +764,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std let network = Arc::new(Mutex::new(Network::new(rng.clone()))); let mut contexts = Vec::new(); - let num_peers = rng.gen_range(min_peers..=max_peers); + let num_peers = rng.random_range(min_peers..=max_peers); let context_id = ContextId::new(); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); for i in 0..num_peers { @@ -806,10 +806,10 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std || !network.lock().is_idle() || network.lock().contains_disconnected_peers() { - let context_index = rng.gen_range(0..contexts.len()); + let context_index = rng.random_range(0..contexts.len()); let context = &contexts[context_index]; - match rng.gen_range(0..100) { + match rng.random_range(0..100) { 0..=29 if mutation_count > 0 => { log::info!("Context {}: edit buffer", context_index); context.update(cx, |context, cx| { @@ -874,10 +874,10 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std merge_same_roles: true, })]; - let num_sections = rng.gen_range(0..=3); + let num_sections = rng.random_range(0..=3); let mut section_start = 0; for _ in 0..num_sections { - let mut section_end = rng.gen_range(section_start..=output_text.len()); + let mut section_end = rng.random_range(section_start..=output_text.len()); while !output_text.is_char_boundary(section_end) { section_end += 1; } @@ -924,7 +924,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std 75..=84 if mutation_count > 0 => { context.update(cx, |context, cx| { if let 
Some(message) = context.messages(cx).choose(&mut rng) { - let new_status = match rng.gen_range(0..3) { + let new_status = match rng.random_range(0..3) { 0 => MessageStatus::Done, 1 => MessageStatus::Pending, _ => MessageStatus::Error(SharedString::from("Random error")), @@ -971,7 +971,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std network.lock().broadcast(replica_id, ops_to_send); context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)); - } else if rng.gen_bool(0.1) && replica_id != 0 { + } else if rng.random_bool(0.1) && replica_id != 0 { log::info!("Context {}: disconnecting", context_index); network.lock().disconnect_peer(replica_id); } else if network.lock().has_unreceived(replica_id) { diff --git a/crates/assistant_tools/src/edit_agent.rs b/crates/assistant_tools/src/edit_agent.rs index 665ece2baaeed0dac32e5c0153ec1d79fef47f12..29ac53e2a606d63873f515aff25326debf0486f1 100644 --- a/crates/assistant_tools/src/edit_agent.rs +++ b/crates/assistant_tools/src/edit_agent.rs @@ -1315,17 +1315,17 @@ mod tests { #[gpui::test(iterations = 100)] async fn test_random_indents(mut rng: StdRng) { - let len = rng.gen_range(1..=100); + let len = rng.random_range(1..=100); let new_text = util::RandomCharIter::new(&mut rng) .with_simple_text() .take(len) .collect::(); let new_text = new_text .split('\n') - .map(|line| format!("{}{}", " ".repeat(rng.gen_range(0..=8)), line)) + .map(|line| format!("{}{}", " ".repeat(rng.random_range(0..=8)), line)) .collect::>() .join("\n"); - let delta = IndentDelta::Spaces(rng.gen_range(-4..=4)); + let delta = IndentDelta::Spaces(rng.random_range(-4i8..=4i8) as isize); let chunks = to_random_chunks(&mut rng, &new_text); let new_text_chunks = stream::iter(chunks.iter().enumerate().map(|(index, chunk)| { @@ -1357,7 +1357,7 @@ mod tests { } fn to_random_chunks(rng: &mut StdRng, input: &str) -> Vec { - let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50)); + let chunk_count = 
rng.random_range(1..=cmp::min(input.len(), 50)); let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count); chunk_indices.sort(); chunk_indices.push(input.len()); diff --git a/crates/assistant_tools/src/edit_agent/create_file_parser.rs b/crates/assistant_tools/src/edit_agent/create_file_parser.rs index 0aad9ecb87c1426486b531ac4291913cd0d74092..5126f9c6b1fe4ee5cc600ae93b7300b7af09451f 100644 --- a/crates/assistant_tools/src/edit_agent/create_file_parser.rs +++ b/crates/assistant_tools/src/edit_agent/create_file_parser.rs @@ -204,7 +204,7 @@ mod tests { } fn parse_random_chunks(input: &str, parser: &mut CreateFileParser, rng: &mut StdRng) -> String { - let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50)); + let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50)); let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count); chunk_indices.sort(); chunk_indices.push(input.len()); diff --git a/crates/assistant_tools/src/edit_agent/edit_parser.rs b/crates/assistant_tools/src/edit_agent/edit_parser.rs index db58c2bf3685030abfa6cfdd506c068c6643dce8..8411171ba4ea491d2603014a0715ce471b34e36f 100644 --- a/crates/assistant_tools/src/edit_agent/edit_parser.rs +++ b/crates/assistant_tools/src/edit_agent/edit_parser.rs @@ -996,7 +996,7 @@ mod tests { } fn parse_random_chunks(input: &str, parser: &mut EditParser, rng: &mut StdRng) -> Vec { - let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50)); + let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50)); let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count); chunk_indices.sort(); chunk_indices.push(input.len()); diff --git a/crates/assistant_tools/src/edit_agent/evals.rs b/crates/assistant_tools/src/edit_agent/evals.rs index 4f182b31481c5d855b59f4398e104d0eea05bc74..e78d43f56b2f13f90b83968dadd5ff79e1a96658 100644 --- a/crates/assistant_tools/src/edit_agent/evals.rs +++ b/crates/assistant_tools/src/edit_agent/evals.rs @@ -1399,7 +1399,7 @@ fn 
eval( } fn run_eval(eval: EvalInput, tx: mpsc::Sender>) { - let dispatcher = gpui::TestDispatcher::new(StdRng::from_entropy()); + let dispatcher = gpui::TestDispatcher::new(StdRng::from_os_rng()); let mut cx = TestAppContext::build(dispatcher, None); let output = cx.executor().block_test(async { let test = EditAgentTest::new(&mut cx).await; @@ -1707,7 +1707,7 @@ async fn retry_on_rate_limit(mut request: impl AsyncFnMut() -> Result) -> }; if let Some(retry_after) = retry_delay { - let jitter = retry_after.mul_f64(rand::thread_rng().gen_range(0.0..1.0)); + let jitter = retry_after.mul_f64(rand::rng().random_range(0.0..1.0)); eprintln!("Attempt #{attempt}: Retry after {retry_after:?} + jitter of {jitter:?}"); Timer::after(retry_after + jitter).await; } else { diff --git a/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs b/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs index 33b37679f0a345ef070942057b307bd377012d05..386b8204400a157b37b2f356829fa27df3abca92 100644 --- a/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs +++ b/crates/assistant_tools/src/edit_agent/streaming_fuzzy_matcher.rs @@ -771,7 +771,7 @@ mod tests { } fn to_random_chunks(rng: &mut StdRng, input: &str) -> Vec { - let chunk_count = rng.gen_range(1..=cmp::min(input.len(), 50)); + let chunk_count = rng.random_range(1..=cmp::min(input.len(), 50)); let mut chunk_indices = (0..input.len()).choose_multiple(rng, chunk_count); chunk_indices.sort(); chunk_indices.push(input.len()); diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index b20dad4ebbcc5990bd0a6a165375ca62481e609f..22ee20e0db2810610dc2e7a4cae86dca90681337 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -2044,10 +2044,10 @@ mod tests { #[gpui::test(iterations = 100)] async fn test_staging_and_unstaging_hunks(cx: &mut TestAppContext, mut rng: StdRng) { fn gen_line(rng: &mut StdRng) -> String { - if 
rng.gen_bool(0.2) { + if rng.random_bool(0.2) { "\n".to_owned() } else { - let c = rng.gen_range('A'..='Z'); + let c = rng.random_range('A'..='Z'); format!("{c}{c}{c}\n") } } @@ -2066,7 +2066,7 @@ mod tests { old_lines.into_iter() }; let mut result = String::new(); - let unchanged_count = rng.gen_range(0..=old_lines.len()); + let unchanged_count = rng.random_range(0..=old_lines.len()); result += &old_lines .by_ref() @@ -2076,14 +2076,14 @@ mod tests { s }); while old_lines.len() > 0 { - let deleted_count = rng.gen_range(0..=old_lines.len()); + let deleted_count = rng.random_range(0..=old_lines.len()); let _advance = old_lines .by_ref() .take(deleted_count) .map(|line| line.len() + 1) .sum::(); let minimum_added = if deleted_count == 0 { 1 } else { 0 }; - let added_count = rng.gen_range(minimum_added..=5); + let added_count = rng.random_range(minimum_added..=5); let addition = (0..added_count).map(|_| gen_line(rng)).collect::(); result += &addition; @@ -2092,7 +2092,8 @@ mod tests { if blank_lines == old_lines.len() { break; }; - let unchanged_count = rng.gen_range((blank_lines + 1).max(1)..=old_lines.len()); + let unchanged_count = + rng.random_range((blank_lines + 1).max(1)..=old_lines.len()); result += &old_lines.by_ref().take(unchanged_count).fold( String::new(), |mut s, line| { @@ -2149,7 +2150,7 @@ mod tests { ) }); let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot()); - let mut index_text = if rng.r#gen() { + let mut index_text = if rng.random() { Rope::from(head_text.as_str()) } else { working_copy.as_rope().clone() @@ -2165,7 +2166,7 @@ mod tests { } for _ in 0..operations { - let i = rng.gen_range(0..hunks.len()); + let i = rng.random_range(0..hunks.len()); let hunk = &mut hunks[i]; let hunk_to_change = hunk.clone(); let stage = match hunk.secondary_status { diff --git a/crates/channel/src/channel_chat.rs b/crates/channel/src/channel_chat.rs index 
baf23ac39f983c018da2f291bec7879913f12a58..776499c8760f13fbd2903780b1e234f8755d9860 100644 --- a/crates/channel/src/channel_chat.rs +++ b/crates/channel/src/channel_chat.rs @@ -129,7 +129,7 @@ impl ChannelChat { loaded_all_messages: false, next_pending_message_id: 0, last_acknowledged_id: None, - rng: StdRng::from_entropy(), + rng: StdRng::from_os_rng(), first_loaded_message_id: None, _subscription: subscription.set_entity(&cx.entity(), &cx.to_async()), } @@ -183,7 +183,7 @@ impl ChannelChat { let channel_id = self.channel_id; let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id)); - let nonce = self.rng.r#gen(); + let nonce = self.rng.random(); self.insert_messages( SumTree::from_item( ChannelMessage { @@ -257,7 +257,7 @@ impl ChannelChat { cx, ); - let nonce: u128 = self.rng.r#gen(); + let nonce: u128 = self.rng.random(); let request = self.rpc.request(proto::UpdateChannelMessage { channel_id: self.channel_id.0, diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 1287b4563c99cbd387b3a18d98fbbc734e55e4db..85f6aeade69cc04c5f58b72258ac062157094460 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -691,7 +691,7 @@ impl Client { #[cfg(any(test, feature = "test-support"))] let mut rng = StdRng::seed_from_u64(0); #[cfg(not(any(test, feature = "test-support")))] - let mut rng = StdRng::from_entropy(); + let mut rng = StdRng::from_os_rng(); let mut delay = INITIAL_RECONNECTION_DELAY; loop { @@ -721,8 +721,9 @@ impl Client { }, cx, ); - let jitter = - Duration::from_millis(rng.gen_range(0..delay.as_millis() as u64)); + let jitter = Duration::from_millis( + rng.random_range(0..delay.as_millis() as u64), + ); cx.background_executor().timer(delay + jitter).await; delay = cmp::min(delay * 2, MAX_RECONNECTION_DELAY); } else { diff --git a/crates/collab/src/auth.rs b/crates/collab/src/auth.rs index e484d6b510f444e764ac38210d6a5cfc42142807..13296b79ae8b3df97753e7adf4f2078990c187b0 100644 --- 
a/crates/collab/src/auth.rs +++ b/crates/collab/src/auth.rs @@ -227,7 +227,7 @@ pub async fn verify_access_token( #[cfg(test)] mod test { - use rand::thread_rng; + use rand::prelude::*; use scrypt::password_hash::{PasswordHasher, SaltString}; use sea_orm::EntityTrait; @@ -358,9 +358,42 @@ mod test { None, None, params, - &SaltString::generate(thread_rng()), + &SaltString::generate(PasswordHashRngCompat::new()), ) .map_err(anyhow::Error::new)? .to_string()) } + + // TODO: remove once we password_hash v0.6 is released. + struct PasswordHashRngCompat(rand::rngs::ThreadRng); + + impl PasswordHashRngCompat { + fn new() -> Self { + Self(rand::rng()) + } + } + + impl scrypt::password_hash::rand_core::RngCore for PasswordHashRngCompat { + fn next_u32(&mut self) -> u32 { + self.0.next_u32() + } + + fn next_u64(&mut self) -> u64 { + self.0.next_u64() + } + + fn fill_bytes(&mut self, dest: &mut [u8]) { + self.0.fill_bytes(dest); + } + + fn try_fill_bytes( + &mut self, + dest: &mut [u8], + ) -> Result<(), scrypt::password_hash::rand_core::Error> { + self.fill_bytes(dest); + Ok(()) + } + } + + impl scrypt::password_hash::rand_core::CryptoRng for PasswordHashRngCompat {} } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 95a485305ca31bde351f4962d1678e30801d8a01..f39da309dde4d4f9b2bebe4d117869f78225112d 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -256,7 +256,7 @@ impl Database { let test_options = self.test_options.as_ref().unwrap(); test_options.executor.simulate_random_delay().await; let fail_probability = *test_options.query_failure_probability.lock(); - if test_options.executor.rng().gen_bool(fail_probability) { + if test_options.executor.rng().random_bool(fail_probability) { return Err(anyhow!("simulated query failure"))?; } diff --git a/crates/collab/src/db/tests.rs b/crates/collab/src/db/tests.rs index 2eb8d377acaba9f8fe5ea558a29cc028c2aa11fd..f8560edda78217e6a5e09a5c2e66e0f436ca0477 100644 --- a/crates/collab/src/db/tests.rs 
+++ b/crates/collab/src/db/tests.rs @@ -75,10 +75,10 @@ impl TestDb { static LOCK: Mutex<()> = Mutex::new(()); let _guard = LOCK.lock(); - let mut rng = StdRng::from_entropy(); + let mut rng = StdRng::from_os_rng(); let url = format!( "postgres://postgres@localhost/zed-test-{}", - rng.r#gen::() + rng.random::() ); let runtime = tokio::runtime::Builder::new_current_thread() .enable_io() diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 6bb2db05201ea464053a758b390e84ccdfc6527a..07bd162e66f686c425dc441a57644b52141586e3 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -5746,7 +5746,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( let definitions; let buffer_b2; - if rng.r#gen() { + if rng.random() { cx_a.run_until_parked(); cx_b.run_until_parked(); definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx)); diff --git a/crates/collab/src/tests/random_channel_buffer_tests.rs b/crates/collab/src/tests/random_channel_buffer_tests.rs index 6fcd6d75cd0d827296f555bfa54c18dac518a3be..9451090af2198117ddb20241b99be5b208daa729 100644 --- a/crates/collab/src/tests/random_channel_buffer_tests.rs +++ b/crates/collab/src/tests/random_channel_buffer_tests.rs @@ -84,7 +84,7 @@ impl RandomizedTest for RandomChannelBufferTest { } loop { - match rng.gen_range(0..100_u32) { + match rng.random_range(0..100_u32) { 0..=29 => { let channel_name = client.channel_store().read_with(cx, |store, cx| { store.ordered_channels().find_map(|(_, channel)| { diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index bfe05c4a1d600bb280d3821350204d0b2d0d6e08..326f64cb244b88a64728f4347e3cfc31a8c252bf 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -17,7 +17,7 @@ 
use project::{ DEFAULT_COMPLETION_CONTEXT, Project, ProjectPath, search::SearchQuery, search::SearchResult, }; use rand::{ - distributions::{Alphanumeric, DistString}, + distr::{self, SampleString}, prelude::*, }; use serde::{Deserialize, Serialize}; @@ -168,19 +168,19 @@ impl RandomizedTest for ProjectCollaborationTest { ) -> ClientOperation { let call = cx.read(ActiveCall::global); loop { - match rng.gen_range(0..100_u32) { + match rng.random_range(0..100_u32) { // Mutate the call 0..=29 => { // Respond to an incoming call if call.read_with(cx, |call, _| call.incoming().borrow().is_some()) { - break if rng.gen_bool(0.7) { + break if rng.random_bool(0.7) { ClientOperation::AcceptIncomingCall } else { ClientOperation::RejectIncomingCall }; } - match rng.gen_range(0..100_u32) { + match rng.random_range(0..100_u32) { // Invite a contact to the current call 0..=70 => { let available_contacts = @@ -212,7 +212,7 @@ impl RandomizedTest for ProjectCollaborationTest { } // Mutate projects - 30..=59 => match rng.gen_range(0..100_u32) { + 30..=59 => match rng.random_range(0..100_u32) { // Open a new project 0..=70 => { // Open a remote project @@ -270,7 +270,7 @@ impl RandomizedTest for ProjectCollaborationTest { } // Mutate project worktrees - 81.. => match rng.gen_range(0..100_u32) { + 81.. 
=> match rng.random_range(0..100_u32) { // Add a worktree to a local project 0..=50 => { let Some(project) = client.local_projects().choose(rng).cloned() else { @@ -279,7 +279,7 @@ impl RandomizedTest for ProjectCollaborationTest { let project_root_name = root_name_for_project(&project, cx); let mut paths = client.fs().paths(false); paths.remove(0); - let new_root_path = if paths.is_empty() || rng.r#gen() { + let new_root_path = if paths.is_empty() || rng.random() { Path::new(path!("/")).join(plan.next_root_dir_name()) } else { paths.choose(rng).unwrap().clone() @@ -309,7 +309,7 @@ impl RandomizedTest for ProjectCollaborationTest { .choose(rng) }); let Some(worktree) = worktree else { continue }; - let is_dir = rng.r#gen::(); + let is_dir = rng.random::(); let mut full_path = worktree.read_with(cx, |w, _| PathBuf::from(w.root_name())); full_path.push(gen_file_name(rng)); @@ -334,7 +334,7 @@ impl RandomizedTest for ProjectCollaborationTest { let project_root_name = root_name_for_project(&project, cx); let is_local = project.read_with(cx, |project, _| project.is_local()); - match rng.gen_range(0..100_u32) { + match rng.random_range(0..100_u32) { // Manipulate an existing buffer 0..=70 => { let Some(buffer) = client @@ -349,7 +349,7 @@ impl RandomizedTest for ProjectCollaborationTest { let full_path = buffer .read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx)); - match rng.gen_range(0..100_u32) { + match rng.random_range(0..100_u32) { // Close the buffer 0..=15 => { break ClientOperation::CloseBuffer { @@ -360,7 +360,7 @@ impl RandomizedTest for ProjectCollaborationTest { } // Save the buffer 16..=29 if buffer.read_with(cx, |b, _| b.is_dirty()) => { - let detach = rng.gen_bool(0.3); + let detach = rng.random_bool(0.3); break ClientOperation::SaveBuffer { project_root_name, is_local, @@ -383,17 +383,17 @@ impl RandomizedTest for ProjectCollaborationTest { _ => { let offset = buffer.read_with(cx, |buffer, _| { buffer.clip_offset( - 
rng.gen_range(0..=buffer.len()), + rng.random_range(0..=buffer.len()), language::Bias::Left, ) }); - let detach = rng.r#gen(); + let detach = rng.random(); break ClientOperation::RequestLspDataInBuffer { project_root_name, full_path, offset, is_local, - kind: match rng.gen_range(0..5_u32) { + kind: match rng.random_range(0..5_u32) { 0 => LspRequestKind::Rename, 1 => LspRequestKind::Highlights, 2 => LspRequestKind::Definition, @@ -407,8 +407,8 @@ impl RandomizedTest for ProjectCollaborationTest { } 71..=80 => { - let query = rng.gen_range('a'..='z').to_string(); - let detach = rng.gen_bool(0.3); + let query = rng.random_range('a'..='z').to_string(); + let detach = rng.random_bool(0.3); break ClientOperation::SearchProject { project_root_name, is_local, @@ -460,7 +460,7 @@ impl RandomizedTest for ProjectCollaborationTest { // Create or update a file or directory 96.. => { - let is_dir = rng.r#gen::(); + let is_dir = rng.random::(); let content; let mut path; let dir_paths = client.fs().directories(false); @@ -470,11 +470,11 @@ impl RandomizedTest for ProjectCollaborationTest { path = dir_paths.choose(rng).unwrap().clone(); path.push(gen_file_name(rng)); } else { - content = Alphanumeric.sample_string(rng, 16); + content = distr::Alphanumeric.sample_string(rng, 16); // Create a new file or overwrite an existing file let file_paths = client.fs().files(); - if file_paths.is_empty() || rng.gen_bool(0.5) { + if file_paths.is_empty() || rng.random_bool(0.5) { path = dir_paths.choose(rng).unwrap().clone(); path.push(gen_file_name(rng)); path.set_extension("rs"); @@ -1090,7 +1090,7 @@ impl RandomizedTest for ProjectCollaborationTest { move |_, cx| { let background = cx.background_executor(); let mut rng = background.rng(); - let count = rng.gen_range::(1..3); + let count = rng.random_range::(1..3); let files = fs.as_fake().files(); let files = (0..count) .map(|_| files.choose(&mut rng).unwrap().clone()) @@ -1117,12 +1117,12 @@ impl RandomizedTest for ProjectCollaborationTest 
{ let background = cx.background_executor(); let mut rng = background.rng(); - let highlight_count = rng.gen_range(1..=5); + let highlight_count = rng.random_range(1..=5); for _ in 0..highlight_count { - let start_row = rng.gen_range(0..100); - let start_column = rng.gen_range(0..100); - let end_row = rng.gen_range(0..100); - let end_column = rng.gen_range(0..100); + let start_row = rng.random_range(0..100); + let start_column = rng.random_range(0..100); + let end_row = rng.random_range(0..100); + let end_column = rng.random_range(0..100); let start = PointUtf16::new(start_row, start_column); let end = PointUtf16::new(end_row, end_column); let range = @@ -1219,8 +1219,8 @@ impl RandomizedTest for ProjectCollaborationTest { guest_project.remote_id(), ); assert_eq!( - guest_snapshot.entries(false, 0).collect::>(), - host_snapshot.entries(false, 0).collect::>(), + guest_snapshot.entries(false, 0).map(null_out_entry_size).collect::>(), + host_snapshot.entries(false, 0).map(null_out_entry_size).collect::>(), "{} has different snapshot than the host for worktree {:?} ({:?}) and project {:?}", client.username, host_snapshot.abs_path(), @@ -1248,6 +1248,18 @@ impl RandomizedTest for ProjectCollaborationTest { ); } }); + + // A hack to work around a hack in + // https://github.com/zed-industries/zed/pull/16696 that wasn't + // detected until we upgraded the rng crate. This whole crate is + // going away with DeltaDB soon, so we hold our nose and + // continue. 
+ fn null_out_entry_size(entry: &project::Entry) -> project::Entry { + project::Entry { + size: 0, + ..entry.clone() + } + } } let buffers = client.buffers().clone(); @@ -1422,7 +1434,7 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation .filter(|path| path.starts_with(repo_path)) .collect::>(); - let count = rng.gen_range(0..=paths.len()); + let count = rng.random_range(0..=paths.len()); paths.shuffle(rng); paths.truncate(count); @@ -1434,13 +1446,13 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation let repo_path = client.fs().directories(false).choose(rng).unwrap().clone(); - match rng.gen_range(0..100_u32) { + match rng.random_range(0..100_u32) { 0..=25 => { let file_paths = generate_file_paths(&repo_path, rng, client); let contents = file_paths .into_iter() - .map(|path| (path, Alphanumeric.sample_string(rng, 16))) + .map(|path| (path, distr::Alphanumeric.sample_string(rng, 16))) .collect(); GitOperation::WriteGitIndex { @@ -1449,7 +1461,8 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation } } 26..=63 => { - let new_branch = (rng.gen_range(0..10) > 3).then(|| Alphanumeric.sample_string(rng, 8)); + let new_branch = + (rng.random_range(0..10) > 3).then(|| distr::Alphanumeric.sample_string(rng, 8)); GitOperation::WriteGitBranch { repo_path, @@ -1596,7 +1609,7 @@ fn choose_random_project(client: &TestClient, rng: &mut StdRng) -> Option String { let mut name = String::new(); for _ in 0..10 { - let letter = rng.gen_range('a'..='z'); + let letter = rng.random_range('a'..='z'); name.push(letter); } name @@ -1604,7 +1617,7 @@ fn gen_file_name(rng: &mut StdRng) -> String { fn gen_status(rng: &mut StdRng) -> FileStatus { fn gen_tracked_status(rng: &mut StdRng) -> TrackedStatus { - match rng.gen_range(0..3) { + match rng.random_range(0..3) { 0 => TrackedStatus { index_status: StatusCode::Unmodified, worktree_status: StatusCode::Unmodified, @@ -1626,7 +1639,7 @@ fn 
gen_status(rng: &mut StdRng) -> FileStatus { } fn gen_unmerged_status_code(rng: &mut StdRng) -> UnmergedStatusCode { - match rng.gen_range(0..3) { + match rng.random_range(0..3) { 0 => UnmergedStatusCode::Updated, 1 => UnmergedStatusCode::Added, 2 => UnmergedStatusCode::Deleted, @@ -1634,7 +1647,7 @@ fn gen_status(rng: &mut StdRng) -> FileStatus { } } - match rng.gen_range(0..2) { + match rng.random_range(0..2) { 0 => FileStatus::Unmerged(UnmergedStatus { first_head: gen_unmerged_status_code(rng), second_head: gen_unmerged_status_code(rng), diff --git a/crates/collab/src/tests/randomized_test_helpers.rs b/crates/collab/src/tests/randomized_test_helpers.rs index d6c299a6a9ed4e0439573e9b33fabe8ff122963d..9a372017e34f575f780d56f3936fefec832e160c 100644 --- a/crates/collab/src/tests/randomized_test_helpers.rs +++ b/crates/collab/src/tests/randomized_test_helpers.rs @@ -208,9 +208,9 @@ pub fn save_randomized_test_plan() { impl TestPlan { pub async fn new(server: &mut TestServer, mut rng: StdRng) -> Arc> { - let allow_server_restarts = rng.gen_bool(0.7); - let allow_client_reconnection = rng.gen_bool(0.7); - let allow_client_disconnection = rng.gen_bool(0.1); + let allow_server_restarts = rng.random_bool(0.7); + let allow_client_reconnection = rng.random_bool(0.7); + let allow_client_disconnection = rng.random_bool(0.1); let mut users = Vec::new(); for ix in 0..max_peers() { @@ -407,7 +407,7 @@ impl TestPlan { } Some(loop { - break match self.rng.gen_range(0..100) { + break match self.rng.random_range(0..100) { 0..=29 if clients.len() < self.users.len() => { let user = self .users @@ -421,13 +421,13 @@ impl TestPlan { } } 30..=34 if clients.len() > 1 && self.allow_client_disconnection => { - let (client, cx) = &clients[self.rng.gen_range(0..clients.len())]; + let (client, cx) = &clients[self.rng.random_range(0..clients.len())]; let user_id = client.current_user_id(cx); self.operation_ix += 1; ServerOperation::RemoveConnection { user_id } } 35..=39 if clients.len() > 1 && 
self.allow_client_reconnection => { - let (client, cx) = &clients[self.rng.gen_range(0..clients.len())]; + let (client, cx) = &clients[self.rng.random_range(0..clients.len())]; let user_id = client.current_user_id(cx); self.operation_ix += 1; ServerOperation::BounceConnection { user_id } @@ -439,12 +439,12 @@ impl TestPlan { _ if !clients.is_empty() => { let count = self .rng - .gen_range(1..10) + .random_range(1..10) .min(self.max_operations - self.operation_ix); let batch_id = util::post_inc(&mut self.next_batch_id); let mut user_ids = (0..count) .map(|_| { - let ix = self.rng.gen_range(0..clients.len()); + let ix = self.rng.random_range(0..clients.len()); let (client, cx) = &clients[ix]; client.current_user_id(cx) }) @@ -453,7 +453,7 @@ impl TestPlan { ServerOperation::MutateClients { user_ids, batch_id, - quiesce: self.rng.gen_bool(0.7), + quiesce: self.rng.random_bool(0.7), } } _ => continue, diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index fdca32520d1e08d562ac6f533968c146b5ec0673..6a8baecdb3513754683cc748717bb94e863df509 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -682,7 +682,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng Default::default(); for _ in 0..operations { - match rng.gen_range(0..100) { + match rng.random_range(0..100) { // language server completes its diagnostic check 0..=20 if !updated_language_servers.is_empty() => { let server_id = *updated_language_servers.iter().choose(&mut rng).unwrap(); @@ -691,7 +691,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng lsp_store.disk_based_diagnostics_finished(server_id, cx) }); - if rng.gen_bool(0.5) { + if rng.random_bool(0.5) { cx.run_until_parked(); } } @@ -701,7 +701,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng let (path, server_id, diagnostics) = match 
current_diagnostics.iter_mut().choose(&mut rng) { // update existing set of diagnostics - Some(((path, server_id), diagnostics)) if rng.gen_bool(0.5) => { + Some(((path, server_id), diagnostics)) if rng.random_bool(0.5) => { (path.clone(), *server_id, diagnostics) } @@ -709,13 +709,13 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng _ => { let path: PathBuf = format!(path!("/test/{}.rs"), post_inc(&mut next_filename)).into(); - let len = rng.gen_range(128..256); + let len = rng.random_range(128..256); let content = RandomCharIter::new(&mut rng).take(len).collect::(); fs.insert_file(&path, content.into_bytes()).await; let server_id = match language_server_ids.iter().choose(&mut rng) { - Some(server_id) if rng.gen_bool(0.5) => *server_id, + Some(server_id) if rng.random_bool(0.5) => *server_id, _ => { let id = LanguageServerId(language_server_ids.len()); language_server_ids.push(id); @@ -846,7 +846,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S let mut next_inlay_id = 0; for _ in 0..operations { - match rng.gen_range(0..100) { + match rng.random_range(0..100) { // language server completes its diagnostic check 0..=20 if !updated_language_servers.is_empty() => { let server_id = *updated_language_servers.iter().choose(&mut rng).unwrap(); @@ -855,7 +855,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S lsp_store.disk_based_diagnostics_finished(server_id, cx) }); - if rng.gen_bool(0.5) { + if rng.random_bool(0.5) { cx.run_until_parked(); } } @@ -864,7 +864,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S diagnostics.editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); if !snapshot.buffer_snapshot.is_empty() { - let position = rng.gen_range(0..snapshot.buffer_snapshot.len()); + let position = rng.random_range(0..snapshot.buffer_snapshot.len()); let position = 
snapshot.buffer_snapshot.clip_offset(position, Bias::Left); log::info!( "adding inlay at {position}/{}: {:?}", @@ -890,7 +890,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S let (path, server_id, diagnostics) = match current_diagnostics.iter_mut().choose(&mut rng) { // update existing set of diagnostics - Some(((path, server_id), diagnostics)) if rng.gen_bool(0.5) => { + Some(((path, server_id), diagnostics)) if rng.random_bool(0.5) => { (path.clone(), *server_id, diagnostics) } @@ -898,13 +898,13 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S _ => { let path: PathBuf = format!(path!("/test/{}.rs"), post_inc(&mut next_filename)).into(); - let len = rng.gen_range(128..256); + let len = rng.random_range(128..256); let content = RandomCharIter::new(&mut rng).take(len).collect::(); fs.insert_file(&path, content.into_bytes()).await; let server_id = match language_server_ids.iter().choose(&mut rng) { - Some(server_id) if rng.gen_bool(0.5) => *server_id, + Some(server_id) if rng.random_bool(0.5) => *server_id, _ => { let id = LanguageServerId(language_server_ids.len()); language_server_ids.push(id); @@ -1589,10 +1589,10 @@ fn randomly_update_diagnostics_for_path( next_id: &mut usize, rng: &mut impl Rng, ) { - let mutation_count = rng.gen_range(1..=3); + let mutation_count = rng.random_range(1..=3); for _ in 0..mutation_count { - if rng.gen_bool(0.3) && !diagnostics.is_empty() { - let idx = rng.gen_range(0..diagnostics.len()); + if rng.random_bool(0.3) && !diagnostics.is_empty() { + let idx = rng.random_range(0..diagnostics.len()); log::info!(" removing diagnostic at index {idx}"); diagnostics.remove(idx); } else { @@ -1601,7 +1601,7 @@ fn randomly_update_diagnostics_for_path( let new_diagnostic = random_lsp_diagnostic(rng, fs, path, unique_id); - let ix = rng.gen_range(0..=diagnostics.len()); + let ix = rng.random_range(0..=diagnostics.len()); log::info!( " inserting {} at index {ix}. 
{},{}..{},{}", new_diagnostic.message, @@ -1638,8 +1638,8 @@ fn random_lsp_diagnostic( let file_content = fs.read_file_sync(path).unwrap(); let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref()); - let start = rng.gen_range(0..file_text.len().saturating_add(ERROR_MARGIN)); - let end = rng.gen_range(start..file_text.len().saturating_add(ERROR_MARGIN)); + let start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN)); + let end = rng.random_range(start..file_text.len().saturating_add(ERROR_MARGIN)); let start_point = file_text.offset_to_point_utf16(start); let end_point = file_text.offset_to_point_utf16(end); @@ -1649,7 +1649,7 @@ fn random_lsp_diagnostic( lsp::Position::new(end_point.row, end_point.column), ); - let severity = if rng.gen_bool(0.5) { + let severity = if rng.random_bool(0.5) { Some(lsp::DiagnosticSeverity::ERROR) } else { Some(lsp::DiagnosticSeverity::WARNING) @@ -1657,13 +1657,14 @@ fn random_lsp_diagnostic( let message = format!("diagnostic {unique_id}"); - let related_information = if rng.gen_bool(0.3) { - let info_count = rng.gen_range(1..=3); + let related_information = if rng.random_bool(0.3) { + let info_count = rng.random_range(1..=3); let mut related_info = Vec::with_capacity(info_count); for i in 0..info_count { - let info_start = rng.gen_range(0..file_text.len().saturating_add(ERROR_MARGIN)); - let info_end = rng.gen_range(info_start..file_text.len().saturating_add(ERROR_MARGIN)); + let info_start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN)); + let info_end = + rng.random_range(info_start..file_text.len().saturating_add(ERROR_MARGIN)); let info_start_point = file_text.offset_to_point_utf16(info_start); let info_end_point = file_text.offset_to_point_utf16(info_end); diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index c16e4a6ddbb971b44d71421d6ad868e6423eb035..3a07ee45aff60a7ffc28e76ce5f7d4f79641d4b2 100644 --- a/crates/editor/src/display_map.rs +++ 
b/crates/editor/src/display_map.rs @@ -1552,15 +1552,15 @@ pub mod tests { .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let mut tab_size = rng.gen_range(1..=4); - let buffer_start_excerpt_header_height = rng.gen_range(1..=5); - let excerpt_header_height = rng.gen_range(1..=5); + let mut tab_size = rng.random_range(1..=4); + let buffer_start_excerpt_header_height = rng.random_range(1..=5); + let excerpt_header_height = rng.random_range(1..=5); let font_size = px(14.0); let max_wrap_width = 300.0; - let mut wrap_width = if rng.gen_bool(0.1) { + let mut wrap_width = if rng.random_bool(0.1) { None } else { - Some(px(rng.gen_range(0.0..=max_wrap_width))) + Some(px(rng.random_range(0.0..=max_wrap_width))) }; log::info!("tab size: {}", tab_size); @@ -1571,8 +1571,8 @@ pub mod tests { }); let buffer = cx.update(|cx| { - if rng.r#gen() { - let len = rng.gen_range(0..10); + if rng.random() { + let len = rng.random_range(0..10); let text = util::RandomCharIter::new(&mut rng) .take(len) .collect::(); @@ -1609,12 +1609,12 @@ pub mod tests { log::info!("display text: {:?}", snapshot.text()); for _i in 0..operations { - match rng.gen_range(0..100) { + match rng.random_range(0..100) { 0..=19 => { - wrap_width = if rng.gen_bool(0.2) { + wrap_width = if rng.random_bool(0.2) { None } else { - Some(px(rng.gen_range(0.0..=max_wrap_width))) + Some(px(rng.random_range(0.0..=max_wrap_width))) }; log::info!("setting wrap width to {:?}", wrap_width); map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); @@ -1634,28 +1634,27 @@ pub mod tests { } 30..=44 => { map.update(cx, |map, cx| { - if rng.r#gen() || blocks.is_empty() { + if rng.random() || blocks.is_empty() { let buffer = map.snapshot(cx).buffer_snapshot; - let block_properties = (0..rng.gen_range(1..=1)) + let block_properties = (0..rng.random_range(1..=1)) .map(|_| { - let position = - buffer.anchor_after(buffer.clip_offset( - rng.gen_range(0..=buffer.len()), - Bias::Left, - )); + let position 
= buffer.anchor_after(buffer.clip_offset( + rng.random_range(0..=buffer.len()), + Bias::Left, + )); - let placement = if rng.r#gen() { + let placement = if rng.random() { BlockPlacement::Above(position) } else { BlockPlacement::Below(position) }; - let height = rng.gen_range(1..5); + let height = rng.random_range(1..5); log::info!( "inserting block {:?} with height {}", placement.as_ref().map(|p| p.to_point(&buffer)), height ); - let priority = rng.gen_range(1..100); + let priority = rng.random_range(1..100); BlockProperties { placement, style: BlockStyle::Fixed, @@ -1668,9 +1667,9 @@ pub mod tests { blocks.extend(map.insert_blocks(block_properties, cx)); } else { blocks.shuffle(&mut rng); - let remove_count = rng.gen_range(1..=4.min(blocks.len())); + let remove_count = rng.random_range(1..=4.min(blocks.len())); let block_ids_to_remove = (0..remove_count) - .map(|_| blocks.remove(rng.gen_range(0..blocks.len()))) + .map(|_| blocks.remove(rng.random_range(0..blocks.len()))) .collect(); log::info!("removing block ids {:?}", block_ids_to_remove); map.remove_blocks(block_ids_to_remove, cx); @@ -1679,16 +1678,16 @@ pub mod tests { } 45..=79 => { let mut ranges = Vec::new(); - for _ in 0..rng.gen_range(1..=3) { + for _ in 0..rng.random_range(1..=3) { buffer.read_with(cx, |buffer, cx| { let buffer = buffer.read(cx); - let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); - let start = buffer.clip_offset(rng.gen_range(0..=end), Left); + let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right); + let start = buffer.clip_offset(rng.random_range(0..=end), Left); ranges.push(start..end); }); } - if rng.r#gen() && fold_count > 0 { + if rng.random() && fold_count > 0 { log::info!("unfolding ranges: {:?}", ranges); map.update(cx, |map, cx| { map.unfold_intersecting(ranges, true, cx); @@ -1727,8 +1726,8 @@ pub mod tests { // Line boundaries let buffer = &snapshot.buffer_snapshot; for _ in 0..5 { - let row = rng.gen_range(0..=buffer.max_point().row); - 
let column = rng.gen_range(0..=buffer.line_len(MultiBufferRow(row))); + let row = rng.random_range(0..=buffer.max_point().row); + let column = rng.random_range(0..=buffer.line_len(MultiBufferRow(row))); let point = buffer.clip_point(Point::new(row, column), Left); let (prev_buffer_bound, prev_display_bound) = snapshot.prev_line_boundary(point); @@ -1776,8 +1775,8 @@ pub mod tests { let min_point = snapshot.clip_point(DisplayPoint::new(DisplayRow(0), 0), Left); let max_point = snapshot.clip_point(snapshot.max_point(), Right); for _ in 0..5 { - let row = rng.gen_range(0..=snapshot.max_point().row().0); - let column = rng.gen_range(0..=snapshot.line_len(DisplayRow(row))); + let row = rng.random_range(0..=snapshot.max_point().row().0); + let column = rng.random_range(0..=snapshot.line_len(DisplayRow(row))); let point = snapshot.clip_point(DisplayPoint::new(DisplayRow(row), column), Left); log::info!("Moving from point {:?}", point); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index b073fe7be75c82754de6ca7773b68073b213c49c..de734e5ea62f23d2396fb03393c32e55d0e1fc7b 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -128,10 +128,10 @@ impl BlockPlacement { } } - fn sort_order(&self) -> u8 { + fn tie_break(&self) -> u8 { match self { - BlockPlacement::Above(_) => 0, - BlockPlacement::Replace(_) => 1, + BlockPlacement::Replace(_) => 0, + BlockPlacement::Above(_) => 1, BlockPlacement::Near(_) => 2, BlockPlacement::Below(_) => 3, } @@ -143,7 +143,7 @@ impl BlockPlacement { self.start() .cmp(other.start(), buffer) .then_with(|| other.end().cmp(self.end(), buffer)) - .then_with(|| self.sort_order().cmp(&other.sort_order())) + .then_with(|| self.tie_break().cmp(&other.tie_break())) } fn to_wrap_row(&self, wrap_snapshot: &WrapSnapshot) -> Option> { @@ -847,6 +847,7 @@ impl BlockMap { .start() .cmp(placement_b.start()) .then_with(|| 
placement_b.end().cmp(placement_a.end())) + .then_with(|| placement_a.tie_break().cmp(&placement_b.tie_break())) .then_with(|| { if block_a.is_header() { Ordering::Less @@ -856,7 +857,6 @@ impl BlockMap { Ordering::Equal } }) - .then_with(|| placement_a.sort_order().cmp(&placement_b.sort_order())) .then_with(|| match (block_a, block_b) { ( Block::ExcerptBoundary { @@ -2922,21 +2922,21 @@ mod tests { .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let wrap_width = if rng.gen_bool(0.2) { + let wrap_width = if rng.random_bool(0.2) { None } else { - Some(px(rng.gen_range(0.0..=100.0))) + Some(px(rng.random_range(0.0..=100.0))) }; let tab_size = 1.try_into().unwrap(); let font_size = px(14.0); - let buffer_start_header_height = rng.gen_range(1..=5); - let excerpt_header_height = rng.gen_range(1..=5); + let buffer_start_header_height = rng.random_range(1..=5); + let excerpt_header_height = rng.random_range(1..=5); log::info!("Wrap width: {:?}", wrap_width); log::info!("Excerpt Header Height: {:?}", excerpt_header_height); - let is_singleton = rng.r#gen(); + let is_singleton = rng.random(); let buffer = if is_singleton { - let len = rng.gen_range(0..10); + let len = rng.random_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); log::info!("initial singleton buffer text: {:?}", text); cx.update(|cx| MultiBuffer::build_simple(&text, cx)) @@ -2966,30 +2966,30 @@ mod tests { for _ in 0..operations { let mut buffer_edits = Vec::new(); - match rng.gen_range(0..=100) { + match rng.random_range(0..=100) { 0..=19 => { - let wrap_width = if rng.gen_bool(0.2) { + let wrap_width = if rng.random_bool(0.2) { None } else { - Some(px(rng.gen_range(0.0..=100.0))) + Some(px(rng.random_range(0.0..=100.0))) }; log::info!("Setting wrap width to {:?}", wrap_width); wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); } 20..=39 => { - let block_count = rng.gen_range(1..=5); + let block_count = rng.random_range(1..=5); let 
block_properties = (0..block_count) .map(|_| { let buffer = cx.update(|cx| buffer.read(cx).read(cx).clone()); let offset = - buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left); + buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Left); let mut min_height = 0; - let placement = match rng.gen_range(0..3) { + let placement = match rng.random_range(0..3) { 0 => { min_height = 1; let start = buffer.anchor_after(offset); let end = buffer.anchor_after(buffer.clip_offset( - rng.gen_range(offset..=buffer.len()), + rng.random_range(offset..=buffer.len()), Bias::Left, )); BlockPlacement::Replace(start..=end) @@ -2998,7 +2998,7 @@ mod tests { _ => BlockPlacement::Below(buffer.anchor_after(offset)), }; - let height = rng.gen_range(min_height..5); + let height = rng.random_range(min_height..5); BlockProperties { style: BlockStyle::Fixed, placement, @@ -3040,7 +3040,7 @@ mod tests { } } 40..=59 if !block_map.custom_blocks.is_empty() => { - let block_count = rng.gen_range(1..=4.min(block_map.custom_blocks.len())); + let block_count = rng.random_range(1..=4.min(block_map.custom_blocks.len())); let block_ids_to_remove = block_map .custom_blocks .choose_multiple(&mut rng, block_count) @@ -3095,8 +3095,8 @@ mod tests { let mut folded_count = folded_buffers.len(); let mut unfolded_count = unfolded_buffers.len(); - let fold = !unfolded_buffers.is_empty() && rng.gen_bool(0.5); - let unfold = !folded_buffers.is_empty() && rng.gen_bool(0.5); + let fold = !unfolded_buffers.is_empty() && rng.random_bool(0.5); + let unfold = !folded_buffers.is_empty() && rng.random_bool(0.5); if !fold && !unfold { log::info!( "Noop fold/unfold operation. 
Unfolded buffers: {unfolded_count}, folded buffers: {folded_count}" @@ -3107,7 +3107,7 @@ mod tests { buffer.update(cx, |buffer, cx| { if fold { let buffer_to_fold = - unfolded_buffers[rng.gen_range(0..unfolded_buffers.len())]; + unfolded_buffers[rng.random_range(0..unfolded_buffers.len())]; log::info!("Folding {buffer_to_fold:?}"); let related_excerpts = buffer_snapshot .excerpts() @@ -3133,7 +3133,7 @@ mod tests { } if unfold { let buffer_to_unfold = - folded_buffers[rng.gen_range(0..folded_buffers.len())]; + folded_buffers[rng.random_range(0..folded_buffers.len())]; log::info!("Unfolding {buffer_to_unfold:?}"); unfolded_count += 1; folded_count -= 1; @@ -3146,7 +3146,7 @@ mod tests { } _ => { buffer.update(cx, |buffer, cx| { - let mutation_count = rng.gen_range(1..=5); + let mutation_count = rng.random_range(1..=5); let subscription = buffer.subscribe(); buffer.randomly_mutate(&mut rng, mutation_count, cx); buffer_snapshot = buffer.snapshot(cx); @@ -3331,7 +3331,7 @@ mod tests { ); for start_row in 0..expected_row_count { - let end_row = rng.gen_range(start_row + 1..=expected_row_count); + let end_row = rng.random_range(start_row + 1..=expected_row_count); let mut expected_text = expected_lines[start_row..end_row].join("\n"); if end_row < expected_row_count { expected_text.push('\n'); @@ -3426,8 +3426,8 @@ mod tests { ); for _ in 0..10 { - let end_row = rng.gen_range(1..=expected_lines.len()); - let start_row = rng.gen_range(0..end_row); + let end_row = rng.random_range(1..=expected_lines.len()); + let start_row = rng.random_range(0..end_row); let mut expected_longest_rows_in_range = vec![]; let mut longest_line_len_in_range = 0; diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 42f46fb74969301007d19032f1b96377d141a724..6d160d0d6d58dbeeac89749aeabcedef6010c1c3 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1771,9 +1771,9 @@ mod tests { .map(|i| 
i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let len = rng.gen_range(0..10); + let len = rng.random_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); - let buffer = if rng.r#gen() { + let buffer = if rng.random() { MultiBuffer::build_simple(&text, cx) } else { MultiBuffer::build_random(&mut rng, cx) @@ -1790,7 +1790,7 @@ mod tests { log::info!("text: {:?}", buffer_snapshot.text()); let mut buffer_edits = Vec::new(); let mut inlay_edits = Vec::new(); - match rng.gen_range(0..=100) { + match rng.random_range(0..=100) { 0..=39 => { snapshot_edits.extend(map.randomly_mutate(&mut rng)); } @@ -1800,7 +1800,7 @@ mod tests { } _ => buffer.update(cx, |buffer, cx| { let subscription = buffer.subscribe(); - let edit_count = rng.gen_range(1..=5); + let edit_count = rng.random_range(1..=5); buffer.randomly_mutate(&mut rng, edit_count, cx); buffer_snapshot = buffer.snapshot(cx); let edits = subscription.consume().into_inner(); @@ -1917,10 +1917,14 @@ mod tests { } for _ in 0..5 { - let mut start = snapshot - .clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Left); - let mut end = snapshot - .clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Right); + let mut start = snapshot.clip_offset( + FoldOffset(rng.random_range(0..=snapshot.len().0)), + Bias::Left, + ); + let mut end = snapshot.clip_offset( + FoldOffset(rng.random_range(0..=snapshot.len().0)), + Bias::Right, + ); if start > end { mem::swap(&mut start, &mut end); } @@ -1975,8 +1979,8 @@ mod tests { for _ in 0..5 { let end = - buffer_snapshot.clip_offset(rng.gen_range(0..=buffer_snapshot.len()), Right); - let start = buffer_snapshot.clip_offset(rng.gen_range(0..=end), Left); + buffer_snapshot.clip_offset(rng.random_range(0..=buffer_snapshot.len()), Right); + let start = buffer_snapshot.clip_offset(rng.random_range(0..=end), Left); let expected_folds = map .snapshot .folds @@ -2001,10 +2005,10 @@ mod tests { let text = snapshot.text(); for 
_ in 0..5 { - let start_row = rng.gen_range(0..=snapshot.max_point().row()); - let start_column = rng.gen_range(0..=snapshot.line_len(start_row)); - let end_row = rng.gen_range(0..=snapshot.max_point().row()); - let end_column = rng.gen_range(0..=snapshot.line_len(end_row)); + let start_row = rng.random_range(0..=snapshot.max_point().row()); + let start_column = rng.random_range(0..=snapshot.line_len(start_row)); + let end_row = rng.random_range(0..=snapshot.max_point().row()); + let end_column = rng.random_range(0..=snapshot.line_len(end_row)); let mut start = snapshot.clip_point(FoldPoint::new(start_row, start_column), Bias::Left); let mut end = snapshot.clip_point(FoldPoint::new(end_row, end_column), Bias::Right); @@ -2109,17 +2113,17 @@ mod tests { rng: &mut impl Rng, ) -> Vec<(FoldSnapshot, Vec)> { let mut snapshot_edits = Vec::new(); - match rng.gen_range(0..=100) { + match rng.random_range(0..=100) { 0..=39 if !self.snapshot.folds.is_empty() => { let inlay_snapshot = self.snapshot.inlay_snapshot.clone(); let buffer = &inlay_snapshot.buffer; let mut to_unfold = Vec::new(); - for _ in 0..rng.gen_range(1..=3) { - let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); - let start = buffer.clip_offset(rng.gen_range(0..=end), Left); + for _ in 0..rng.random_range(1..=3) { + let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right); + let start = buffer.clip_offset(rng.random_range(0..=end), Left); to_unfold.push(start..end); } - let inclusive = rng.r#gen(); + let inclusive = rng.random(); log::info!("unfolding {:?} (inclusive: {})", to_unfold, inclusive); let (mut writer, snapshot, edits) = self.write(inlay_snapshot, vec![]); snapshot_edits.push((snapshot, edits)); @@ -2130,9 +2134,9 @@ mod tests { let inlay_snapshot = self.snapshot.inlay_snapshot.clone(); let buffer = &inlay_snapshot.buffer; let mut to_fold = Vec::new(); - for _ in 0..rng.gen_range(1..=2) { - let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); - let 
start = buffer.clip_offset(rng.gen_range(0..=end), Left); + for _ in 0..rng.random_range(1..=2) { + let end = buffer.clip_offset(rng.random_range(0..=buffer.len()), Right); + let start = buffer.clip_offset(rng.random_range(0..=end), Left); to_fold.push((start..end, FoldPlaceholder::test())); } log::info!("folding {:?}", to_fold); diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 3db9d10fdc74f418ecd4ea682dde91185130cd46..e00ffdbf2c6ed7ee7288b2371481cb1f1b28bc92 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -719,14 +719,18 @@ impl InlayMap { let mut to_remove = Vec::new(); let mut to_insert = Vec::new(); let snapshot = &mut self.snapshot; - for i in 0..rng.gen_range(1..=5) { - if self.inlays.is_empty() || rng.r#gen() { + for i in 0..rng.random_range(1..=5) { + if self.inlays.is_empty() || rng.random() { let position = snapshot.buffer.random_byte_range(0, rng).start; - let bias = if rng.r#gen() { Bias::Left } else { Bias::Right }; - let len = if rng.gen_bool(0.01) { + let bias = if rng.random() { + Bias::Left + } else { + Bias::Right + }; + let len = if rng.random_bool(0.01) { 0 } else { - rng.gen_range(1..=5) + rng.random_range(1..=5) }; let text = util::RandomCharIter::new(&mut *rng) .filter(|ch| *ch != '\r') @@ -1665,8 +1669,8 @@ mod tests { .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let len = rng.gen_range(0..30); - let buffer = if rng.r#gen() { + let len = rng.random_range(0..30); + let buffer = if rng.random() { let text = util::RandomCharIter::new(&mut rng) .take(len) .collect::(); @@ -1683,7 +1687,7 @@ mod tests { let mut prev_inlay_text = inlay_snapshot.text(); let mut buffer_edits = Vec::new(); - match rng.gen_range(0..=100) { + match rng.random_range(0..=100) { 0..=50 => { let (snapshot, edits) = inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng); log::info!("mutated text: {:?}", snapshot.text()); @@ 
-1691,7 +1695,7 @@ mod tests { } _ => buffer.update(cx, |buffer, cx| { let subscription = buffer.subscribe(); - let edit_count = rng.gen_range(1..=5); + let edit_count = rng.random_range(1..=5); buffer.randomly_mutate(&mut rng, edit_count, cx); buffer_snapshot = buffer.snapshot(cx); let edits = subscription.consume().into_inner(); @@ -1740,7 +1744,7 @@ mod tests { } let mut text_highlights = TextHighlights::default(); - let text_highlight_count = rng.gen_range(0_usize..10); + let text_highlight_count = rng.random_range(0_usize..10); let mut text_highlight_ranges = (0..text_highlight_count) .map(|_| buffer_snapshot.random_byte_range(0, &mut rng)) .collect::>(); @@ -1762,10 +1766,10 @@ mod tests { let mut inlay_highlights = InlayHighlights::default(); if !inlays.is_empty() { - let inlay_highlight_count = rng.gen_range(0..inlays.len()); + let inlay_highlight_count = rng.random_range(0..inlays.len()); let mut inlay_indices = BTreeSet::default(); while inlay_indices.len() < inlay_highlight_count { - inlay_indices.insert(rng.gen_range(0..inlays.len())); + inlay_indices.insert(rng.random_range(0..inlays.len())); } let new_highlights = TreeMap::from_ordered_entries( inlay_indices @@ -1782,8 +1786,8 @@ mod tests { }), n => { let inlay_text = inlay.text.to_string(); - let mut highlight_end = rng.gen_range(1..n); - let mut highlight_start = rng.gen_range(0..highlight_end); + let mut highlight_end = rng.random_range(1..n); + let mut highlight_start = rng.random_range(0..highlight_end); while !inlay_text.is_char_boundary(highlight_end) { highlight_end += 1; } @@ -1805,9 +1809,9 @@ mod tests { } for _ in 0..5 { - let mut end = rng.gen_range(0..=inlay_snapshot.len().0); + let mut end = rng.random_range(0..=inlay_snapshot.len().0); end = expected_text.clip_offset(end, Bias::Right); - let mut start = rng.gen_range(0..=end); + let mut start = rng.random_range(0..=end); start = expected_text.clip_offset(start, Bias::Right); let range = InlayOffset(start)..InlayOffset(end); diff --git 
a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 6f5df9bb8e658b95260dde4feb2b00c177c98520..523e777d9113b203dafbb5e151ba22a01394c956 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -736,9 +736,9 @@ mod tests { #[gpui::test(iterations = 100)] fn test_random_tabs(cx: &mut gpui::App, mut rng: StdRng) { - let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap(); - let len = rng.gen_range(0..30); - let buffer = if rng.r#gen() { + let tab_size = NonZeroU32::new(rng.random_range(1..=4)).unwrap(); + let len = rng.random_range(0..30); + let buffer = if rng.random() { let text = util::RandomCharIter::new(&mut rng) .take(len) .collect::(); @@ -769,11 +769,11 @@ mod tests { ); for _ in 0..5 { - let end_row = rng.gen_range(0..=text.max_point().row); - let end_column = rng.gen_range(0..=text.line_len(end_row)); + let end_row = rng.random_range(0..=text.max_point().row); + let end_column = rng.random_range(0..=text.line_len(end_row)); let mut end = TabPoint(text.clip_point(Point::new(end_row, end_column), Bias::Right)); - let start_row = rng.gen_range(0..=text.max_point().row); - let start_column = rng.gen_range(0..=text.line_len(start_row)); + let start_row = rng.random_range(0..=text.max_point().row); + let start_column = rng.random_range(0..=text.line_len(start_row)); let mut start = TabPoint(text.clip_point(Point::new(start_row, start_column), Bias::Left)); if start > end { diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 500ec3a0bb77f8a8332e86485b81b357644e6d23..127293726a59d1945e8f9dcbfcd2eb3da0cc2290 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1215,12 +1215,12 @@ mod tests { .unwrap_or(10); let text_system = cx.read(|cx| cx.text_system().clone()); - let mut wrap_width = if rng.gen_bool(0.1) { + let mut wrap_width = if rng.random_bool(0.1) { None } else { 
- Some(px(rng.gen_range(0.0..=1000.0))) + Some(px(rng.random_range(0.0..=1000.0))) }; - let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap(); + let tab_size = NonZeroU32::new(rng.random_range(1..=4)).unwrap(); let font = test_font(); let _font_id = text_system.resolve_font(&font); @@ -1230,10 +1230,10 @@ mod tests { log::info!("Wrap width: {:?}", wrap_width); let buffer = cx.update(|cx| { - if rng.r#gen() { + if rng.random() { MultiBuffer::build_random(&mut rng, cx) } else { - let len = rng.gen_range(0..10); + let len = rng.random_range(0..10); let text = util::RandomCharIter::new(&mut rng) .take(len) .collect::(); @@ -1281,12 +1281,12 @@ mod tests { log::info!("{} ==============================================", _i); let mut buffer_edits = Vec::new(); - match rng.gen_range(0..=100) { + match rng.random_range(0..=100) { 0..=19 => { - wrap_width = if rng.gen_bool(0.2) { + wrap_width = if rng.random_bool(0.2) { None } else { - Some(px(rng.gen_range(0.0..=1000.0))) + Some(px(rng.random_range(0.0..=1000.0))) }; log::info!("Setting wrap width to {:?}", wrap_width); wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); @@ -1317,7 +1317,7 @@ mod tests { _ => { buffer.update(cx, |buffer, cx| { let subscription = buffer.subscribe(); - let edit_count = rng.gen_range(1..=5); + let edit_count = rng.random_range(1..=5); buffer.randomly_mutate(&mut rng, edit_count, cx); buffer_snapshot = buffer.snapshot(cx); buffer_edits.extend(subscription.consume()); @@ -1341,7 +1341,7 @@ mod tests { snapshot.verify_chunks(&mut rng); edits.push((snapshot, wrap_edits)); - if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) { + if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.random_bool(0.4) { log::info!("Waiting for wrapping to finish"); while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) { notifications.next().await.unwrap(); @@ -1479,8 +1479,8 @@ mod tests { impl WrapSnapshot { fn verify_chunks(&mut self, rng: &mut impl Rng) 
{ for _ in 0..5 { - let mut end_row = rng.gen_range(0..=self.max_point().row()); - let start_row = rng.gen_range(0..=end_row); + let mut end_row = rng.random_range(0..=self.max_point().row()); + let start_row = rng.random_range(0..=end_row); end_row += 1; let mut expected_text = self.text_chunks(start_row).collect::(); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index fe5b2f83c2034822d4f36d3b66bbcea3b6b7322c..37951074d15bbb8f34bcbaba9d839eae5d34cf1e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -164,7 +164,7 @@ use project::{ DiagnosticSeverity, GitGutterSetting, GoToDiagnosticSeverityFilter, ProjectSettings, }, }; -use rand::{seq::SliceRandom, thread_rng}; +use rand::seq::SliceRandom; use rpc::{ErrorCode, ErrorExt, proto::PeerId}; use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide}; use selections_collection::{ @@ -10971,7 +10971,7 @@ impl Editor { } pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context) { - self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut thread_rng())) + self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut rand::rng())) } fn manipulate_lines( diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 27a9b8870383b7f1136e31028bacedc8744e0650..51719048ef81cf273bc58e7d810d66d454a04805 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -1107,7 +1107,7 @@ mod tests { init_test(cx); let fs = FakeFs::new(cx.executor()); - let buffer_initial_text_len = rng.gen_range(5..15); + let buffer_initial_text_len = rng.random_range(5..15); let mut buffer_initial_text = Rope::from( RandomCharIter::new(&mut rng) .take(buffer_initial_text_len) @@ -1159,7 +1159,7 @@ mod tests { git_blame.update(cx, |blame, cx| blame.check_invariants(cx)); for _ in 0..operations { - match rng.gen_range(0..100) { + match rng.random_range(0..100) { 0..=19 => { 
log::info!("quiescing"); cx.executor().run_until_parked(); @@ -1202,8 +1202,8 @@ mod tests { let mut blame_entries = Vec::new(); for ix in 0..5 { if last_row < max_row { - let row_start = rng.gen_range(last_row..max_row); - let row_end = rng.gen_range(row_start + 1..cmp::min(row_start + 3, max_row) + 1); + let row_start = rng.random_range(last_row..max_row); + let row_end = rng.random_range(row_start + 1..cmp::min(row_start + 3, max_row) + 1); blame_entries.push(blame_entry(&ix.to_string(), row_start..row_end)); last_row = row_end; } else { diff --git a/crates/gpui/examples/data_table.rs b/crates/gpui/examples/data_table.rs index 5e82b08839de5f3b98ec3267b22a3bb8586fa02c..10e22828a8e8f5c8778cbcb06a087d4bdfac3adc 100644 --- a/crates/gpui/examples/data_table.rs +++ b/crates/gpui/examples/data_table.rs @@ -38,58 +38,58 @@ pub struct Quote { impl Quote { pub fn random() -> Self { use rand::Rng; - let mut rng = rand::thread_rng(); + let mut rng = rand::rng(); // simulate a base price in a realistic range - let prev_close = rng.gen_range(100.0..200.0); - let change = rng.gen_range(-5.0..5.0); + let prev_close = rng.random_range(100.0..200.0); + let change = rng.random_range(-5.0..5.0); let last_done = prev_close + change; - let open = prev_close + rng.gen_range(-3.0..3.0); - let high = (prev_close + rng.gen_range::(0.0..10.0)).max(open); - let low = (prev_close - rng.gen_range::(0.0..10.0)).min(open); - let timestamp = Duration::from_secs(rng.gen_range(0..86400)); - let volume = rng.gen_range(1_000_000..100_000_000); + let open = prev_close + rng.random_range(-3.0..3.0); + let high = (prev_close + rng.random_range::(0.0..10.0)).max(open); + let low = (prev_close - rng.random_range::(0.0..10.0)).min(open); + let timestamp = Duration::from_secs(rng.random_range(0..86400)); + let volume = rng.random_range(1_000_000..100_000_000); let turnover = last_done * volume as f64; let symbol = { let mut ticker = String::new(); - if rng.gen_bool(0.5) { + if rng.random_bool(0.5) { 
ticker.push_str(&format!( "{:03}.{}", - rng.gen_range(100..1000), - rng.gen_range(0..10) + rng.random_range(100..1000), + rng.random_range(0..10) )); } else { ticker.push_str(&format!( "{}{}", - rng.gen_range('A'..='Z'), - rng.gen_range('A'..='Z') + rng.random_range('A'..='Z'), + rng.random_range('A'..='Z') )); } - ticker.push_str(&format!(".{}", rng.gen_range('A'..='Z'))); + ticker.push_str(&format!(".{}", rng.random_range('A'..='Z'))); ticker }; let name = format!( "{} {} - #{}", symbol, - rng.gen_range(1..100), - rng.gen_range(10000..100000) + rng.random_range(1..100), + rng.random_range(10000..100000) ); - let ttm = rng.gen_range(0.0..10.0); - let market_cap = rng.gen_range(1_000_000.0..10_000_000.0); - let float_cap = market_cap + rng.gen_range(1_000.0..10_000.0); - let shares = rng.gen_range(100.0..1000.0); + let ttm = rng.random_range(0.0..10.0); + let market_cap = rng.random_range(1_000_000.0..10_000_000.0); + let float_cap = market_cap + rng.random_range(1_000.0..10_000.0); + let shares = rng.random_range(100.0..1000.0); let pb = market_cap / shares; let pe = market_cap / shares; let eps = market_cap / shares; - let dividend = rng.gen_range(0.0..10.0); - let dividend_yield = rng.gen_range(0.0..10.0); - let dividend_per_share = rng.gen_range(0.0..10.0); + let dividend = rng.random_range(0.0..10.0); + let dividend_yield = rng.random_range(0.0..10.0); + let dividend_per_share = rng.random_range(0.0..10.0); let dividend_date = SharedString::new(format!( "{}-{}-{}", - rng.gen_range(2000..2023), - rng.gen_range(1..12), - rng.gen_range(1..28) + rng.random_range(2000..2023), + rng.random_range(1..12), + rng.random_range(1..28) )); - let dividend_payment = rng.gen_range(0.0..10.0); + let dividend_payment = rng.random_range(0.0..10.0); Self { name: name.into(), diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index c65c045f6bc16310d3772825147ad570f209fd99..b3d342b09bf1dceb27413d3ec24fbcc0d2f541e9 100644 --- 
a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -144,7 +144,7 @@ impl TestAppContext { /// Create a single TestAppContext, for non-multi-client tests pub fn single() -> Self { - let dispatcher = TestDispatcher::new(StdRng::from_entropy()); + let dispatcher = TestDispatcher::new(StdRng::seed_from_u64(0)); Self::build(dispatcher, None) } diff --git a/crates/gpui/src/bounds_tree.rs b/crates/gpui/src/bounds_tree.rs index 03f83b95035489bd86201c4d64c15f5a12ed50ea..a96bfe55b9ff431a96da7bf42692288264eb184c 100644 --- a/crates/gpui/src/bounds_tree.rs +++ b/crates/gpui/src/bounds_tree.rs @@ -309,12 +309,12 @@ mod tests { let mut expected_quads: Vec<(Bounds, u32)> = Vec::new(); // Insert a random number of random AABBs into the tree. - let num_bounds = rng.gen_range(1..=max_bounds); + let num_bounds = rng.random_range(1..=max_bounds); for _ in 0..num_bounds { - let min_x: f32 = rng.gen_range(-100.0..100.0); - let min_y: f32 = rng.gen_range(-100.0..100.0); - let width: f32 = rng.gen_range(0.0..50.0); - let height: f32 = rng.gen_range(0.0..50.0); + let min_x: f32 = rng.random_range(-100.0..100.0); + let min_y: f32 = rng.random_range(-100.0..100.0); + let width: f32 = rng.random_range(0.0..50.0); + let height: f32 = rng.random_range(0.0..50.0); let bounds = Bounds { origin: Point { x: min_x, y: min_y }, size: Size { width, height }, diff --git a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index 4ce62c4bdcae60d517dd88501cb89af8fee2c9bc..e19710effda9299c6eb72e8c4acc2f615ac077ee 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -118,7 +118,7 @@ impl TestDispatcher { } YieldNow { - count: self.state.lock().random.gen_range(0..10), + count: self.state.lock().random.random_range(0..10), } } @@ -151,11 +151,11 @@ impl TestDispatcher { if deprioritized_background_len == 0 { return false; } - let ix = 
state.random.gen_range(0..deprioritized_background_len); + let ix = state.random.random_range(0..deprioritized_background_len); main_thread = false; runnable = state.deprioritized_background.swap_remove(ix); } else { - main_thread = state.random.gen_ratio( + main_thread = state.random.random_ratio( foreground_len as u32, (foreground_len + background_len) as u32, ); @@ -170,7 +170,7 @@ impl TestDispatcher { .pop_front() .unwrap(); } else { - let ix = state.random.gen_range(0..background_len); + let ix = state.random.random_range(0..background_len); runnable = state.background.swap_remove(ix); }; }; @@ -241,7 +241,7 @@ impl TestDispatcher { pub fn gen_block_on_ticks(&self) -> usize { let mut lock = self.state.lock(); let block_on_ticks = lock.block_on_ticks.clone(); - lock.random.gen_range(block_on_ticks) + lock.random.random_range(block_on_ticks) } } diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index 96db8077c4b7b139bf2c724a3502a6e4bd194f9f..4d0e6ea56f7d90f303f6634de1239a6a4542429a 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -94,7 +94,11 @@ impl WindowsPlatform { } let directx_devices = DirectXDevices::new().context("Creating DirectX devices")?; let (main_sender, main_receiver) = flume::unbounded::(); - let validation_number = rand::random::(); + let validation_number = if usize::BITS == 64 { + rand::random::() as usize + } else { + rand::random::() as usize + }; let raw_window_handles = Arc::new(RwLock::new(SmallVec::new())); let text_system = Arc::new( DirectWriteTextSystem::new(&directx_devices) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1f056aacc57338d65705e5b7f4bd91085c6142b4..c86787e1f9de8cf31037187dc667e2a7e428cea9 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2842,12 +2842,12 @@ impl Buffer { let new_start = last_end.map_or(0, |last_end| last_end + 1); let 
mut range = self.random_byte_range(new_start, rng); - if rng.gen_bool(0.2) { + if rng.random_bool(0.2) { mem::swap(&mut range.start, &mut range.end); } last_end = Some(range.end); - let new_text_len = rng.gen_range(0..10); + let new_text_len = rng.random_range(0..10); let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); new_text = new_text.to_uppercase(); diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index ce65afa6288767766fa9a1da5c3a24f9ca86e580..5b88112c956e5466748fc349825a78f6232e540e 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -3013,7 +3013,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let base_text_len = rng.gen_range(0..10); + let base_text_len = rng.random_range(0..10); let base_text = RandomCharIter::new(&mut rng) .take(base_text_len) .collect::(); @@ -3022,7 +3022,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { let network = Arc::new(Mutex::new(Network::new(rng.clone()))); let base_buffer = cx.new(|cx| Buffer::local(base_text.as_str(), cx)); - for i in 0..rng.gen_range(min_peers..=max_peers) { + for i in 0..rng.random_range(min_peers..=max_peers) { let buffer = cx.new(|cx| { let state = base_buffer.read(cx).to_proto(cx); let ops = cx @@ -3035,7 +3035,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { .map(|op| proto::deserialize_operation(op).unwrap()), cx, ); - buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); + buffer.set_group_interval(Duration::from_millis(rng.random_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.entity(), move |buffer, _, event, _| { if let BufferEvent::Operation { @@ -3066,11 +3066,11 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { let mut next_diagnostic_id = 0; let mut active_selections = BTreeMap::default(); loop { - 
let replica_index = rng.gen_range(0..replica_ids.len()); + let replica_index = rng.random_range(0..replica_ids.len()); let replica_id = replica_ids[replica_index]; let buffer = &mut buffers[replica_index]; let mut new_buffer = None; - match rng.gen_range(0..100) { + match rng.random_range(0..100) { 0..=29 if mutation_count != 0 => { buffer.update(cx, |buffer, cx| { buffer.start_transaction_at(now); @@ -3082,13 +3082,13 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { } 30..=39 if mutation_count != 0 => { buffer.update(cx, |buffer, cx| { - if rng.gen_bool(0.2) { + if rng.random_bool(0.2) { log::info!("peer {} clearing active selections", replica_id); active_selections.remove(&replica_id); buffer.remove_active_selections(cx); } else { let mut selections = Vec::new(); - for id in 0..rng.gen_range(1..=5) { + for id in 0..rng.random_range(1..=5) { let range = buffer.random_byte_range(0, &mut rng); selections.push(Selection { id, @@ -3111,7 +3111,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { mutation_count -= 1; } 40..=49 if mutation_count != 0 && replica_id == 0 => { - let entry_count = rng.gen_range(1..=5); + let entry_count = rng.random_range(1..=5); buffer.update(cx, |buffer, cx| { let diagnostics = DiagnosticSet::new( (0..entry_count).map(|_| { @@ -3166,7 +3166,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { new_buffer.replica_id(), new_buffer.text() ); - new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); + new_buffer.set_group_interval(Duration::from_millis(rng.random_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.entity(), move |buffer, _, event, _| { if let BufferEvent::Operation { @@ -3238,7 +3238,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { _ => {} } - now += Duration::from_millis(rng.gen_range(0..=200)); + now += Duration::from_millis(rng.random_range(0..=200)); buffers.extend(new_buffer); for buffer in &buffers { @@ -3320,23 +3320,23 @@ 
fn test_trailing_whitespace_ranges(mut rng: StdRng) { // Generate a random multi-line string containing // some lines with trailing whitespace. let mut text = String::new(); - for _ in 0..rng.gen_range(0..16) { - for _ in 0..rng.gen_range(0..36) { - text.push(match rng.gen_range(0..10) { + for _ in 0..rng.random_range(0..16) { + for _ in 0..rng.random_range(0..36) { + text.push(match rng.random_range(0..10) { 0..=1 => ' ', 3 => '\t', - _ => rng.gen_range('a'..='z'), + _ => rng.random_range('a'..='z'), }); } text.push('\n'); } - match rng.gen_range(0..10) { + match rng.random_range(0..10) { // sometimes remove the last newline 0..=1 => drop(text.pop()), // // sometimes add extra newlines - 2..=3 => text.push_str(&"\n".repeat(rng.gen_range(1..5))), + 2..=3 => text.push_str(&"\n".repeat(rng.random_range(1..5))), _ => {} } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index a2f28215b4655b12095da96c033d23cb3f13eb77..4535d57d7747cbe747cf55a0a0f0cd30540e6af7 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -3580,7 +3580,7 @@ impl MultiBuffer { pub fn build_random(rng: &mut impl rand::Rng, cx: &mut gpui::App) -> Entity { cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); - let mutation_count = rng.gen_range(1..=5); + let mutation_count = rng.random_range(1..=5); multibuffer.randomly_edit_excerpts(rng, mutation_count, cx); multibuffer }) @@ -3603,16 +3603,17 @@ impl MultiBuffer { } let new_start = last_end.map_or(0, |last_end| last_end + 1); - let end = snapshot.clip_offset(rng.gen_range(new_start..=snapshot.len()), Bias::Right); - let start = snapshot.clip_offset(rng.gen_range(new_start..=end), Bias::Right); + let end = + snapshot.clip_offset(rng.random_range(new_start..=snapshot.len()), Bias::Right); + let start = snapshot.clip_offset(rng.random_range(new_start..=end), Bias::Right); last_end = Some(end); let mut range = start..end; - if 
rng.gen_bool(0.2) { + if rng.random_bool(0.2) { mem::swap(&mut range.start, &mut range.end); } - let new_text_len = rng.gen_range(0..10); + let new_text_len = rng.random_range(0..10); let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); edits.push((range, new_text.into())); @@ -3639,18 +3640,18 @@ impl MultiBuffer { let mut buffers = Vec::new(); for _ in 0..mutation_count { - if rng.gen_bool(0.05) { + if rng.random_bool(0.05) { log::info!("Clearing multi-buffer"); self.clear(cx); continue; - } else if rng.gen_bool(0.1) && !self.excerpt_ids().is_empty() { + } else if rng.random_bool(0.1) && !self.excerpt_ids().is_empty() { let ids = self.excerpt_ids(); let mut excerpts = HashSet::default(); - for _ in 0..rng.gen_range(0..ids.len()) { + for _ in 0..rng.random_range(0..ids.len()) { excerpts.extend(ids.choose(rng).copied()); } - let line_count = rng.gen_range(0..5); + let line_count = rng.random_range(0..5); log::info!("Expanding excerpts {excerpts:?} by {line_count} lines"); @@ -3664,8 +3665,8 @@ impl MultiBuffer { } let excerpt_ids = self.excerpt_ids(); - if excerpt_ids.is_empty() || (rng.r#gen() && excerpt_ids.len() < max_excerpts) { - let buffer_handle = if rng.r#gen() || self.buffers.borrow().is_empty() { + if excerpt_ids.is_empty() || (rng.random() && excerpt_ids.len() < max_excerpts) { + let buffer_handle = if rng.random() || self.buffers.borrow().is_empty() { let text = RandomCharIter::new(&mut *rng).take(10).collect::(); buffers.push(cx.new(|cx| Buffer::local(text, cx))); let buffer = buffers.last().unwrap().read(cx); @@ -3687,11 +3688,11 @@ impl MultiBuffer { let buffer = buffer_handle.read(cx); let buffer_text = buffer.text(); - let ranges = (0..rng.gen_range(0..5)) + let ranges = (0..rng.random_range(0..5)) .map(|_| { let end_ix = - buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); - let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + 
buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Right); + let start_ix = buffer.clip_offset(rng.random_range(0..=end_ix), Bias::Left); ExcerptRange::new(start_ix..end_ix) }) .collect::>(); @@ -3708,7 +3709,7 @@ impl MultiBuffer { let excerpt_id = self.push_excerpts(buffer_handle.clone(), ranges, cx); log::info!("Inserted with ids: {:?}", excerpt_id); } else { - let remove_count = rng.gen_range(1..=excerpt_ids.len()); + let remove_count = rng.random_range(1..=excerpt_ids.len()); let mut excerpts_to_remove = excerpt_ids .choose_multiple(rng, remove_count) .cloned() @@ -3730,7 +3731,7 @@ impl MultiBuffer { ) { use rand::prelude::*; - if rng.gen_bool(0.7) || self.singleton { + if rng.random_bool(0.7) || self.singleton { let buffer = self .buffers .borrow() @@ -3740,7 +3741,7 @@ impl MultiBuffer { if let Some(buffer) = buffer { buffer.update(cx, |buffer, cx| { - if rng.r#gen() { + if rng.random() { buffer.randomly_edit(rng, mutation_count, cx); } else { buffer.randomly_undo_redo(rng, cx); @@ -6388,8 +6389,8 @@ impl MultiBufferSnapshot { #[cfg(any(test, feature = "test-support"))] impl MultiBufferSnapshot { pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range { - let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); - let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right); + let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right); + let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right); start..end } diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 61b4b0520f23ed50b3b36374710b52c78c37080f..efc622b0172a13ae9a6ad3bf366904706a36580f 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -2491,12 +2491,12 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { for _ in 
0..operations { let snapshot = buf.update(cx, |buf, _| buf.snapshot()); - let num_ranges = rng.gen_range(0..=10); + let num_ranges = rng.random_range(0..=10); let max_row = snapshot.max_point().row; let mut ranges = (0..num_ranges) .map(|_| { - let start = rng.gen_range(0..max_row); - let end = rng.gen_range(start + 1..max_row + 1); + let start = rng.random_range(0..max_row); + let end = rng.random_range(start + 1..max_row + 1); Point::row_range(start..end) }) .collect::>(); @@ -2562,11 +2562,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let mut needs_diff_calculation = false; for _ in 0..operations { - match rng.gen_range(0..100) { + match rng.random_range(0..100) { 0..=14 if !buffers.is_empty() => { let buffer = buffers.choose(&mut rng).unwrap(); buffer.update(cx, |buf, cx| { - let edit_count = rng.gen_range(1..5); + let edit_count = rng.random_range(1..5); buf.randomly_edit(&mut rng, edit_count, cx); log::info!("buffer text:\n{}", buf.text()); needs_diff_calculation = true; @@ -2577,11 +2577,11 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { multibuffer.update(cx, |multibuffer, cx| { let ids = multibuffer.excerpt_ids(); let mut excerpts = HashSet::default(); - for _ in 0..rng.gen_range(0..ids.len()) { + for _ in 0..rng.random_range(0..ids.len()) { excerpts.extend(ids.choose(&mut rng).copied()); } - let line_count = rng.gen_range(0..5); + let line_count = rng.random_range(0..5); let excerpt_ixs = excerpts .iter() @@ -2600,7 +2600,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } 20..=29 if !reference.excerpts.is_empty() => { let mut ids_to_remove = vec![]; - for _ in 0..rng.gen_range(1..=3) { + for _ in 0..rng.random_range(1..=3) { let Some(excerpt) = reference.excerpts.choose(&mut rng) else { break; }; @@ -2620,8 +2620,12 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let multibuffer = multibuffer.read_with(cx, |multibuffer, cx| 
multibuffer.snapshot(cx)); let offset = - multibuffer.clip_offset(rng.gen_range(0..=multibuffer.len()), Bias::Left); - let bias = if rng.r#gen() { Bias::Left } else { Bias::Right }; + multibuffer.clip_offset(rng.random_range(0..=multibuffer.len()), Bias::Left); + let bias = if rng.random() { + Bias::Left + } else { + Bias::Right + }; log::info!("Creating anchor at {} with bias {:?}", offset, bias); anchors.push(multibuffer.anchor_at(offset, bias)); anchors.sort_by(|a, b| a.cmp(b, &multibuffer)); @@ -2654,7 +2658,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { 45..=55 if !reference.excerpts.is_empty() => { multibuffer.update(cx, |multibuffer, cx| { let snapshot = multibuffer.snapshot(cx); - let excerpt_ix = rng.gen_range(0..reference.excerpts.len()); + let excerpt_ix = rng.random_range(0..reference.excerpts.len()); let excerpt = &reference.excerpts[excerpt_ix]; let start = excerpt.range.start; let end = excerpt.range.end; @@ -2691,7 +2695,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { }); } _ => { - let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { + let buffer_handle = if buffers.is_empty() || rng.random_bool(0.4) { let mut base_text = util::RandomCharIter::new(&mut rng) .take(256) .collect::(); @@ -2708,7 +2712,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { buffers.choose(&mut rng).unwrap() }; - let prev_excerpt_ix = rng.gen_range(0..=reference.excerpts.len()); + let prev_excerpt_ix = rng.random_range(0..=reference.excerpts.len()); let prev_excerpt_id = reference .excerpts .get(prev_excerpt_ix) @@ -2716,8 +2720,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let excerpt_ix = (prev_excerpt_ix + 1).min(reference.excerpts.len()); let (range, anchor_range) = buffer_handle.read_with(cx, |buffer, _| { - let end_row = rng.gen_range(0..=buffer.max_point().row); - let start_row = rng.gen_range(0..=end_row); + let end_row = 
rng.random_range(0..=buffer.max_point().row); + let start_row = rng.random_range(0..=end_row); let end_ix = buffer.point_to_offset(Point::new(end_row, 0)); let start_ix = buffer.point_to_offset(Point::new(start_row, 0)); let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); @@ -2766,7 +2770,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } } - if rng.gen_bool(0.3) { + if rng.random_bool(0.3) { multibuffer.update(cx, |multibuffer, cx| { old_versions.push((multibuffer.snapshot(cx), multibuffer.subscribe())); }) @@ -2815,7 +2819,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { pretty_assertions::assert_eq!(actual_row_infos, expected_row_infos); for _ in 0..5 { - let start_row = rng.gen_range(0..=expected_row_infos.len()); + let start_row = rng.random_range(0..=expected_row_infos.len()); assert_eq!( snapshot .row_infos(MultiBufferRow(start_row as u32)) @@ -2872,8 +2876,8 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let text_rope = Rope::from(expected_text.as_str()); for _ in 0..10 { - let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); - let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right); + let start_ix = text_rope.clip_offset(rng.random_range(0..=end_ix), Bias::Left); let text_for_range = snapshot .text_for_range(start_ix..end_ix) @@ -2908,7 +2912,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } for _ in 0..10 { - let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); + let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right); assert_eq!( snapshot.reversed_chars_at(end_ix).collect::(), expected_text[..end_ix].chars().rev().collect::(), @@ -2916,8 +2920,8 @@ async fn test_random_multibuffer(cx: &mut 
TestAppContext, mut rng: StdRng) { } for _ in 0..10 { - let end_ix = rng.gen_range(0..=text_rope.len()); - let start_ix = rng.gen_range(0..=end_ix); + let end_ix = rng.random_range(0..=text_rope.len()); + let start_ix = rng.random_range(0..=end_ix); assert_eq!( snapshot .bytes_in_range(start_ix..end_ix) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 36ec338fb71ca1a130657dca1db037051691ad9d..7f7e759b275baadfe3b2d3931955ad39b03fdb05 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3761,7 +3761,7 @@ impl LspStore { worktree_store, languages: languages.clone(), language_server_statuses: Default::default(), - nonce: StdRng::from_entropy().r#gen(), + nonce: StdRng::from_os_rng().random(), diagnostic_summaries: HashMap::default(), lsp_server_capabilities: HashMap::default(), lsp_document_colors: HashMap::default(), @@ -3823,7 +3823,7 @@ impl LspStore { worktree_store, languages: languages.clone(), language_server_statuses: Default::default(), - nonce: StdRng::from_entropy().r#gen(), + nonce: StdRng::from_os_rng().random(), diagnostic_summaries: HashMap::default(), lsp_server_capabilities: HashMap::default(), lsp_document_colors: HashMap::default(), diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index a07f94fb737745b22bf6eaf685e1a4f2874a4dae..969e18f6d40346aa86d83bd0beb77d6652ff0763 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -7661,7 +7661,7 @@ async fn test_staging_random_hunks( .unwrap_or(20); // Try to induce races between diff recalculation and index writes. 
- if rng.gen_bool(0.5) { + if rng.random_bool(0.5) { executor.deprioritize(*CALCULATE_DIFF_TASK); } @@ -7717,7 +7717,7 @@ async fn test_staging_random_hunks( assert_eq!(hunks.len(), 6); for _i in 0..operations { - let hunk_ix = rng.gen_range(0..hunks.len()); + let hunk_ix = rng.random_range(0..hunks.len()); let hunk = &mut hunks[hunk_ix]; let row = hunk.range.start.row; @@ -7735,7 +7735,7 @@ async fn test_staging_random_hunks( hunk.secondary_status = SecondaryHunkAdditionPending; } - for _ in 0..rng.gen_range(0..10) { + for _ in 0..rng.random_range(0..10) { log::info!("yielding"); cx.executor().simulate_random_delay().await; } diff --git a/crates/rope/benches/rope_benchmark.rs b/crates/rope/benches/rope_benchmark.rs index 2233708525a8a060c78e66340537317cc6694d18..cb741fc78481e7d03a7c18dbf0d8919359b06436 100644 --- a/crates/rope/benches/rope_benchmark.rs +++ b/crates/rope/benches/rope_benchmark.rs @@ -28,11 +28,11 @@ fn generate_random_rope_ranges(mut rng: StdRng, rope: &Rope) -> Vec let mut start = 0; for _ in 0..num_ranges { let range_start = rope.clip_offset( - rng.gen_range(start..=(start + range_max_len)), + rng.random_range(start..=(start + range_max_len)), sum_tree::Bias::Left, ); let range_end = rope.clip_offset( - rng.gen_range(range_start..(range_start + range_max_len)), + rng.random_range(range_start..(range_start + range_max_len)), sum_tree::Bias::Right, ); @@ -52,7 +52,7 @@ fn generate_random_rope_points(mut rng: StdRng, rope: &Rope) -> Vec { let mut points = Vec::new(); for _ in 0..num_points { - points.push(rope.offset_to_point(rng.gen_range(0..rope.len()))); + points.push(rope.offset_to_point(rng.random_range(0..rope.len()))); } points } diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index 00679d8cf539af5759250dfe6fc7406e192407fb..689875274a460abafb808ab7db7db3f5e0487a03 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -612,7 +612,7 @@ mod tests { #[gpui::test(iterations = 100)] fn test_random_chunks(mut rng: 
StdRng) { - let chunk_len = rng.gen_range(0..=MAX_BASE); + let chunk_len = rng.random_range(0..=MAX_BASE); let text = RandomCharIter::new(&mut rng) .take(chunk_len) .collect::(); @@ -627,8 +627,8 @@ mod tests { verify_chunk(chunk.as_slice(), text); for _ in 0..10 { - let mut start = rng.gen_range(0..=chunk.text.len()); - let mut end = rng.gen_range(start..=chunk.text.len()); + let mut start = rng.random_range(0..=chunk.text.len()); + let mut end = rng.random_range(start..=chunk.text.len()); while !chunk.text.is_char_boundary(start) { start -= 1; } @@ -645,7 +645,7 @@ mod tests { #[gpui::test(iterations = 1000)] fn test_nth_set_bit_random(mut rng: StdRng) { - let set_count = rng.gen_range(0..=128); + let set_count = rng.random_range(0..=128); let mut set_bits = (0..128).choose_multiple(&mut rng, set_count); set_bits.sort(); let mut n = 0; diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 41b2a2d033eb49a1851c02e7066be22d807bca4b..33886854220862c60153dc3ea1f02180c62212a3 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -1610,9 +1610,9 @@ mod tests { let mut expected = String::new(); let mut actual = Rope::new(); for _ in 0..operations { - let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right); - let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left); - let len = rng.gen_range(0..=64); + let end_ix = clip_offset(&expected, rng.random_range(0..=expected.len()), Right); + let start_ix = clip_offset(&expected, rng.random_range(0..=end_ix), Left); + let len = rng.random_range(0..=64); let new_text: String = RandomCharIter::new(&mut rng).take(len).collect(); let mut new_actual = Rope::new(); @@ -1629,8 +1629,8 @@ mod tests { log::info!("text: {:?}", expected); for _ in 0..5 { - let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right); - let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left); + let end_ix = clip_offset(&expected, 
rng.random_range(0..=expected.len()), Right); + let start_ix = clip_offset(&expected, rng.random_range(0..=end_ix), Left); let actual_text = actual.chunks_in_range(start_ix..end_ix).collect::(); assert_eq!(actual_text, &expected[start_ix..end_ix]); @@ -1695,14 +1695,14 @@ mod tests { ); // Check that next_line/prev_line work correctly from random positions - let mut offset = rng.gen_range(start_ix..=end_ix); + let mut offset = rng.random_range(start_ix..=end_ix); while !expected.is_char_boundary(offset) { offset -= 1; } chunks.seek(offset); for _ in 0..5 { - if rng.r#gen() { + if rng.random() { let expected_next_line_start = expected[offset..end_ix] .find('\n') .map(|newline_ix| offset + newline_ix + 1); @@ -1791,8 +1791,8 @@ mod tests { } assert!((start_ix..=end_ix).contains(&chunks.offset())); - if rng.r#gen() { - offset = rng.gen_range(start_ix..=end_ix); + if rng.random() { + offset = rng.random_range(start_ix..=end_ix); while !expected.is_char_boundary(offset) { offset -= 1; } @@ -1876,8 +1876,8 @@ mod tests { } for _ in 0..5 { - let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right); - let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left); + let end_ix = clip_offset(&expected, rng.random_range(0..=expected.len()), Right); + let start_ix = clip_offset(&expected, rng.random_range(0..=end_ix), Left); assert_eq!( actual.cursor(start_ix).summary::(end_ix), TextSummary::from(&expected[start_ix..end_ix]) diff --git a/crates/rpc/src/auth.rs b/crates/rpc/src/auth.rs index 2e3546289d6bbc476ea7dd6002cb70d466a53d3f..3829f3d36b7cbddd00678f815d08053c864c2010 100644 --- a/crates/rpc/src/auth.rs +++ b/crates/rpc/src/auth.rs @@ -1,6 +1,6 @@ use anyhow::{Context as _, Result}; use base64::prelude::*; -use rand::{Rng as _, thread_rng}; +use rand::prelude::*; use rsa::pkcs1::{DecodeRsaPublicKey, EncodeRsaPublicKey}; use rsa::traits::PaddingScheme; use rsa::{Oaep, Pkcs1v15Encrypt, RsaPrivateKey, RsaPublicKey}; @@ -31,7 +31,7 @@ pub struct 
PrivateKey(RsaPrivateKey); /// Generate a public and private key for asymmetric encryption. pub fn keypair() -> Result<(PublicKey, PrivateKey)> { - let mut rng = thread_rng(); + let mut rng = RsaRngCompat::new(); let bits = 2048; let private_key = RsaPrivateKey::new(&mut rng, bits)?; let public_key = RsaPublicKey::from(&private_key); @@ -40,10 +40,10 @@ pub fn keypair() -> Result<(PublicKey, PrivateKey)> { /// Generate a random 64-character base64 string. pub fn random_token() -> String { - let mut rng = thread_rng(); + let mut rng = rand::rng(); let mut token_bytes = [0; 48]; for byte in token_bytes.iter_mut() { - *byte = rng.r#gen(); + *byte = rng.random(); } BASE64_URL_SAFE.encode(token_bytes) } @@ -52,7 +52,7 @@ impl PublicKey { /// Convert a string to a base64-encoded string that can only be decoded with the corresponding /// private key. pub fn encrypt_string(&self, string: &str, format: EncryptionFormat) -> Result { - let mut rng = thread_rng(); + let mut rng = RsaRngCompat::new(); let bytes = string.as_bytes(); let encrypted_bytes = match format { EncryptionFormat::V0 => self.0.encrypt(&mut rng, Pkcs1v15Encrypt, bytes), @@ -107,6 +107,36 @@ impl TryFrom for PublicKey { } } +// TODO: remove once we rsa v0.10 is released. 
+struct RsaRngCompat(rand::rngs::ThreadRng); + +impl RsaRngCompat { + fn new() -> Self { + Self(rand::rng()) + } +} + +impl rsa::signature::rand_core::RngCore for RsaRngCompat { + fn next_u32(&mut self) -> u32 { + self.0.next_u32() + } + + fn next_u64(&mut self) -> u64 { + self.0.next_u64() + } + + fn fill_bytes(&mut self, dest: &mut [u8]) { + self.0.fill_bytes(dest); + } + + fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rsa::signature::rand_core::Error> { + self.fill_bytes(dest); + Ok(()) + } +} + +impl rsa::signature::rand_core::CryptoRng for RsaRngCompat {} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/scheduler/Cargo.toml b/crates/scheduler/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..0446c67914541964f01514865ddc363c60f837c8 --- /dev/null +++ b/crates/scheduler/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "scheduler" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "Apache-2.0" + +[lints] +workspace = true + +[lib] +path = "src/scheduler.rs" +doctest = false + +[features] +test-support = [] + +[dependencies] +async-task.workspace = true +chrono.workspace = true +futures.workspace = true +parking.workspace = true +parking_lot.workspace = true +rand.workspace = true +workspace-hack.workspace = true diff --git a/crates/scheduler/LICENSE-APACHE b/crates/scheduler/LICENSE-APACHE new file mode 120000 index 0000000000000000000000000000000000000000..1cd601d0a3affae83854be02a0afdec3b7a9ec4d --- /dev/null +++ b/crates/scheduler/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/scheduler/src/clock.rs b/crates/scheduler/src/clock.rs new file mode 100644 index 0000000000000000000000000000000000000000..c035c6b7dbcbabeaeeb2a952974cc4bf777c1f92 --- /dev/null +++ b/crates/scheduler/src/clock.rs @@ -0,0 +1,34 @@ +use chrono::{DateTime, Duration, Utc}; +use parking_lot::Mutex; + +pub trait Clock { + fn now(&self) -> DateTime; +} + 
+pub struct TestClock { + now: Mutex>, +} + +impl TestClock { + pub fn new() -> Self { + const START_TIME: &str = "2025-07-01T23:59:58-00:00"; + let now = DateTime::parse_from_rfc3339(START_TIME).unwrap().to_utc(); + Self { + now: Mutex::new(now), + } + } + + pub fn set_now(&self, now: DateTime) { + *self.now.lock() = now; + } + + pub fn advance(&self, duration: Duration) { + *self.now.lock() += duration; + } +} + +impl Clock for TestClock { + fn now(&self) -> DateTime { + *self.now.lock() + } +} diff --git a/crates/scheduler/src/executor.rs b/crates/scheduler/src/executor.rs new file mode 100644 index 0000000000000000000000000000000000000000..03f91ae551ff086f56e089bd53d690a2c5345949 --- /dev/null +++ b/crates/scheduler/src/executor.rs @@ -0,0 +1,137 @@ +use crate::{Scheduler, SessionId, Timer}; +use std::{ + future::Future, + marker::PhantomData, + pin::Pin, + rc::Rc, + sync::Arc, + task::{Context, Poll}, + time::Duration, +}; + +#[derive(Clone)] +pub struct ForegroundExecutor { + session_id: SessionId, + scheduler: Arc, + not_send: PhantomData>, +} + +impl ForegroundExecutor { + pub fn spawn(&self, future: F) -> Task + where + F: Future + 'static, + F::Output: 'static, + { + let session_id = self.session_id; + let scheduler = Arc::clone(&self.scheduler); + let (runnable, task) = async_task::spawn_local(future, move |runnable| { + scheduler.schedule_foreground(session_id, runnable); + }); + runnable.schedule(); + Task(TaskState::Spawned(task)) + } + + pub fn timer(&self, duration: Duration) -> Timer { + self.scheduler.timer(duration) + } +} + +impl ForegroundExecutor { + pub fn new(session_id: SessionId, scheduler: Arc) -> Self { + assert!( + scheduler.is_main_thread(), + "ForegroundExecutor must be created on the same thread as the Scheduler" + ); + Self { + session_id, + scheduler, + not_send: PhantomData, + } + } +} + +impl BackgroundExecutor { + pub fn new(scheduler: Arc) -> Self { + Self { scheduler } + } +} + +pub struct BackgroundExecutor { + scheduler: 
Arc, +} + +impl BackgroundExecutor { + pub fn spawn(&self, future: F) -> Task + where + F: Future + Send + 'static, + F::Output: Send + 'static, + { + let scheduler = Arc::clone(&self.scheduler); + let (runnable, task) = async_task::spawn(future, move |runnable| { + scheduler.schedule_background(runnable); + }); + runnable.schedule(); + Task(TaskState::Spawned(task)) + } + + pub fn block_on(&self, future: Fut) -> Fut::Output { + self.scheduler.block_on(future) + } + + pub fn block_with_timeout( + &self, + future: &mut Fut, + timeout: Duration, + ) -> Option { + self.scheduler.block_with_timeout(future, timeout) + } + + pub fn timer(&self, duration: Duration) -> Timer { + self.scheduler.timer(duration) + } +} + +/// Task is a primitive that allows work to happen in the background. +/// +/// It implements [`Future`] so you can `.await` on it. +/// +/// If you drop a task it will be cancelled immediately. Calling [`Task::detach`] allows +/// the task to continue running, but with no way to return a value. +#[must_use] +#[derive(Debug)] +pub struct Task(TaskState); + +#[derive(Debug)] +enum TaskState { + /// A task that is ready to return a value + Ready(Option), + + /// A task that is currently running. 
+ Spawned(async_task::Task), +} + +impl Task { + /// Creates a new task that will resolve with the value + pub fn ready(val: T) -> Self { + Task(TaskState::Ready(Some(val))) + } + + /// Detaching a task runs it to completion in the background + pub fn detach(self) { + match self { + Task(TaskState::Ready(_)) => {} + Task(TaskState::Spawned(task)) => task.detach(), + } + } +} + +impl Future for Task { + type Output = T; + + fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { + match unsafe { self.get_unchecked_mut() } { + Task(TaskState::Ready(val)) => Poll::Ready(val.take().unwrap()), + Task(TaskState::Spawned(task)) => Pin::new(task).poll(cx), + } + } +} diff --git a/crates/scheduler/src/scheduler.rs b/crates/scheduler/src/scheduler.rs new file mode 100644 index 0000000000000000000000000000000000000000..ee1964784565266aba2fcc1efd1cd8de0a7fd5e7 --- /dev/null +++ b/crates/scheduler/src/scheduler.rs @@ -0,0 +1,63 @@ +mod clock; +mod executor; +mod test_scheduler; +#[cfg(test)] +mod tests; + +pub use clock::*; +pub use executor::*; +pub use test_scheduler::*; + +use async_task::Runnable; +use futures::{FutureExt as _, channel::oneshot, future::LocalBoxFuture}; +use std::{ + future::Future, + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; + +pub trait Scheduler: Send + Sync { + fn block(&self, future: LocalBoxFuture<()>, timeout: Option); + fn schedule_foreground(&self, session_id: SessionId, runnable: Runnable); + fn schedule_background(&self, runnable: Runnable); + fn timer(&self, timeout: Duration) -> Timer; + fn is_main_thread(&self) -> bool; +} + +impl dyn Scheduler { + pub fn block_on(&self, future: Fut) -> Fut::Output { + let mut output = None; + self.block(async { output = Some(future.await) }.boxed_local(), None); + output.unwrap() + } + + pub fn block_with_timeout( + &self, + future: &mut Fut, + timeout: Duration, + ) -> Option { + let mut output = None; + self.block( + async { output = Some(future.await) }.boxed_local(), + Some(timeout), + ); 
+ output + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] +pub struct SessionId(u16); + +pub struct Timer(oneshot::Receiver<()>); + +impl Future for Timer { + type Output = (); + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<()> { + match self.0.poll_unpin(cx) { + Poll::Ready(_) => Poll::Ready(()), + Poll::Pending => Poll::Pending, + } + } +} diff --git a/crates/scheduler/src/test_scheduler.rs b/crates/scheduler/src/test_scheduler.rs new file mode 100644 index 0000000000000000000000000000000000000000..479759d9bdb775a3d2a71bae586fba9d658e71ce --- /dev/null +++ b/crates/scheduler/src/test_scheduler.rs @@ -0,0 +1,352 @@ +use crate::{ + BackgroundExecutor, Clock as _, ForegroundExecutor, Scheduler, SessionId, TestClock, Timer, +}; +use async_task::Runnable; +use chrono::{DateTime, Duration as ChronoDuration, Utc}; +use futures::{FutureExt as _, channel::oneshot, future::LocalBoxFuture}; +use parking_lot::Mutex; +use rand::prelude::*; +use std::{ + collections::VecDeque, + future::Future, + panic::{self, AssertUnwindSafe}, + pin::Pin, + sync::{ + Arc, + atomic::{AtomicBool, Ordering::SeqCst}, + }, + task::{Context, Poll, Wake, Waker}, + thread, + time::{Duration, Instant}, +}; + +pub struct TestScheduler { + clock: Arc, + rng: Arc>, + state: Mutex, + pub thread_id: thread::ThreadId, + pub config: SchedulerConfig, +} + +impl TestScheduler { + /// Run a test once with default configuration (seed 0) + pub fn once(f: impl AsyncFnOnce(Arc) -> R) -> R { + Self::with_seed(0, f) + } + + /// Run a test multiple times with sequential seeds (0, 1, 2, ...) 
+ pub fn many(iterations: usize, mut f: impl AsyncFnMut(Arc) -> R) -> Vec { + (0..iterations as u64) + .map(|seed| { + let mut unwind_safe_f = AssertUnwindSafe(&mut f); + match panic::catch_unwind(move || Self::with_seed(seed, &mut *unwind_safe_f)) { + Ok(result) => result, + Err(error) => { + eprintln!("Failing Seed: {seed}"); + panic::resume_unwind(error); + } + } + }) + .collect() + } + + /// Run a test once with a specific seed + pub fn with_seed(seed: u64, f: impl AsyncFnOnce(Arc) -> R) -> R { + let scheduler = Arc::new(TestScheduler::new(SchedulerConfig::with_seed(seed))); + let future = f(scheduler.clone()); + let result = scheduler.block_on(future); + scheduler.run(); + result + } + + pub fn new(config: SchedulerConfig) -> Self { + Self { + rng: Arc::new(Mutex::new(StdRng::seed_from_u64(config.seed))), + state: Mutex::new(SchedulerState { + runnables: VecDeque::new(), + timers: Vec::new(), + randomize_order: config.randomize_order, + allow_parking: config.allow_parking, + next_session_id: SessionId(0), + }), + thread_id: thread::current().id(), + clock: Arc::new(TestClock::new()), + config, + } + } + + pub fn clock(&self) -> Arc { + self.clock.clone() + } + + pub fn rng(&self) -> Arc> { + self.rng.clone() + } + + /// Create a foreground executor for this scheduler + pub fn foreground(self: &Arc) -> ForegroundExecutor { + let session_id = { + let mut state = self.state.lock(); + state.next_session_id.0 += 1; + state.next_session_id + }; + ForegroundExecutor::new(session_id, self.clone()) + } + + /// Create a background executor for this scheduler + pub fn background(self: &Arc) -> BackgroundExecutor { + BackgroundExecutor::new(self.clone()) + } + + pub fn block_on(&self, future: Fut) -> Fut::Output { + (self as &dyn Scheduler).block_on(future) + } + + pub fn yield_random(&self) -> Yield { + Yield(self.rng.lock().random_range(0..20)) + } + + pub fn run(&self) { + while self.step() || self.advance_clock() { + // Continue until no work remains + } + } + + fn 
step(&self) -> bool { + let elapsed_timers = { + let mut state = self.state.lock(); + let end_ix = state + .timers + .partition_point(|timer| timer.expiration <= self.clock.now()); + state.timers.drain(..end_ix).collect::>() + }; + + if !elapsed_timers.is_empty() { + return true; + } + + let runnable = self.state.lock().runnables.pop_front(); + if let Some(runnable) = runnable { + runnable.run(); + return true; + } + + false + } + + fn advance_clock(&self) -> bool { + if let Some(timer) = self.state.lock().timers.first() { + self.clock.set_now(timer.expiration); + true + } else { + false + } + } +} + +impl Scheduler for TestScheduler { + fn is_main_thread(&self) -> bool { + thread::current().id() == self.thread_id + } + + fn schedule_foreground(&self, session_id: SessionId, runnable: Runnable) { + let mut state = self.state.lock(); + let ix = if state.randomize_order { + let start_ix = state + .runnables + .iter() + .rposition(|task| task.session_id == Some(session_id)) + .map_or(0, |ix| ix + 1); + self.rng + .lock() + .random_range(start_ix..=state.runnables.len()) + } else { + state.runnables.len() + }; + state.runnables.insert( + ix, + ScheduledRunnable { + session_id: Some(session_id), + runnable, + }, + ); + } + + fn schedule_background(&self, runnable: Runnable) { + let mut state = self.state.lock(); + let ix = if state.randomize_order { + self.rng.lock().random_range(0..=state.runnables.len()) + } else { + state.runnables.len() + }; + state.runnables.insert( + ix, + ScheduledRunnable { + session_id: None, + runnable, + }, + ); + } + + fn timer(&self, duration: Duration) -> Timer { + let (tx, rx) = oneshot::channel(); + let expiration = self.clock.now() + ChronoDuration::from_std(duration).unwrap(); + let state = &mut *self.state.lock(); + state.timers.push(ScheduledTimer { + expiration, + _notify: tx, + }); + state.timers.sort_by_key(|timer| timer.expiration); + Timer(rx) + } + + /// Block until the given future completes, with an optional timeout. 
If the + /// future is unable to make progress at any moment before the timeout and + /// no other tasks or timers remain, we panic unless parking is allowed. If + /// parking is allowed, we block up to the timeout or indefinitely if none + /// is provided. This is to allow testing a mix of deterministic and + /// non-deterministic async behavior, such as when interacting with I/O in + /// an otherwise deterministic test. + fn block(&self, mut future: LocalBoxFuture<()>, timeout: Option) { + let (parker, unparker) = parking::pair(); + let deadline = timeout.map(|timeout| Instant::now() + timeout); + let awoken = Arc::new(AtomicBool::new(false)); + let waker = Waker::from(Arc::new(WakerFn::new({ + let awoken = awoken.clone(); + move || { + awoken.store(true, SeqCst); + unparker.unpark(); + } + }))); + let max_ticks = if timeout.is_some() { + self.rng + .lock() + .random_range(0..=self.config.max_timeout_ticks) + } else { + usize::MAX + }; + let mut cx = Context::from_waker(&waker); + + for _ in 0..max_ticks { + let Poll::Pending = future.poll_unpin(&mut cx) else { + break; + }; + + let mut stepped = None; + while self.rng.lock().random() && stepped.unwrap_or(true) { + *stepped.get_or_insert(false) |= self.step(); + } + + let stepped = stepped.unwrap_or(true); + let awoken = awoken.swap(false, SeqCst); + if !stepped && !awoken && !self.advance_clock() { + if self.state.lock().allow_parking { + if !park(&parker, deadline) { + break; + } + } else if deadline.is_some() { + break; + } else { + panic!("Parking forbidden"); + } + } + } + } +} + +#[derive(Clone, Debug)] +pub struct SchedulerConfig { + pub seed: u64, + pub randomize_order: bool, + pub allow_parking: bool, + pub max_timeout_ticks: usize, +} + +impl SchedulerConfig { + pub fn with_seed(seed: u64) -> Self { + Self { + seed, + ..Default::default() + } + } +} + +impl Default for SchedulerConfig { + fn default() -> Self { + Self { + seed: 0, + randomize_order: true, + allow_parking: false, + max_timeout_ticks: 
1000, + } + } +} + +struct ScheduledRunnable { + session_id: Option, + runnable: Runnable, +} + +impl ScheduledRunnable { + fn run(self) { + self.runnable.run(); + } +} + +struct ScheduledTimer { + expiration: DateTime, + _notify: oneshot::Sender<()>, +} + +struct SchedulerState { + runnables: VecDeque, + timers: Vec, + randomize_order: bool, + allow_parking: bool, + next_session_id: SessionId, +} + +struct WakerFn { + f: F, +} + +impl WakerFn { + fn new(f: F) -> Self { + Self { f } + } +} + +impl Wake for WakerFn { + fn wake(self: Arc) { + (self.f)(); + } + + fn wake_by_ref(self: &Arc) { + (self.f)(); + } +} + +pub struct Yield(usize); + +impl Future for Yield { + type Output = (); + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll { + if self.0 == 0 { + Poll::Ready(()) + } else { + self.0 -= 1; + cx.waker().wake_by_ref(); + Poll::Pending + } + } +} + +fn park(parker: &parking::Parker, deadline: Option) -> bool { + if let Some(deadline) = deadline { + parker.park_deadline(deadline) + } else { + parker.park(); + true + } +} diff --git a/crates/scheduler/src/tests.rs b/crates/scheduler/src/tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..19eb354e979083b1ec070bd5d09e5871001a8c4f --- /dev/null +++ b/crates/scheduler/src/tests.rs @@ -0,0 +1,348 @@ +use super::*; +use futures::{ + FutureExt, + channel::{mpsc, oneshot}, + executor::block_on, + future, + sink::SinkExt, + stream::{FuturesUnordered, StreamExt}, +}; +use std::{ + cell::RefCell, + collections::{BTreeSet, HashSet}, + pin::Pin, + rc::Rc, + sync::Arc, + task::{Context, Poll}, +}; + +#[test] +fn test_foreground_executor_spawn() { + let result = TestScheduler::once(async |scheduler| { + let task = scheduler.foreground().spawn(async move { 42 }); + task.await + }); + assert_eq!(result, 42); +} + +#[test] +fn test_background_executor_spawn() { + TestScheduler::once(async |scheduler| { + let task = scheduler.background().spawn(async move { 42 }); + let result = task.await; + 
assert_eq!(result, 42); + }); +} + +#[test] +fn test_foreground_ordering() { + let mut traces = HashSet::new(); + + TestScheduler::many(100, async |scheduler| { + #[derive(Hash, PartialEq, Eq)] + struct TraceEntry { + session: usize, + task: usize, + } + + let trace = Rc::new(RefCell::new(Vec::new())); + + let foreground_1 = scheduler.foreground(); + for task in 0..10 { + foreground_1 + .spawn({ + let trace = trace.clone(); + async move { + trace.borrow_mut().push(TraceEntry { session: 0, task }); + } + }) + .detach(); + } + + let foreground_2 = scheduler.foreground(); + for task in 0..10 { + foreground_2 + .spawn({ + let trace = trace.clone(); + async move { + trace.borrow_mut().push(TraceEntry { session: 1, task }); + } + }) + .detach(); + } + + scheduler.run(); + + assert_eq!( + trace + .borrow() + .iter() + .filter(|entry| entry.session == 0) + .map(|entry| entry.task) + .collect::>(), + (0..10).collect::>() + ); + assert_eq!( + trace + .borrow() + .iter() + .filter(|entry| entry.session == 1) + .map(|entry| entry.task) + .collect::>(), + (0..10).collect::>() + ); + + traces.insert(trace.take()); + }); + + assert!(traces.len() > 1, "Expected at least two traces"); +} + +#[test] +fn test_timer_ordering() { + TestScheduler::many(1, async |scheduler| { + let background = scheduler.background(); + let futures = FuturesUnordered::new(); + futures.push( + async { + background.timer(Duration::from_millis(100)).await; + 2 + } + .boxed(), + ); + futures.push( + async { + background.timer(Duration::from_millis(50)).await; + 1 + } + .boxed(), + ); + futures.push( + async { + background.timer(Duration::from_millis(150)).await; + 3 + } + .boxed(), + ); + assert_eq!(futures.collect::>().await, vec![1, 2, 3]); + }); +} + +#[test] +fn test_send_from_bg_to_fg() { + TestScheduler::once(async |scheduler| { + let foreground = scheduler.foreground(); + let background = scheduler.background(); + + let (sender, receiver) = oneshot::channel::(); + + background + .spawn(async move { + 
sender.send(42).unwrap(); + }) + .detach(); + + let task = foreground.spawn(async move { receiver.await.unwrap() }); + let result = task.await; + assert_eq!(result, 42); + }); +} + +#[test] +fn test_randomize_order() { + // Test deterministic mode: different seeds should produce same execution order + let mut deterministic_results = HashSet::new(); + for seed in 0..10 { + let config = SchedulerConfig { + seed, + randomize_order: false, + ..Default::default() + }; + let order = block_on(capture_execution_order(config)); + assert_eq!(order.len(), 6); + deterministic_results.insert(order); + } + + // All deterministic runs should produce the same result + assert_eq!( + deterministic_results.len(), + 1, + "Deterministic mode should always produce same execution order" + ); + + // Test randomized mode: different seeds can produce different execution orders + let mut randomized_results = HashSet::new(); + for seed in 0..20 { + let config = SchedulerConfig::with_seed(seed); + let order = block_on(capture_execution_order(config)); + assert_eq!(order.len(), 6); + randomized_results.insert(order); + } + + // Randomized mode should produce multiple different execution orders + assert!( + randomized_results.len() > 1, + "Randomized mode should produce multiple different orders" + ); +} + +async fn capture_execution_order(config: SchedulerConfig) -> Vec { + let scheduler = Arc::new(TestScheduler::new(config)); + let foreground = scheduler.foreground(); + let background = scheduler.background(); + + let (sender, receiver) = mpsc::unbounded::(); + + // Spawn foreground tasks + for i in 0..3 { + let mut sender = sender.clone(); + foreground + .spawn(async move { + sender.send(format!("fg-{}", i)).await.ok(); + }) + .detach(); + } + + // Spawn background tasks + for i in 0..3 { + let mut sender = sender.clone(); + background + .spawn(async move { + sender.send(format!("bg-{}", i)).await.ok(); + }) + .detach(); + } + + drop(sender); // Close sender to signal no more messages + 
scheduler.run(); + + receiver.collect().await +} + +#[test] +fn test_block() { + let scheduler = Arc::new(TestScheduler::new(SchedulerConfig::default())); + let executor = BackgroundExecutor::new(scheduler); + let (tx, rx) = oneshot::channel(); + + // Spawn background task to send value + let _ = executor + .spawn(async move { + tx.send(42).unwrap(); + }) + .detach(); + + // Block on receiving the value + let result = executor.block_on(async { rx.await.unwrap() }); + assert_eq!(result, 42); +} + +#[test] +#[should_panic(expected = "Parking forbidden")] +fn test_parking_panics() { + let scheduler = Arc::new(TestScheduler::new(SchedulerConfig::default())); + let executor = BackgroundExecutor::new(scheduler); + executor.block_on(future::pending::<()>()); +} + +#[test] +fn test_block_with_parking() { + let config = SchedulerConfig { + allow_parking: true, + ..Default::default() + }; + let scheduler = Arc::new(TestScheduler::new(config)); + let executor = BackgroundExecutor::new(scheduler); + let (tx, rx) = oneshot::channel(); + + // Spawn background task to send value + let _ = executor + .spawn(async move { + tx.send(42).unwrap(); + }) + .detach(); + + // Block on receiving the value (will park if needed) + let result = executor.block_on(async { rx.await.unwrap() }); + assert_eq!(result, 42); +} + +#[test] +fn test_helper_methods() { + // Test the once method + let result = TestScheduler::once(async |scheduler: Arc| { + let background = scheduler.background(); + background.spawn(async { 42 }).await + }); + assert_eq!(result, 42); + + // Test the many method + let results = TestScheduler::many(3, async |scheduler: Arc| { + let background = scheduler.background(); + background.spawn(async { 10 }).await + }); + assert_eq!(results, vec![10, 10, 10]); + + // Test the with_seed method + let result = TestScheduler::with_seed(123, async |scheduler: Arc| { + let background = scheduler.background(); + + // Spawn a background task and wait for its result + let task = 
background.spawn(async { 99 }); + task.await + }); + assert_eq!(result, 99); +} + +#[test] +fn test_block_with_timeout() { + // Test case: future completes within timeout + TestScheduler::once(async |scheduler| { + let background = scheduler.background(); + let mut future = future::ready(42); + let output = background.block_with_timeout(&mut future, Duration::from_millis(100)); + assert_eq!(output, Some(42)); + }); + + // Test case: future times out + TestScheduler::once(async |scheduler| { + let background = scheduler.background(); + let mut future = future::pending::<()>(); + let output = background.block_with_timeout(&mut future, Duration::from_millis(50)); + assert_eq!(output, None); + }); + + // Test case: future makes progress via timer but still times out + let mut results = BTreeSet::new(); + TestScheduler::many(100, async |scheduler| { + let background = scheduler.background(); + let mut task = background.spawn(async move { + Yield { polls: 10 }.await; + 42 + }); + let output = background.block_with_timeout(&mut task, Duration::from_millis(50)); + results.insert(output); + }); + assert_eq!( + results.into_iter().collect::>(), + vec![None, Some(42)] + ); +} + +struct Yield { + polls: usize, +} + +impl Future for Yield { + type Output = (); + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + self.polls -= 1; + if self.polls == 0 { + Poll::Ready(()) + } else { + cx.waker().wake_by_ref(); + Poll::Pending + } + } +} diff --git a/crates/streaming_diff/src/streaming_diff.rs b/crates/streaming_diff/src/streaming_diff.rs index 704164e01eedc64cac9a1e8e4e82f584a0b4fdb9..5677981b0dc9878963e01d09e7281749d6603c8f 100644 --- a/crates/streaming_diff/src/streaming_diff.rs +++ b/crates/streaming_diff/src/streaming_diff.rs @@ -945,7 +945,7 @@ mod tests { let mut new_len = 0; while new_len < new.len() { - let mut chunk_len = rng.gen_range(1..=new.len() - new_len); + let mut chunk_len = rng.random_range(1..=new.len() - new_len); while 
!new.is_char_boundary(new_len + chunk_len) { chunk_len += 1; } @@ -1034,14 +1034,14 @@ mod tests { fn randomly_edit(text: &str, rng: &mut impl Rng) -> String { let mut result = String::from(text); - let edit_count = rng.gen_range(1..=5); + let edit_count = rng.random_range(1..=5); fn random_char_range(text: &str, rng: &mut impl Rng) -> (usize, usize) { - let mut start = rng.gen_range(0..=text.len()); + let mut start = rng.random_range(0..=text.len()); while !text.is_char_boundary(start) { start -= 1; } - let mut end = rng.gen_range(start..=text.len()); + let mut end = rng.random_range(start..=text.len()); while !text.is_char_boundary(end) { end += 1; } @@ -1049,11 +1049,11 @@ mod tests { } for _ in 0..edit_count { - match rng.gen_range(0..3) { + match rng.random_range(0..3) { 0 => { // Insert let (pos, _) = random_char_range(&result, rng); - let insert_len = rng.gen_range(1..=5); + let insert_len = rng.random_range(1..=5); let insert_text: String = random_text(rng, insert_len); result.insert_str(pos, &insert_text); } diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 710fdd4fbf12ccc2b60998207d964bd31550b345..64814ad09148cc0eb318c306132f2e296fcb3cab 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -909,7 +909,7 @@ where #[cfg(test)] mod tests { use super::*; - use rand::{distributions, prelude::*}; + use rand::{distr::StandardUniform, prelude::*}; use std::cmp; #[ctor::ctor] @@ -951,24 +951,24 @@ mod tests { let rng = &mut rng; let mut tree = SumTree::::default(); - let count = rng.gen_range(0..10); - if rng.r#gen() { - tree.extend(rng.sample_iter(distributions::Standard).take(count), &()); + let count = rng.random_range(0..10); + if rng.random() { + tree.extend(rng.sample_iter(StandardUniform).take(count), &()); } else { let items = rng - .sample_iter(distributions::Standard) + .sample_iter(StandardUniform) .take(count) .collect::>(); tree.par_extend(items, &()); } for _ in 0..num_operations { - 
let splice_end = rng.gen_range(0..tree.extent::(&()).0 + 1); - let splice_start = rng.gen_range(0..splice_end + 1); - let count = rng.gen_range(0..10); + let splice_end = rng.random_range(0..tree.extent::(&()).0 + 1); + let splice_start = rng.random_range(0..splice_end + 1); + let count = rng.random_range(0..10); let tree_end = tree.extent::(&()); let new_items = rng - .sample_iter(distributions::Standard) + .sample_iter(StandardUniform) .take(count) .collect::>(); @@ -978,7 +978,7 @@ mod tests { tree = { let mut cursor = tree.cursor::(&()); let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right); - if rng.r#gen() { + if rng.random() { new_tree.extend(new_items, &()); } else { new_tree.par_extend(new_items, &()); @@ -1005,7 +1005,7 @@ mod tests { .filter(|(_, item)| (item & 1) == 0) .collect::>(); - let mut item_ix = if rng.r#gen() { + let mut item_ix = if rng.random() { filter_cursor.next(); 0 } else { @@ -1022,12 +1022,12 @@ mod tests { filter_cursor.next(); item_ix += 1; - while item_ix > 0 && rng.gen_bool(0.2) { + while item_ix > 0 && rng.random_bool(0.2) { log::info!("prev"); filter_cursor.prev(); item_ix -= 1; - if item_ix == 0 && rng.gen_bool(0.2) { + if item_ix == 0 && rng.random_bool(0.2) { filter_cursor.prev(); assert_eq!(filter_cursor.item(), None); assert_eq!(filter_cursor.start().0, 0); @@ -1039,9 +1039,9 @@ mod tests { let mut before_start = false; let mut cursor = tree.cursor::(&()); - let start_pos = rng.gen_range(0..=reference_items.len()); + let start_pos = rng.random_range(0..=reference_items.len()); cursor.seek(&Count(start_pos), Bias::Right); - let mut pos = rng.gen_range(start_pos..=reference_items.len()); + let mut pos = rng.random_range(start_pos..=reference_items.len()); cursor.seek_forward(&Count(pos), Bias::Right); for i in 0..10 { @@ -1084,10 +1084,18 @@ mod tests { } for _ in 0..10 { - let end = rng.gen_range(0..tree.extent::(&()).0 + 1); - let start = rng.gen_range(0..end + 1); - let start_bias = if rng.r#gen() { Bias::Left 
} else { Bias::Right }; - let end_bias = if rng.r#gen() { Bias::Left } else { Bias::Right }; + let end = rng.random_range(0..tree.extent::(&()).0 + 1); + let start = rng.random_range(0..end + 1); + let start_bias = if rng.random() { + Bias::Left + } else { + Bias::Right + }; + let end_bias = if rng.random() { + Bias::Left + } else { + Bias::Right + }; let mut cursor = tree.cursor::(&()); cursor.seek(&Count(start), start_bias); diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index a8b1fcf0f2a31cbd80612d2e19506d38d52fe0af..96271ea771e3fdbe42b03504797ba78170d79096 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -2198,7 +2198,7 @@ mod tests { }; use collections::HashMap; use gpui::{Pixels, Point, TestAppContext, bounds, point, size}; - use rand::{Rng, distributions::Alphanumeric, rngs::ThreadRng, thread_rng}; + use rand::{Rng, distr, rngs::ThreadRng}; #[ignore = "Test is flaky on macOS, and doesn't run on Windows"] #[gpui::test] @@ -2249,13 +2249,14 @@ mod tests { #[test] fn test_mouse_to_cell_test() { - let mut rng = thread_rng(); + let mut rng = rand::rng(); const ITERATIONS: usize = 10; const PRECISION: usize = 1000; for _ in 0..ITERATIONS { - let viewport_cells = rng.gen_range(15..20); - let cell_size = rng.gen_range(5 * PRECISION..20 * PRECISION) as f32 / PRECISION as f32; + let viewport_cells = rng.random_range(15..20); + let cell_size = + rng.random_range(5 * PRECISION..20 * PRECISION) as f32 / PRECISION as f32; let size = crate::TerminalBounds { cell_width: Pixels::from(cell_size), @@ -2277,8 +2278,8 @@ mod tests { for col in 0..(viewport_cells - 1) { let col = col as usize; - let row_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32; - let col_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32; + let row_offset = rng.random_range(0..PRECISION) as f32 / PRECISION as f32; + let col_offset = rng.random_range(0..PRECISION) as f32 / PRECISION as f32; let mouse_pos = point( 
Pixels::from(col as f32 * cell_size + col_offset), @@ -2298,7 +2299,7 @@ mod tests { #[test] fn test_mouse_to_cell_clamp() { - let mut rng = thread_rng(); + let mut rng = rand::rng(); let size = crate::TerminalBounds { cell_width: Pixels::from(10.), @@ -2336,7 +2337,7 @@ mod tests { for _ in 0..((size.height() / size.line_height()) as usize) { let mut row_vec = Vec::new(); for _ in 0..((size.width() / size.cell_width()) as usize) { - let cell_char = rng.sample(Alphanumeric) as char; + let cell_char = rng.sample(distr::Alphanumeric) as char; row_vec.push(cell_char) } cells.push(row_vec) diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index d529e60d48ed520b518ed9beee789860eb84860a..9b89cf21c74eccfe6cbb93fd2dec5bc849f2170d 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -106,13 +106,13 @@ mod tests { let mut rhs = Default::default(); while lhs == rhs { lhs = Locator( - (0..rng.gen_range(1..=5)) - .map(|_| rng.gen_range(0..=100)) + (0..rng.random_range(1..=5)) + .map(|_| rng.random_range(0..=100)) .collect(), ); rhs = Locator( - (0..rng.gen_range(1..=5)) - .map(|_| rng.gen_range(0..=100)) + (0..rng.random_range(1..=5)) + .map(|_| rng.random_range(0..=100)) .collect(), ); } diff --git a/crates/text/src/network.rs b/crates/text/src/network.rs index f22bb52d205ba9505d9f2dc168628734346d81f5..d0d1b650ad92f8ab258cdd37e2bfc662855d6a97 100644 --- a/crates/text/src/network.rs +++ b/crates/text/src/network.rs @@ -65,8 +65,8 @@ impl Network { for message in &messages { // Insert one or more duplicates of this message, potentially *before* the previous // message sent by this peer to simulate out-of-order delivery. 
- for _ in 0..self.rng.gen_range(1..4) { - let insertion_index = self.rng.gen_range(0..inbox.len() + 1); + for _ in 0..self.rng.random_range(1..4) { + let insertion_index = self.rng.random_range(0..inbox.len() + 1); inbox.insert( insertion_index, Envelope { @@ -85,7 +85,7 @@ impl Network { pub fn receive(&mut self, receiver: ReplicaId) -> Vec { let inbox = self.inboxes.get_mut(&receiver).unwrap(); - let count = self.rng.gen_range(0..inbox.len() + 1); + let count = self.rng.random_range(0..inbox.len() + 1); inbox .drain(0..count) .map(|envelope| envelope.message) diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index dcb35e9a921538134b94e2870011eb3b341f01de..b8bb904052be44d7b67ba51215896f6f308c39c9 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -497,8 +497,8 @@ mod tests { .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(20); - let initial_chars = (0..rng.gen_range(0..=100)) - .map(|_| rng.gen_range(b'a'..=b'z') as char) + let initial_chars = (0..rng.random_range(0..=100)) + .map(|_| rng.random_range(b'a'..=b'z') as char) .collect::>(); log::info!("initial chars: {:?}", initial_chars); @@ -517,11 +517,11 @@ mod tests { break; } - let end = rng.gen_range(last_edit_end..=expected_chars.len()); - let start = rng.gen_range(last_edit_end..=end); + let end = rng.random_range(last_edit_end..=expected_chars.len()); + let start = rng.random_range(last_edit_end..=end); let old_len = end - start; - let mut new_len = rng.gen_range(0..=3); + let mut new_len = rng.random_range(0..=3); if start == end && new_len == 0 { new_len += 1; } @@ -529,7 +529,7 @@ mod tests { last_edit_end = start + new_len + 1; let new_chars = (0..new_len) - .map(|_| rng.gen_range(b'A'..=b'Z') as char) + .map(|_| rng.random_range(b'A'..=b'Z') as char) .collect::>(); log::info!( " editing {:?}: {:?}", diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 
a096f1281f592babf7900891a6412451bdc362d0..4298e704ab5f8fbe57af363379395ef23624cfcf 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -36,14 +36,14 @@ fn test_random_edits(mut rng: StdRng) { .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let reference_string_len = rng.gen_range(0..3); + let reference_string_len = rng.random_range(0..3); let mut reference_string = RandomCharIter::new(&mut rng) .take(reference_string_len) .collect::(); let mut buffer = Buffer::new(0, BufferId::new(1).unwrap(), reference_string.clone()); LineEnding::normalize(&mut reference_string); - buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); + buffer.set_group_interval(Duration::from_millis(rng.random_range(0..=200))); let mut buffer_versions = Vec::new(); log::info!( "buffer text {:?}, version: {:?}", @@ -64,7 +64,7 @@ fn test_random_edits(mut rng: StdRng) { buffer.version() ); - if rng.gen_bool(0.25) { + if rng.random_bool(0.25) { buffer.randomly_undo_redo(&mut rng); reference_string = buffer.text(); log::info!( @@ -82,7 +82,7 @@ fn test_random_edits(mut rng: StdRng) { buffer.check_invariants(); - if rng.gen_bool(0.3) { + if rng.random_bool(0.3) { buffer_versions.push((buffer.clone(), buffer.subscribe())); } } @@ -112,8 +112,9 @@ fn test_random_edits(mut rng: StdRng) { ); for _ in 0..5 { - let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right); - let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + let end_ix = + old_buffer.clip_offset(rng.random_range(0..=old_buffer.len()), Bias::Right); + let start_ix = old_buffer.clip_offset(rng.random_range(0..=end_ix), Bias::Left); let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix); let mut old_text = old_buffer.text_for_range(range.clone()).collect::(); let edits = buffer @@ -731,7 +732,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { .map(|i| i.parse().expect("invalid `OPERATIONS` 
variable")) .unwrap_or(10); - let base_text_len = rng.gen_range(0..10); + let base_text_len = rng.random_range(0..10); let base_text = RandomCharIter::new(&mut rng) .take(base_text_len) .collect::(); @@ -741,7 +742,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { for i in 0..peers { let mut buffer = Buffer::new(i as ReplicaId, BufferId::new(1).unwrap(), base_text.clone()); - buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200)); + buffer.history.group_interval = Duration::from_millis(rng.random_range(0..=200)); buffers.push(buffer); replica_ids.push(i as u16); network.add_peer(i as u16); @@ -751,10 +752,10 @@ fn test_random_concurrent_edits(mut rng: StdRng) { let mut mutation_count = operations; loop { - let replica_index = rng.gen_range(0..peers); + let replica_index = rng.random_range(0..peers); let replica_id = replica_ids[replica_index]; let buffer = &mut buffers[replica_index]; - match rng.gen_range(0..=100) { + match rng.random_range(0..=100) { 0..=50 if mutation_count != 0 => { let op = buffer.randomly_edit(&mut rng, 5).1; network.broadcast(buffer.replica_id, vec![op]); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 705d3f1788288eb67a0b3b756ba545dc99b031d3..8fb6f56222b503360a3d2dd6f4a6b27d1ac728e3 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1818,8 +1818,8 @@ impl Buffer { } pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range { - let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); - let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right); + let end = self.clip_offset(rng.random_range(start_offset..=self.len()), Bias::Right); + let start = self.clip_offset(rng.random_range(start_offset..=end), Bias::Right); start..end } @@ -1841,7 +1841,7 @@ impl Buffer { let range = self.random_byte_range(new_start, rng); last_end = Some(range.end); - let new_text_len = rng.gen_range(0..10); + let new_text_len 
= rng.random_range(0..10); let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); edits.push((range, new_text.into())); @@ -1877,7 +1877,7 @@ impl Buffer { use rand::prelude::*; let mut ops = Vec::new(); - for _ in 0..rng.gen_range(1..=5) { + for _ in 0..rng.random_range(1..=5) { if let Some(entry) = self.history.undo_stack.choose(rng) { let transaction = entry.transaction.clone(); log::info!( diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index c66adb8b3a7ef93828e95683596f43b91f96f994..db44e3945186842990f7ef8d7b2794b023324d56 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -815,7 +815,8 @@ pub fn defer(f: F) -> Deferred { #[cfg(any(test, feature = "test-support"))] mod rng { - use rand::{Rng, seq::SliceRandom}; + use rand::prelude::*; + pub struct RandomCharIter { rng: T, simple_text: bool, @@ -840,18 +841,18 @@ mod rng { fn next(&mut self) -> Option { if self.simple_text { - return if self.rng.gen_range(0..100) < 5 { + return if self.rng.random_range(0..100) < 5 { Some('\n') } else { - Some(self.rng.gen_range(b'a'..b'z' + 1).into()) + Some(self.rng.random_range(b'a'..b'z' + 1).into()) }; } - match self.rng.gen_range(0..100) { + match self.rng.random_range(0..100) { // whitespace 0..=19 => [' ', '\n', '\r', '\t'].choose(&mut self.rng).copied(), // two-byte greek letters - 20..=32 => char::from_u32(self.rng.gen_range(('α' as u32)..('ω' as u32 + 1))), + 20..=32 => char::from_u32(self.rng.random_range(('α' as u32)..('ω' as u32 + 1))), // // three-byte characters 33..=45 => ['✋', '✅', '❌', '❎', '⭐'] .choose(&mut self.rng) @@ -859,7 +860,7 @@ mod rng { // // four-byte characters 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.rng).copied(), // ascii letters - _ => Some(self.rng.gen_range(b'a'..b'z' + 1).into()), + _ => Some(self.rng.random_range(b'a'..b'z' + 1).into()), } } } diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index 
1783ba317c9927bb79ebdb91b1f57f13d200b60f..92569e0f8177ea2886271e2a39580076effc4e8b 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ -1464,7 +1464,7 @@ async fn test_random_worktree_operations_during_initial_scan( tree.as_local().unwrap().snapshot().check_invariants(true) }); - if rng.gen_bool(0.6) { + if rng.random_bool(0.6) { snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())); } } @@ -1551,7 +1551,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) let mut snapshots = Vec::new(); let mut mutations_len = operations; while mutations_len > 1 { - if rng.gen_bool(0.2) { + if rng.random_bool(0.2) { worktree .update(cx, |worktree, cx| { randomly_mutate_worktree(worktree, &mut rng, cx) @@ -1563,8 +1563,8 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) } let buffered_event_count = fs.as_fake().buffered_event_count(); - if buffered_event_count > 0 && rng.gen_bool(0.3) { - let len = rng.gen_range(0..=buffered_event_count); + if buffered_event_count > 0 && rng.random_bool(0.3) { + let len = rng.random_range(0..=buffered_event_count); log::info!("flushing {} events", len); fs.as_fake().flush_events(len); } else { @@ -1573,7 +1573,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) } cx.executor().run_until_parked(); - if rng.gen_bool(0.2) { + if rng.random_bool(0.2) { log::info!("storing snapshot {}", snapshots.len()); let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); snapshots.push(snapshot); @@ -1701,7 +1701,7 @@ fn randomly_mutate_worktree( let snapshot = worktree.snapshot(); let entry = snapshot.entries(false, 0).choose(rng).unwrap(); - match rng.gen_range(0_u32..100) { + match rng.random_range(0_u32..100) { 0..=33 if entry.path.as_ref() != Path::new("") => { log::info!("deleting entry {:?} ({})", entry.path, entry.id.0); worktree.delete_entry(entry.id, 
false, cx).unwrap() @@ -1733,7 +1733,7 @@ fn randomly_mutate_worktree( _ => { if entry.is_dir() { let child_path = entry.path.join(random_filename(rng)); - let is_dir = rng.gen_bool(0.3); + let is_dir = rng.random_bool(0.3); log::info!( "creating {} at {:?}", if is_dir { "dir" } else { "file" }, @@ -1776,11 +1776,11 @@ async fn randomly_mutate_fs( } } - if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) { + if (files.is_empty() && dirs.len() == 1) || rng.random_bool(insertion_probability) { let path = dirs.choose(rng).unwrap(); let new_path = path.join(random_filename(rng)); - if rng.r#gen() { + if rng.random() { log::info!( "creating dir {:?}", new_path.strip_prefix(root_path).unwrap() @@ -1793,7 +1793,7 @@ async fn randomly_mutate_fs( ); fs.create_file(&new_path, Default::default()).await.unwrap(); } - } else if rng.gen_bool(0.05) { + } else if rng.random_bool(0.05) { let ignore_dir_path = dirs.choose(rng).unwrap(); let ignore_path = ignore_dir_path.join(*GITIGNORE); @@ -1808,11 +1808,11 @@ async fn randomly_mutate_fs( .cloned() .collect::>(); let files_to_ignore = { - let len = rng.gen_range(0..=subfiles.len()); + let len = rng.random_range(0..=subfiles.len()); subfiles.choose_multiple(rng, len) }; let dirs_to_ignore = { - let len = rng.gen_range(0..subdirs.len()); + let len = rng.random_range(0..subdirs.len()); subdirs.choose_multiple(rng, len) }; @@ -1848,7 +1848,7 @@ async fn randomly_mutate_fs( file_path.into_iter().chain(dir_path).choose(rng).unwrap() }; - let is_rename = rng.r#gen(); + let is_rename = rng.random(); if is_rename { let new_path_parent = dirs .iter() @@ -1857,7 +1857,7 @@ async fn randomly_mutate_fs( .unwrap(); let overwrite_existing_dir = - !old_path.starts_with(new_path_parent) && rng.gen_bool(0.3); + !old_path.starts_with(new_path_parent) && rng.random_bool(0.3); let new_path = if overwrite_existing_dir { fs.remove_dir( new_path_parent, @@ -1919,7 +1919,7 @@ async fn randomly_mutate_fs( fn random_filename(rng: 
&mut impl Rng) -> String { (0..6) - .map(|_| rng.sample(rand::distributions::Alphanumeric)) + .map(|_| rng.sample(rand::distr::Alphanumeric)) .map(char::from) .collect() } diff --git a/crates/zeta/src/input_excerpt.rs b/crates/zeta/src/input_excerpt.rs index f4add6593e9a2b15679b5b0e6e660b4ce6a52f87..dd1bbed1d72e8668e9ed55c9b66b911addfcdd43 100644 --- a/crates/zeta/src/input_excerpt.rs +++ b/crates/zeta/src/input_excerpt.rs @@ -149,7 +149,7 @@ mod tests { let mut rng = rand::thread_rng(); let mut numbers = Vec::new(); for _ in 0..5 { - numbers.push(rng.gen_range(1..101)); + numbers.push(rng.random_range(1..101)); } numbers } @@ -208,7 +208,7 @@ mod tests { <|editable_region_end|> let mut numbers = Vec::new(); for _ in 0..5 { - numbers.push(rng.gen_range(1..101)); + numbers.push(rng.random_range(1..101)); ```"#} ); } From e37efc1e9b313ae4ac28322334db464a2b84c8c4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 4 Sep 2025 17:30:23 +0200 Subject: [PATCH 002/109] diagnostics: Fix diagnostics pane clearing up too eagerly on typing (#37546) Closes https://github.com/zed-industries/zed/issues/30494 Release Notes: - Fixed diagnostics pane closing buffers too eagerly when typing inside it --- crates/diagnostics/src/diagnostics.rs | 74 ++++++++++++++------------- crates/diagnostics/src/items.rs | 63 ++++++++++------------- 2 files changed, 64 insertions(+), 73 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 53d03718475da1eeaf2b6b3faa22baabb1695f2d..20e8a861334ac764db921d706e86605aed00c175 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -94,43 +94,44 @@ impl Render for ProjectDiagnosticsEditor { 0 }; - let child = if warning_count + self.summary.error_count == 0 { - let label = if self.summary.warning_count == 0 { - SharedString::new_static("No problems in workspace") + let child = + if warning_count + self.summary.error_count == 0 && 
self.editor.read(cx).is_empty(cx) { + let label = if self.summary.warning_count == 0 { + SharedString::new_static("No problems in workspace") + } else { + SharedString::new_static("No errors in workspace") + }; + v_flex() + .key_context("EmptyPane") + .size_full() + .gap_1() + .justify_center() + .items_center() + .text_center() + .bg(cx.theme().colors().editor_background) + .child(Label::new(label).color(Color::Muted)) + .when(self.summary.warning_count > 0, |this| { + let plural_suffix = if self.summary.warning_count > 1 { + "s" + } else { + "" + }; + let label = format!( + "Show {} warning{}", + self.summary.warning_count, plural_suffix + ); + this.child( + Button::new("diagnostics-show-warning-label", label).on_click( + cx.listener(|this, _, window, cx| { + this.toggle_warnings(&Default::default(), window, cx); + cx.notify(); + }), + ), + ) + }) } else { - SharedString::new_static("No errors in workspace") + div().size_full().child(self.editor.clone()) }; - v_flex() - .key_context("EmptyPane") - .size_full() - .gap_1() - .justify_center() - .items_center() - .text_center() - .bg(cx.theme().colors().editor_background) - .child(Label::new(label).color(Color::Muted)) - .when(self.summary.warning_count > 0, |this| { - let plural_suffix = if self.summary.warning_count > 1 { - "s" - } else { - "" - }; - let label = format!( - "Show {} warning{}", - self.summary.warning_count, plural_suffix - ); - this.child( - Button::new("diagnostics-show-warning-label", label).on_click(cx.listener( - |this, _, window, cx| { - this.toggle_warnings(&Default::default(), window, cx); - cx.notify(); - }, - )), - ) - }) - } else { - div().size_full().child(self.editor.clone()) - }; div() .key_context("Diagnostics") @@ -233,6 +234,7 @@ impl ProjectDiagnosticsEditor { } } EditorEvent::Blurred => this.update_stale_excerpts(window, cx), + EditorEvent::Saved => this.update_stale_excerpts(window, cx), _ => {} } }, @@ -277,7 +279,7 @@ impl ProjectDiagnosticsEditor { } fn 
update_stale_excerpts(&mut self, window: &mut Window, cx: &mut Context) { - if self.update_excerpts_task.is_some() { + if self.update_excerpts_task.is_some() || self.multibuffer.read(cx).is_dirty(cx) { return; } diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 7ac6d101f315674cec4fd07f4ad2df0830284124..11ee4ece96d0c4646714d808037e7a2789bcdf85 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -32,49 +32,38 @@ impl Render for DiagnosticIndicator { } let diagnostic_indicator = match (self.summary.error_count, self.summary.warning_count) { - (0, 0) => h_flex().map(|this| { - this.child( - Icon::new(IconName::Check) - .size(IconSize::Small) - .color(Color::Default), - ) - }), - (0, warning_count) => h_flex() - .gap_1() - .child( - Icon::new(IconName::Warning) - .size(IconSize::Small) - .color(Color::Warning), - ) - .child(Label::new(warning_count.to_string()).size(LabelSize::Small)), - (error_count, 0) => h_flex() - .gap_1() - .child( - Icon::new(IconName::XCircle) - .size(IconSize::Small) - .color(Color::Error), - ) - .child(Label::new(error_count.to_string()).size(LabelSize::Small)), + (0, 0) => h_flex().child( + Icon::new(IconName::Check) + .size(IconSize::Small) + .color(Color::Default), + ), (error_count, warning_count) => h_flex() .gap_1() - .child( - Icon::new(IconName::XCircle) - .size(IconSize::Small) - .color(Color::Error), - ) - .child(Label::new(error_count.to_string()).size(LabelSize::Small)) - .child( - Icon::new(IconName::Warning) - .size(IconSize::Small) - .color(Color::Warning), - ) - .child(Label::new(warning_count.to_string()).size(LabelSize::Small)), + .when(error_count > 0, |this| { + this.child( + Icon::new(IconName::XCircle) + .size(IconSize::Small) + .color(Color::Error), + ) + .child(Label::new(error_count.to_string()).size(LabelSize::Small)) + }) + .when(warning_count > 0, |this| { + this.child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + 
.color(Color::Warning), + ) + .child(Label::new(warning_count.to_string()).size(LabelSize::Small)) + }), }; let status = if let Some(diagnostic) = &self.current_diagnostic { - let message = diagnostic.message.split('\n').next().unwrap().to_string(); + let message = diagnostic + .message + .split_once('\n') + .map_or(&*diagnostic.message, |(first, _)| first); Some( - Button::new("diagnostic_message", message) + Button::new("diagnostic_message", SharedString::new(message)) .label_size(LabelSize::Small) .tooltip(|window, cx| { Tooltip::for_action( From 0870a1fe80b0724ddaae1408aa402761540131de Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Thu, 4 Sep 2025 09:01:50 -0700 Subject: [PATCH 003/109] acp: Don't share API key with Anthropic provider (#37543) Since Claude Code has it's own preferred method of grabbing API keys, we don't want to reuse this one. Release Notes: - acp: Don't share Anthropic API key from the Anthropic provider to allow default Claude Code login options --------- Co-authored-by: Agus Zubiaga --- crates/agent_servers/src/claude.rs | 15 ++--- crates/agent_ui/src/acp/thread_view.rs | 92 +++++++++++++------------- 2 files changed, 49 insertions(+), 58 deletions(-) diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index 194867241baf86cf7b3d3ab168318a00d64d6e25..48d3e33775d98dfe89801813c6926ff40f48ed87 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -1,4 +1,3 @@ -use language_models::provider::anthropic::AnthropicLanguageModelProvider; use settings::SettingsStore; use std::path::Path; use std::rc::Rc; @@ -99,16 +98,10 @@ impl AgentServer for ClaudeCode { .await? }; - if let Some(api_key) = cx - .update(AnthropicLanguageModelProvider::api_key)? 
- .await - .ok() - { - command - .env - .get_or_insert_default() - .insert("ANTHROPIC_API_KEY".to_owned(), api_key.key); - } + command + .env + .get_or_insert_default() + .insert("ANTHROPIC_API_KEY".to_owned(), "".to_owned()); let root_dir_exists = fs.is_dir(&root_dir).await; anyhow::ensure!( diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 50da44e430fd684d0e91d43ee82a0ccb0117111d..3407f4e878e6452322aba1b5009b582f322db4b4 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -983,7 +983,7 @@ impl AcpThreadView { this, AuthRequired { description: None, - provider_id: Some(language_model::ANTHROPIC_PROVIDER_ID), + provider_id: None, }, agent, connection, @@ -3010,6 +3010,8 @@ impl AcpThreadView { let show_description = configuration_view.is_none() && description.is_none() && pending_auth_method.is_none(); + let auth_methods = connection.auth_methods(); + v_flex().flex_1().size_full().justify_end().child( v_flex() .p_2() @@ -3040,21 +3042,23 @@ impl AcpThreadView { .cloned() .map(|view| div().w_full().child(view)), ) - .when( - show_description, - |el| { - el.child( - Label::new(format!( - "You are not currently authenticated with {}. 
Please choose one of the following options:", - self.agent.name() - )) - .size(LabelSize::Small) - .color(Color::Muted) - .mb_1() - .ml_5(), - ) - }, - ) + .when(show_description, |el| { + el.child( + Label::new(format!( + "You are not currently authenticated with {}.{}", + self.agent.name(), + if auth_methods.len() > 1 { + " Please choose one of the following options:" + } else { + "" + } + )) + .size(LabelSize::Small) + .color(Color::Muted) + .mb_1() + .ml_5(), + ) + }) .when_some(pending_auth_method, |el, _| { el.child( h_flex() @@ -3066,12 +3070,12 @@ impl AcpThreadView { Icon::new(IconName::ArrowCircle) .size(IconSize::Small) .color(Color::Muted) - .with_rotate_animation(2) + .with_rotate_animation(2), ) .child(Label::new("Authenticating…").size(LabelSize::Small)), ) }) - .when(!connection.auth_methods().is_empty(), |this| { + .when(!auth_methods.is_empty(), |this| { this.child( h_flex() .justify_end() @@ -3083,38 +3087,32 @@ impl AcpThreadView { .pt_2() .border_color(cx.theme().colors().border.opacity(0.8)) }) - .children( - connection - .auth_methods() - .iter() - .enumerate() - .rev() - .map(|(ix, method)| { - Button::new( - SharedString::from(method.id.0.clone()), - method.name.clone(), - ) - .when(ix == 0, |el| { - el.style(ButtonStyle::Tinted(ui::TintColor::Warning)) - }) - .label_size(LabelSize::Small) - .on_click({ - let method_id = method.id.clone(); - cx.listener(move |this, _, window, cx| { - telemetry::event!( - "Authenticate Agent Started", - agent = this.agent.telemetry_id(), - method = method_id - ); + .children(connection.auth_methods().iter().enumerate().rev().map( + |(ix, method)| { + Button::new( + SharedString::from(method.id.0.clone()), + method.name.clone(), + ) + .when(ix == 0, |el| { + el.style(ButtonStyle::Tinted(ui::TintColor::Warning)) + }) + .label_size(LabelSize::Small) + .on_click({ + let method_id = method.id.clone(); + cx.listener(move |this, _, window, cx| { + telemetry::event!( + "Authenticate Agent Started", + agent = 
this.agent.telemetry_id(), + method = method_id + ); - this.authenticate(method_id.clone(), window, cx) - }) + this.authenticate(method_id.clone(), window, cx) }) - }), - ), + }) + }, + )), ) - }) - + }), ) } From 25ee9b1013fe10a04b429576006b88fb34bdcd85 Mon Sep 17 00:00:00 2001 From: Jiqing Yang <73824809+WERDXZ@users.noreply.github.com> Date: Thu, 4 Sep 2025 10:21:44 -0700 Subject: [PATCH 004/109] Fix Wayland crash on AMD GPUs by updating Blade (#37516) Updates blade-graphics from e0ec4e7 to bfa594e to fix GPU crashes on Wayland with AMD graphics cards. The crash was caused by incorrect BLAS scratch buffer alignment - the old version hardcoded 256-byte alignment, but AMD GPUs require different alignment values. The newer Blade version uses the GPU's actual alignment requirements instead of hardcoding. Closes #37448 Release Notes: - Migrate to newer version of Blade upstream --- Cargo.lock | 6 +++--- Cargo.toml | 6 +++--- crates/gpui/src/platform/blade/blade_renderer.rs | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ee80d59006f50c321e80bbe6fca9288b345524be..d31c8ecd88713f939293d022533715e39a48ed43 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2291,7 +2291,7 @@ dependencies = [ [[package]] name = "blade-graphics" version = "0.6.0" -source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5" +source = "git+https://github.com/kvark/blade?rev=bfa594e#bfa594ea697d4b6326ea29f747525c85ecf933b9" dependencies = [ "ash", "ash-window", @@ -2324,7 +2324,7 @@ dependencies = [ [[package]] name = "blade-macros" version = "0.3.0" -source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5" +source = "git+https://github.com/kvark/blade?rev=bfa594e#bfa594ea697d4b6326ea29f747525c85ecf933b9" dependencies = [ "proc-macro2", "quote", @@ -2334,7 +2334,7 @@ dependencies = [ [[package]] name = 
"blade-util" version = "0.2.0" -source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5" +source = "git+https://github.com/kvark/blade?rev=bfa594e#bfa594ea697d4b6326ea29f747525c85ecf933b9" dependencies = [ "blade-graphics", "bytemuck", diff --git a/Cargo.toml b/Cargo.toml index 8a487b612a18dc837d3cd75697f13bf92b5b28b7..1cce7701c01bc2391c7ec7b505bd945226d7ce11 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -462,9 +462,9 @@ aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] } base64 = "0.22" bincode = "1.2.1" bitflags = "2.6.0" -blade-graphics = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" } -blade-macros = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" } -blade-util = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" } +blade-graphics = { git = "https://github.com/kvark/blade", rev = "bfa594e" } +blade-macros = { git = "https://github.com/kvark/blade", rev = "bfa594e" } +blade-util = { git = "https://github.com/kvark/blade", rev = "bfa594e" } blake3 = "1.5.3" bytes = "1.0" cargo_metadata = "0.19" diff --git a/crates/gpui/src/platform/blade/blade_renderer.rs b/crates/gpui/src/platform/blade/blade_renderer.rs index cc1df7748ba6b7947ab53a86baa8ab31644ac05d..1f60920bcc928c97c1f2b2c06e22ed235217c87e 100644 --- a/crates/gpui/src/platform/blade/blade_renderer.rs +++ b/crates/gpui/src/platform/blade/blade_renderer.rs @@ -371,7 +371,7 @@ impl BladeRenderer { .or_else(|| { [4, 2, 1] .into_iter() - .find(|count| context.gpu.supports_texture_sample_count(*count)) + .find(|&n| (context.gpu.capabilities().sample_count_mask & n) != 0) }) .unwrap_or(1); let pipelines = BladePipelines::new(&context.gpu, surface.info(), path_sample_count); From 6e2922367c16344c44d56181ba6f7348869501b7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 4 Sep 2025 13:41:47 -0400 
Subject: [PATCH 005/109] Use full SHA for `blade` dependency (#37554) In https://github.com/zed-industries/zed/pull/37516 we updated the `blade` dependency, but used a short SHA. No reason to not use the full SHA. Release Notes: - N/A --- Cargo.lock | 6 +++--- Cargo.toml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d31c8ecd88713f939293d022533715e39a48ed43..50632ef0a4b8f23b9301c8525fe83235e49ff5f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2291,7 +2291,7 @@ dependencies = [ [[package]] name = "blade-graphics" version = "0.6.0" -source = "git+https://github.com/kvark/blade?rev=bfa594e#bfa594ea697d4b6326ea29f747525c85ecf933b9" +source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9" dependencies = [ "ash", "ash-window", @@ -2324,7 +2324,7 @@ dependencies = [ [[package]] name = "blade-macros" version = "0.3.0" -source = "git+https://github.com/kvark/blade?rev=bfa594e#bfa594ea697d4b6326ea29f747525c85ecf933b9" +source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9" dependencies = [ "proc-macro2", "quote", @@ -2334,7 +2334,7 @@ dependencies = [ [[package]] name = "blade-util" version = "0.2.0" -source = "git+https://github.com/kvark/blade?rev=bfa594e#bfa594ea697d4b6326ea29f747525c85ecf933b9" +source = "git+https://github.com/kvark/blade?rev=bfa594ea697d4b6326ea29f747525c85ecf933b9#bfa594ea697d4b6326ea29f747525c85ecf933b9" dependencies = [ "blade-graphics", "bytemuck", diff --git a/Cargo.toml b/Cargo.toml index 1cce7701c01bc2391c7ec7b505bd945226d7ce11..6c9ca3b4a6e636adc32a6e1f48386bd240055c12 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -462,9 +462,9 @@ aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] } base64 = "0.22" bincode = "1.2.1" bitflags = "2.6.0" -blade-graphics = { git = "https://github.com/kvark/blade", rev = "bfa594e" } -blade-macros 
= { git = "https://github.com/kvark/blade", rev = "bfa594e" } -blade-util = { git = "https://github.com/kvark/blade", rev = "bfa594e" } +blade-graphics = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" } +blade-macros = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" } +blade-util = { git = "https://github.com/kvark/blade", rev = "bfa594ea697d4b6326ea29f747525c85ecf933b9" } blake3 = "1.5.3" bytes = "1.0" cargo_metadata = "0.19" From caebd0cc4ddfd4a0838378d8ace632a92f682328 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 4 Sep 2025 13:55:20 -0400 Subject: [PATCH 006/109] debugger: Fix stack frame filter crash (#37555) The crash was caused by not accounting for the fact that a range of collapse frames only counts as one entry. Causing the filter indices to overshoot for indices after collapse frames (it was counting all collapse frames instead of just one). The test missed this because it all happened in one `cx.update` closure and didn't render the stack frame list when the filter was applied. The test has been updated to account for this. 
Release Notes: - N/A Co-authored-by: Cole Miller --- .../src/session/running/stack_frame_list.rs | 60 ++++-- .../debugger_ui/src/tests/stack_frame_list.rs | 194 ++++++++++++------ 2 files changed, 165 insertions(+), 89 deletions(-) diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index f80173c365a047da39733c94964c473bef579e1c..e51b8da362a581c96d2872a213a8be32ff31b097 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -28,8 +28,8 @@ pub enum StackFrameListEvent { } /// Represents the filter applied to the stack frame list -#[derive(PartialEq, Eq, Copy, Clone)] -enum StackFrameFilter { +#[derive(PartialEq, Eq, Copy, Clone, Debug)] +pub(crate) enum StackFrameFilter { /// Show all frames All, /// Show only frames from the user's code @@ -174,19 +174,29 @@ impl StackFrameList { #[cfg(test)] pub(crate) fn dap_stack_frames(&self, cx: &mut App) -> Vec { - self.stack_frames(cx) - .unwrap_or_default() - .into_iter() - .enumerate() - .filter(|(ix, _)| { - self.list_filter == StackFrameFilter::All - || self - .filter_entries_indices - .binary_search_by_key(&ix, |ix| ix) - .is_ok() - }) - .map(|(_, stack_frame)| stack_frame.dap) - .collect() + match self.list_filter { + StackFrameFilter::All => self + .stack_frames(cx) + .unwrap_or_default() + .into_iter() + .map(|stack_frame| stack_frame.dap) + .collect(), + StackFrameFilter::OnlyUserFrames => self + .filter_entries_indices + .iter() + .map(|ix| match &self.entries[*ix] { + StackFrameEntry::Label(label) => label, + StackFrameEntry::Collapsed(_) => panic!("Collapsed tabs should not be visible"), + StackFrameEntry::Normal(frame) => frame, + }) + .cloned() + .collect(), + } + } + + #[cfg(test)] + pub(crate) fn list_filter(&self) -> StackFrameFilter { + self.list_filter } pub fn opened_stack_frame_id(&self) -> Option { @@ -246,6 +256,7 @@ impl StackFrameList { 
self.entries.clear(); self.selected_ix = None; self.list_state.reset(0); + self.filter_entries_indices.clear(); cx.emit(StackFrameListEvent::BuiltEntries); cx.notify(); return; @@ -263,7 +274,7 @@ impl StackFrameList { .unwrap_or_default(); let mut filter_entries_indices = Vec::default(); - for (ix, stack_frame) in stack_frames.iter().enumerate() { + for stack_frame in stack_frames.iter() { let frame_in_visible_worktree = stack_frame.dap.source.as_ref().is_some_and(|source| { source.path.as_ref().is_some_and(|path| { worktree_prefixes @@ -273,10 +284,6 @@ impl StackFrameList { }) }); - if frame_in_visible_worktree { - filter_entries_indices.push(ix); - } - match stack_frame.dap.presentation_hint { Some(dap::StackFramePresentationHint::Deemphasize) | Some(dap::StackFramePresentationHint::Subtle) => { @@ -302,6 +309,9 @@ impl StackFrameList { first_stack_frame_with_path.get_or_insert(entries.len()); } entries.push(StackFrameEntry::Normal(stack_frame.dap.clone())); + if frame_in_visible_worktree { + filter_entries_indices.push(entries.len() - 1); + } } } } @@ -309,7 +319,6 @@ impl StackFrameList { let collapsed_entries = std::mem::take(&mut collapsed_entries); if !collapsed_entries.is_empty() { entries.push(StackFrameEntry::Collapsed(collapsed_entries)); - self.filter_entries_indices.push(entries.len() - 1); } self.entries = entries; self.filter_entries_indices = filter_entries_indices; @@ -612,7 +621,16 @@ impl StackFrameList { let entries = std::mem::take(stack_frames) .into_iter() .map(StackFrameEntry::Normal); + // HERE + let entries_len = entries.len(); self.entries.splice(ix..ix + 1, entries); + let (Ok(filtered_indices_start) | Err(filtered_indices_start)) = + self.filter_entries_indices.binary_search(&ix); + + for idx in &mut self.filter_entries_indices[filtered_indices_start..] 
{ + *idx += entries_len - 1; + } + self.selected_ix = Some(ix); self.list_state.reset(self.entries.len()); cx.emit(StackFrameListEvent::BuiltEntries); diff --git a/crates/debugger_ui/src/tests/stack_frame_list.rs b/crates/debugger_ui/src/tests/stack_frame_list.rs index 023056224e177bb053f5188ced59c059c9c8ad32..a61a31d270c9d599f30185d7da3c825c51bb7898 100644 --- a/crates/debugger_ui/src/tests/stack_frame_list.rs +++ b/crates/debugger_ui/src/tests/stack_frame_list.rs @@ -1,6 +1,6 @@ use crate::{ debugger_panel::DebugPanel, - session::running::stack_frame_list::StackFrameEntry, + session::running::stack_frame_list::{StackFrameEntry, StackFrameFilter}, tests::{active_debug_session_panel, init_test, init_test_workspace, start_debug_session}, }; use dap::{ @@ -867,6 +867,28 @@ async fn test_stack_frame_filter(executor: BackgroundExecutor, cx: &mut TestAppC }, StackFrame { id: 4, + name: "node:internal/modules/run_main2".into(), + source: Some(dap::Source { + name: Some("run_main.js".into()), + path: Some(path!("/usr/lib/node/internal/modules/run_main2.js").into()), + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }), + line: 50, + column: 1, + end_line: None, + end_column: None, + can_restart: None, + instruction_pointer_reference: None, + module_id: None, + presentation_hint: Some(dap::StackFramePresentationHint::Deemphasize), + }, + StackFrame { + id: 5, name: "doSomething".into(), source: Some(dap::Source { name: Some("test.js".into()), @@ -957,83 +979,119 @@ async fn test_stack_frame_filter(executor: BackgroundExecutor, cx: &mut TestAppC cx.run_until_parked(); - active_debug_session_panel(workspace, cx).update_in(cx, |debug_panel_item, window, cx| { - let stack_frame_list = debug_panel_item - .running_state() - .update(cx, |state, _| state.stack_frame_list().clone()); + let stack_frame_list = + active_debug_session_panel(workspace, cx).update_in(cx, |debug_panel_item, window, cx| { + let 
stack_frame_list = debug_panel_item + .running_state() + .update(cx, |state, _| state.stack_frame_list().clone()); + + stack_frame_list.update(cx, |stack_frame_list, cx| { + stack_frame_list.build_entries(true, window, cx); + + // Verify we have the expected collapsed structure + assert_eq!( + stack_frame_list.entries(), + &vec![ + StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()), + StackFrameEntry::Collapsed(vec![ + stack_frames_for_assertions[1].clone(), + stack_frames_for_assertions[2].clone(), + stack_frames_for_assertions[3].clone() + ]), + StackFrameEntry::Normal(stack_frames_for_assertions[4].clone()), + ] + ); + }); - stack_frame_list.update(cx, |stack_frame_list, cx| { - stack_frame_list.build_entries(true, window, cx); + stack_frame_list + }); - // Verify we have the expected collapsed structure - assert_eq!( - stack_frame_list.entries(), - &vec![ - StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()), - StackFrameEntry::Collapsed(vec![ - stack_frames_for_assertions[1].clone(), - stack_frames_for_assertions[2].clone() - ]), - StackFrameEntry::Normal(stack_frames_for_assertions[3].clone()), - ] - ); + stack_frame_list.update(cx, |stack_frame_list, cx| { + let all_frames = stack_frame_list.flatten_entries(true, false); + assert_eq!(all_frames.len(), 5, "Should see all 5 frames initially"); - // Test 1: Verify filtering works - let all_frames = stack_frame_list.flatten_entries(true, false); - assert_eq!(all_frames.len(), 4, "Should see all 4 frames initially"); + stack_frame_list + .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + assert_eq!( + stack_frame_list.list_filter(), + StackFrameFilter::OnlyUserFrames + ); + }); - // Toggle to user frames only - stack_frame_list - .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + stack_frame_list.update(cx, |stack_frame_list, cx| { + let user_frames = stack_frame_list.dap_stack_frames(cx); + assert_eq!(user_frames.len(), 
2, "Should only see 2 user frames"); + assert_eq!(user_frames[0].name, "main"); + assert_eq!(user_frames[1].name, "doSomething"); + + // Toggle back to all frames + stack_frame_list + .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + assert_eq!(stack_frame_list.list_filter(), StackFrameFilter::All); + }); - let user_frames = stack_frame_list.dap_stack_frames(cx); - assert_eq!(user_frames.len(), 2, "Should only see 2 user frames"); - assert_eq!(user_frames[0].name, "main"); - assert_eq!(user_frames[1].name, "doSomething"); + stack_frame_list.update(cx, |stack_frame_list, cx| { + let all_frames_again = stack_frame_list.flatten_entries(true, false); + assert_eq!( + all_frames_again.len(), + 5, + "Should see all 5 frames after toggling back" + ); - // Test 2: Verify filtering toggles correctly - // Check we can toggle back and see all frames again + // Test 3: Verify collapsed entries stay expanded + stack_frame_list.expand_collapsed_entry(1, cx); + assert_eq!( + stack_frame_list.entries(), + &vec![ + StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()), + StackFrameEntry::Normal(stack_frames_for_assertions[1].clone()), + StackFrameEntry::Normal(stack_frames_for_assertions[2].clone()), + StackFrameEntry::Normal(stack_frames_for_assertions[3].clone()), + StackFrameEntry::Normal(stack_frames_for_assertions[4].clone()), + ] + ); - // Toggle back to all frames - stack_frame_list - .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + stack_frame_list + .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + assert_eq!( + stack_frame_list.list_filter(), + StackFrameFilter::OnlyUserFrames + ); + }); - let all_frames_again = stack_frame_list.flatten_entries(true, false); - assert_eq!( - all_frames_again.len(), - 4, - "Should see all 4 frames after toggling back" - ); + stack_frame_list.update(cx, |stack_frame_list, cx| { + stack_frame_list + 
.toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + assert_eq!(stack_frame_list.list_filter(), StackFrameFilter::All); + }); - // Test 3: Verify collapsed entries stay expanded - stack_frame_list.expand_collapsed_entry(1, cx); - assert_eq!( - stack_frame_list.entries(), - &vec![ - StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()), - StackFrameEntry::Normal(stack_frames_for_assertions[1].clone()), - StackFrameEntry::Normal(stack_frames_for_assertions[2].clone()), - StackFrameEntry::Normal(stack_frames_for_assertions[3].clone()), - ] - ); + stack_frame_list.update(cx, |stack_frame_list, cx| { + stack_frame_list + .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + assert_eq!( + stack_frame_list.list_filter(), + StackFrameFilter::OnlyUserFrames + ); - // Toggle filter twice - stack_frame_list - .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); - stack_frame_list - .toggle_frame_filter(Some(project::debugger::session::ThreadStatus::Stopped), cx); + assert_eq!( + stack_frame_list.dap_stack_frames(cx).as_slice(), + &[ + stack_frames_for_assertions[0].clone(), + stack_frames_for_assertions[4].clone() + ] + ); - // Verify entries remain expanded - assert_eq!( - stack_frame_list.entries(), - &vec![ - StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()), - StackFrameEntry::Normal(stack_frames_for_assertions[1].clone()), - StackFrameEntry::Normal(stack_frames_for_assertions[2].clone()), - StackFrameEntry::Normal(stack_frames_for_assertions[3].clone()), - ], - "Expanded entries should remain expanded after toggling filter" - ); - }); + // Verify entries remain expanded + assert_eq!( + stack_frame_list.entries(), + &vec![ + StackFrameEntry::Normal(stack_frames_for_assertions[0].clone()), + StackFrameEntry::Normal(stack_frames_for_assertions[1].clone()), + StackFrameEntry::Normal(stack_frames_for_assertions[2].clone()), + 
StackFrameEntry::Normal(stack_frames_for_assertions[3].clone()), + StackFrameEntry::Normal(stack_frames_for_assertions[4].clone()), + ], + "Expanded entries should remain expanded after toggling filter" + ); }); } From 9e111054837ef27e3a95709867215e54d3e52ac6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 4 Sep 2025 14:07:50 -0400 Subject: [PATCH 007/109] toml: Extract to zed-extensions/toml repository (#37558) This PR extracts the TOML extension to the [zed-extensions/toml](https://github.com/zed-extensions/toml) repository. Release Notes: - N/A --- .config/hakari.toml | 1 - Cargo.lock | 7 - Cargo.toml | 1 - docs/src/languages/toml.md | 2 +- extensions/toml/Cargo.toml | 16 -- extensions/toml/LICENSE-APACHE | 1 - extensions/toml/extension.toml | 18 --- extensions/toml/languages/toml/brackets.scm | 3 - extensions/toml/languages/toml/config.toml | 11 -- extensions/toml/languages/toml/highlights.scm | 38 ----- extensions/toml/languages/toml/indents.scm | 0 extensions/toml/languages/toml/outline.scm | 15 -- extensions/toml/languages/toml/overrides.scm | 2 - extensions/toml/languages/toml/redactions.scm | 1 - .../toml/languages/toml/textobjects.scm | 6 - extensions/toml/src/toml.rs | 152 ------------------ 16 files changed, 1 insertion(+), 273 deletions(-) delete mode 100644 extensions/toml/Cargo.toml delete mode 120000 extensions/toml/LICENSE-APACHE delete mode 100644 extensions/toml/extension.toml delete mode 100644 extensions/toml/languages/toml/brackets.scm delete mode 100644 extensions/toml/languages/toml/config.toml delete mode 100644 extensions/toml/languages/toml/highlights.scm delete mode 100644 extensions/toml/languages/toml/indents.scm delete mode 100644 extensions/toml/languages/toml/outline.scm delete mode 100644 extensions/toml/languages/toml/overrides.scm delete mode 100644 extensions/toml/languages/toml/redactions.scm delete mode 100644 extensions/toml/languages/toml/textobjects.scm delete mode 100644 extensions/toml/src/toml.rs diff --git 
a/.config/hakari.toml b/.config/hakari.toml index 8ce0b77490482ab5ff2d781fb78fd86b56959a6a..e8f094e618b39138df95bbdb58e5800cd396fad5 100644 --- a/.config/hakari.toml +++ b/.config/hakari.toml @@ -41,5 +41,4 @@ workspace-members = [ "slash_commands_example", "zed_snippets", "zed_test_extension", - "zed_toml", ] diff --git a/Cargo.lock b/Cargo.lock index 50632ef0a4b8f23b9301c8525fe83235e49ff5f8..1a15d11e664f808d4ba68d61a3ad7a6c557c2420 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20653,13 +20653,6 @@ dependencies = [ "zed_extension_api 0.6.0", ] -[[package]] -name = "zed_toml" -version = "0.1.4" -dependencies = [ - "zed_extension_api 0.1.0", -] - [[package]] name = "zeno" version = "0.3.2" diff --git a/Cargo.toml b/Cargo.toml index 6c9ca3b4a6e636adc32a6e1f48386bd240055c12..f389153efe9d0719187d14bb554042fcf2888376 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -211,7 +211,6 @@ members = [ "extensions/slash-commands-example", "extensions/snippets", "extensions/test-extension", - "extensions/toml", # # Tooling diff --git a/docs/src/languages/toml.md b/docs/src/languages/toml.md index eb51dbb93bf3031449744ccd4617992f46d31351..40a6b880fccce87c20a61029418490021719fb98 100644 --- a/docs/src/languages/toml.md +++ b/docs/src/languages/toml.md @@ -1,6 +1,6 @@ # TOML -TOML support is available through the [TOML extension](https://github.com/zed-industries/zed/tree/main/extensions/toml). +TOML support is available through the [TOML extension](https://github.com/zed-extensions/toml). 
- Tree-sitter: [tree-sitter/tree-sitter-toml](https://github.com/tree-sitter/tree-sitter-toml) - Language Server: [tamasfe/taplo](https://github.com/tamasfe/taplo) diff --git a/extensions/toml/Cargo.toml b/extensions/toml/Cargo.toml deleted file mode 100644 index 25c2c418084dc89fe4c402c1abe13d5535bf6447..0000000000000000000000000000000000000000 --- a/extensions/toml/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "zed_toml" -version = "0.1.4" -edition.workspace = true -publish.workspace = true -license = "Apache-2.0" - -[lints] -workspace = true - -[lib] -path = "src/toml.rs" -crate-type = ["cdylib"] - -[dependencies] -zed_extension_api = "0.1.0" diff --git a/extensions/toml/LICENSE-APACHE b/extensions/toml/LICENSE-APACHE deleted file mode 120000 index 1cd601d0a3affae83854be02a0afdec3b7a9ec4d..0000000000000000000000000000000000000000 --- a/extensions/toml/LICENSE-APACHE +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-APACHE \ No newline at end of file diff --git a/extensions/toml/extension.toml b/extensions/toml/extension.toml deleted file mode 100644 index 5be7213c40362ec4bbeba8cb0846a507d9ec9e7e..0000000000000000000000000000000000000000 --- a/extensions/toml/extension.toml +++ /dev/null @@ -1,18 +0,0 @@ -id = "toml" -name = "TOML" -description = "TOML support." 
-version = "0.1.4" -schema_version = 1 -authors = [ - "Max Brunsfeld ", - "Ammar Arif " -] -repository = "https://github.com/zed-industries/zed" - -[language_servers.taplo] -name = "Taplo" -language = "TOML" - -[grammars.toml] -repository = "https://github.com/tree-sitter/tree-sitter-toml" -commit = "342d9be207c2dba869b9967124c679b5e6fd0ebe" diff --git a/extensions/toml/languages/toml/brackets.scm b/extensions/toml/languages/toml/brackets.scm deleted file mode 100644 index 9e8c9cd93c30f7697ead2161295b4583ffdfb93b..0000000000000000000000000000000000000000 --- a/extensions/toml/languages/toml/brackets.scm +++ /dev/null @@ -1,3 +0,0 @@ -("[" @open "]" @close) -("{" @open "}" @close) -("\"" @open "\"" @close) diff --git a/extensions/toml/languages/toml/config.toml b/extensions/toml/languages/toml/config.toml deleted file mode 100644 index f62290d9e9244603eaa22dc98297f84f694635e4..0000000000000000000000000000000000000000 --- a/extensions/toml/languages/toml/config.toml +++ /dev/null @@ -1,11 +0,0 @@ -name = "TOML" -grammar = "toml" -path_suffixes = ["Cargo.lock", "toml", "Pipfile", "uv.lock"] -line_comments = ["# "] -autoclose_before = ",]}" -brackets = [ - { start = "{", end = "}", close = true, newline = true }, - { start = "[", end = "]", close = true, newline = true }, - { start = "\"", end = "\"", close = true, newline = false, not_in = ["comment", "string"] }, - { start = "'", end = "'", close = true, newline = false, not_in = ["comment", "string"] }, -] diff --git a/extensions/toml/languages/toml/highlights.scm b/extensions/toml/languages/toml/highlights.scm deleted file mode 100644 index 4be265cce74b3d8916e96f428550ea405db915e0..0000000000000000000000000000000000000000 --- a/extensions/toml/languages/toml/highlights.scm +++ /dev/null @@ -1,38 +0,0 @@ -; Properties -;----------- - -(bare_key) @property -(quoted_key) @property - -; Literals -;--------- - -(boolean) @constant -(comment) @comment -(integer) @number -(float) @number -(string) @string 
-(escape_sequence) @string.escape -(offset_date_time) @string.special -(local_date_time) @string.special -(local_date) @string.special -(local_time) @string.special - -; Punctuation -;------------ - -[ - "." - "," -] @punctuation.delimiter - -"=" @operator - -[ - "[" - "]" - "[[" - "]]" - "{" - "}" -] @punctuation.bracket diff --git a/extensions/toml/languages/toml/indents.scm b/extensions/toml/languages/toml/indents.scm deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/extensions/toml/languages/toml/outline.scm b/extensions/toml/languages/toml/outline.scm deleted file mode 100644 index 0b3794962835a6c993e212aef5607bc859196fe9..0000000000000000000000000000000000000000 --- a/extensions/toml/languages/toml/outline.scm +++ /dev/null @@ -1,15 +0,0 @@ -(table - . - "[" - . - (_) @name) @item - -(table_array_element - . - "[[" - . - (_) @name) @item - -(pair - . - (_) @name) @item diff --git a/extensions/toml/languages/toml/overrides.scm b/extensions/toml/languages/toml/overrides.scm deleted file mode 100644 index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..0000000000000000000000000000000000000000 --- a/extensions/toml/languages/toml/overrides.scm +++ /dev/null @@ -1,2 +0,0 @@ -(comment) @comment.inclusive -(string) @string diff --git a/extensions/toml/languages/toml/redactions.scm b/extensions/toml/languages/toml/redactions.scm deleted file mode 100644 index a906e9ac7b3e6561937ec7642e851a71fa2e3fec..0000000000000000000000000000000000000000 --- a/extensions/toml/languages/toml/redactions.scm +++ /dev/null @@ -1 +0,0 @@ -(pair (bare_key) "=" (_) @redact) diff --git a/extensions/toml/languages/toml/textobjects.scm b/extensions/toml/languages/toml/textobjects.scm deleted file mode 100644 index f5b4856e27a76a90d577f54fdd6104ec6bce795f..0000000000000000000000000000000000000000 --- a/extensions/toml/languages/toml/textobjects.scm +++ /dev/null @@ -1,6 +0,0 @@ -(comment)+ @comment -(table "[" (_) "]" - 
(_)* @class.inside) @class.around - -(table_array_element "[[" (_) "]]" - (_)* @class.inside) @class.around diff --git a/extensions/toml/src/toml.rs b/extensions/toml/src/toml.rs deleted file mode 100644 index c9b96aecacd17d192fad9b6801973c2f2389cf98..0000000000000000000000000000000000000000 --- a/extensions/toml/src/toml.rs +++ /dev/null @@ -1,152 +0,0 @@ -use std::fs; -use zed::LanguageServerId; -use zed_extension_api::settings::LspSettings; -use zed_extension_api::{self as zed, Result}; - -struct TaploBinary { - path: String, - args: Option>, -} - -struct TomlExtension { - cached_binary_path: Option, -} - -impl TomlExtension { - fn language_server_binary( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let binary_settings = LspSettings::for_worktree("taplo", worktree) - .ok() - .and_then(|lsp_settings| lsp_settings.binary); - let binary_args = binary_settings - .as_ref() - .and_then(|binary_settings| binary_settings.arguments.clone()); - - if let Some(path) = binary_settings.and_then(|binary_settings| binary_settings.path) { - return Ok(TaploBinary { - path, - args: binary_args, - }); - } - - if let Some(path) = worktree.which("taplo") { - return Ok(TaploBinary { - path, - args: binary_args, - }); - } - - if let Some(path) = &self.cached_binary_path - && fs::metadata(path).is_ok_and(|stat| stat.is_file()) - { - return Ok(TaploBinary { - path: path.clone(), - args: binary_args, - }); - } - - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::CheckingForUpdate, - ); - let release = zed::latest_github_release( - "tamasfe/taplo", - zed::GithubReleaseOptions { - require_assets: true, - pre_release: false, - }, - )?; - - let (platform, arch) = zed::current_platform(); - let asset_name = format!( - "taplo-{os}-{arch}.gz", - arch = match arch { - zed::Architecture::Aarch64 => "aarch64", - zed::Architecture::X86 => "x86", - zed::Architecture::X8664 => "x86_64", 
- }, - os = match platform { - zed::Os::Mac => "darwin", - zed::Os::Linux => "linux", - zed::Os::Windows => "windows", - }, - ); - - let asset = release - .assets - .iter() - .find(|asset| asset.name == asset_name) - .ok_or_else(|| format!("no asset found matching {:?}", asset_name))?; - - let version_dir = format!("taplo-{}", release.version); - fs::create_dir_all(&version_dir) - .map_err(|err| format!("failed to create directory '{version_dir}': {err}"))?; - - let binary_path = format!( - "{version_dir}/{bin_name}", - bin_name = match platform { - zed::Os::Windows => "taplo.exe", - zed::Os::Mac | zed::Os::Linux => "taplo", - } - ); - - if !fs::metadata(&binary_path).is_ok_and(|stat| stat.is_file()) { - zed::set_language_server_installation_status( - language_server_id, - &zed::LanguageServerInstallationStatus::Downloading, - ); - - zed::download_file( - &asset.download_url, - &binary_path, - zed::DownloadedFileType::Gzip, - ) - .map_err(|err| format!("failed to download file: {err}"))?; - - zed::make_file_executable(&binary_path)?; - - let entries = fs::read_dir(".") - .map_err(|err| format!("failed to list working directory {err}"))?; - for entry in entries { - let entry = entry.map_err(|err| format!("failed to load directory entry {err}"))?; - if entry.file_name().to_str() != Some(&version_dir) { - fs::remove_dir_all(entry.path()).ok(); - } - } - } - - self.cached_binary_path = Some(binary_path.clone()); - Ok(TaploBinary { - path: binary_path, - args: binary_args, - }) - } -} - -impl zed::Extension for TomlExtension { - fn new() -> Self { - Self { - cached_binary_path: None, - } - } - - fn language_server_command( - &mut self, - language_server_id: &LanguageServerId, - worktree: &zed::Worktree, - ) -> Result { - let taplo_binary = self.language_server_binary(language_server_id, worktree)?; - Ok(zed::Command { - command: taplo_binary.path, - args: taplo_binary - .args - .unwrap_or_else(|| vec!["lsp".to_string(), "stdio".to_string()]), - env: Default::default(), 
- }) - } -} - -zed::register_extension!(TomlExtension); From 9d943589713f20767a355ba6f0d108d59bc31482 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 4 Sep 2025 14:33:56 -0400 Subject: [PATCH 008/109] acp: Keep diff editors in sync with `AgentFontSize` global (#37559) Release Notes: - agent: Fixed `cmd-+` and `cmd--` not affecting the font size of diffs. --- crates/agent_ui/src/acp/entry_view_state.rs | 2 +- crates/agent_ui/src/acp/thread_view.rs | 11 ++++++----- crates/theme/src/settings.rs | 3 ++- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/crates/agent_ui/src/acp/entry_view_state.rs b/crates/agent_ui/src/acp/entry_view_state.rs index e60b923ca78c4613e9b8d8063a280f560d788d44..ec57ea7e6df3244b6ea1bcb99212d845fa68c457 100644 --- a/crates/agent_ui/src/acp/entry_view_state.rs +++ b/crates/agent_ui/src/acp/entry_view_state.rs @@ -207,7 +207,7 @@ impl EntryViewState { self.entries.drain(range); } - pub fn settings_changed(&mut self, cx: &mut App) { + pub fn agent_font_size_changed(&mut self, cx: &mut App) { for entry in self.entries.iter() { match entry { Entry::UserMessage { .. } | Entry::AssistantMessage { .. 
} => {} diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 3407f4e878e6452322aba1b5009b582f322db4b4..b4d56ad05be1a66e9740c2432a9bd08b1adfee0e 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -43,7 +43,7 @@ use std::{collections::BTreeMap, rc::Rc, time::Duration}; use task::SpawnInTerminal; use terminal_view::terminal_panel::TerminalPanel; use text::Anchor; -use theme::ThemeSettings; +use theme::{AgentFontSize, ThemeSettings}; use ui::{ Callout, CommonAnimationExt, Disclosure, Divider, DividerColor, ElevationIndex, KeyBinding, PopoverMenuHandle, Scrollbar, ScrollbarState, SpinnerLabel, TintColor, Tooltip, prelude::*, @@ -290,7 +290,7 @@ pub struct AcpThreadView { is_loading_contents: bool, new_server_version_available: Option, _cancel_task: Option>, - _subscriptions: [Subscription; 3], + _subscriptions: [Subscription; 4], } enum ThreadState { @@ -380,7 +380,8 @@ impl AcpThreadView { }); let subscriptions = [ - cx.observe_global_in::(window, Self::settings_changed), + cx.observe_global_in::(window, Self::agent_font_size_changed), + cx.observe_global_in::(window, Self::agent_font_size_changed), cx.subscribe_in(&message_editor, window, Self::handle_message_editor_event), cx.subscribe_in(&entry_view_state, window, Self::handle_entry_view_event), ]; @@ -4735,9 +4736,9 @@ impl AcpThreadView { ) } - fn settings_changed(&mut self, _window: &mut Window, cx: &mut Context) { + fn agent_font_size_changed(&mut self, _window: &mut Window, cx: &mut Context) { self.entry_view_state.update(cx, |entry_view_state, cx| { - entry_view_state.settings_changed(cx); + entry_view_state.agent_font_size_changed(cx); }); } diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 11db22d97485f5d400abdd8638da501abd55a192..825176a2a0b5e35c60606d0922cef37fe91caea7 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -253,8 +253,9 @@ pub(crate) struct 
UiFontSize(Pixels); impl Global for UiFontSize {} +/// In-memory override for the font size in the agent panel. #[derive(Default)] -pub(crate) struct AgentFontSize(Pixels); +pub struct AgentFontSize(Pixels); impl Global for AgentFontSize {} From a85946eba8cd2791a716f90b49df71c81002c3d3 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 4 Sep 2025 14:54:32 -0400 Subject: [PATCH 009/109] docs: Update TOML docs (#37561) This PR updates the TOML docs to remove references to Taplo and suggest the Tombi extension for users wanting language server support. Relates to https://github.com/zed-industries/zed/issues/36766. Release Notes: - N/A --- docs/src/languages/toml.md | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/docs/src/languages/toml.md b/docs/src/languages/toml.md index 40a6b880fccce87c20a61029418490021719fb98..46b93b67eb4ba85dea0c297adbfe1a261b6a22dc 100644 --- a/docs/src/languages/toml.md +++ b/docs/src/languages/toml.md @@ -1,22 +1,7 @@ # TOML -TOML support is available through the [TOML extension](https://github.com/zed-extensions/toml). +TOML support is available through the [TOML extension](https://zed.dev/extensions/toml). - Tree-sitter: [tree-sitter/tree-sitter-toml](https://github.com/tree-sitter/tree-sitter-toml) -- Language Server: [tamasfe/taplo](https://github.com/tamasfe/taplo) -## Configuration - -You can control the behavior of the Taplo TOML language server by adding a `.taplo.toml` file to the root of your project. See the [Taplo Configuration File](https://taplo.tamasfe.dev/configuration/file.html#configuration-file) and [Taplo Formatter Options](https://taplo.tamasfe.dev/configuration/formatter-options.html) documentation for more. - -```toml -# .taplo.toml -[formatting] -align_comments = false -reorder_keys = true - -include = ["Cargo.toml", "some_directory/**/*.toml"] -# exclude = ["vendor/**/*.toml"] -``` - -Note: The taplo language server will not automatically pickup changes to `.taplo.toml`. 
You must manually trigger {#action editor::RestartLanguageServer} or reload Zed for it to pickup changes. +A TOML language server is available in the [Tombi extension](https://zed.dev/extensions/tombi). From 223fda2fe221e5c6b3bd90b61b8e1f444203a6f6 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 4 Sep 2025 22:05:21 +0300 Subject: [PATCH 010/109] Make remote projects to sync in local user settings (#37560) Closes https://github.com/zed-industries/zed/issues/20024 Closes https://github.com/zed-industries/zed/issues/23489 https://github.com/user-attachments/assets/6466e0c1-4188-4980-8bb6-52ef6e7591c9 Release Notes: - Made remote projects to sync in local user settings --- crates/project/src/project.rs | 9 ++- crates/project/src/project_settings.rs | 55 +++++++++++++++++-- crates/proto/proto/worktree.proto | 5 ++ crates/proto/proto/zed.proto | 4 +- crates/proto/src/proto.rs | 2 + .../remote_server/src/remote_editing_tests.rs | 6 +- crates/remote_server/src/unix.rs | 48 ++++++++-------- crates/settings/src/settings_store.rs | 21 +++---- 8 files changed, 105 insertions(+), 45 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 46dd3b7d9e51aa06aa45b9cccb87533f2b90f58c..4adebabc5a03636ca81fbc3b04a277c2d6d03a66 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1271,6 +1271,7 @@ impl Project { fs.clone(), worktree_store.clone(), task_store.clone(), + Some(remote_proto.clone()), cx, ) }); @@ -1521,7 +1522,13 @@ impl Project { })?; let settings_observer = cx.new(|cx| { - SettingsObserver::new_remote(fs.clone(), worktree_store.clone(), task_store.clone(), cx) + SettingsObserver::new_remote( + fs.clone(), + worktree_store.clone(), + task_store.clone(), + None, + cx, + ) })?; let git_store = cx.new(|cx| { diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index c98065116e00fd6c643a2c809cf6e8fb1c51532b..57969ec9938602b477293aa3033a31bc8b3deae1 100644 --- 
a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -4,7 +4,7 @@ use context_server::ContextServerCommand; use dap::adapters::DebugAdapterName; use fs::Fs; use futures::StreamExt as _; -use gpui::{App, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, Task}; +use gpui::{App, AsyncApp, BorrowAppContext, Context, Entity, EventEmitter, Subscription, Task}; use lsp::LanguageServerName; use paths::{ EDITORCONFIG_NAME, local_debug_file_relative_path, local_settings_file_relative_path, @@ -13,7 +13,7 @@ use paths::{ }; use rpc::{ AnyProtoClient, TypedEnvelope, - proto::{self, FromProto, ToProto}, + proto::{self, FromProto, REMOTE_SERVER_PROJECT_ID, ToProto}, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -658,6 +658,7 @@ pub struct SettingsObserver { worktree_store: Entity, project_id: u64, task_store: Entity, + _user_settings_watcher: Option, _global_task_config_watcher: Task<()>, _global_debug_config_watcher: Task<()>, } @@ -670,6 +671,7 @@ pub struct SettingsObserver { impl SettingsObserver { pub fn init(client: &AnyProtoClient) { client.add_entity_message_handler(Self::handle_update_worktree_settings); + client.add_entity_message_handler(Self::handle_update_user_settings); } pub fn new_local( @@ -686,7 +688,8 @@ impl SettingsObserver { task_store, mode: SettingsObserverMode::Local(fs.clone()), downstream_client: None, - project_id: 0, + _user_settings_watcher: None, + project_id: REMOTE_SERVER_PROJECT_ID, _global_task_config_watcher: Self::subscribe_to_global_task_file_changes( fs.clone(), paths::tasks_file().clone(), @@ -704,14 +707,38 @@ impl SettingsObserver { fs: Arc, worktree_store: Entity, task_store: Entity, + upstream_client: Option, cx: &mut Context, ) -> Self { + let mut user_settings_watcher = None; + if cx.try_global::().is_some() { + if let Some(upstream_client) = upstream_client { + let mut user_settings = None; + user_settings_watcher = Some(cx.observe_global::(move |_, cx| { + let 
new_settings = cx.global::().raw_user_settings(); + if Some(new_settings) != user_settings.as_ref() { + if let Some(new_settings_string) = serde_json::to_string(new_settings).ok() + { + user_settings = Some(new_settings.clone()); + upstream_client + .send(proto::UpdateUserSettings { + project_id: REMOTE_SERVER_PROJECT_ID, + contents: new_settings_string, + }) + .log_err(); + } + } + })); + } + }; + Self { worktree_store, task_store, mode: SettingsObserverMode::Remote, downstream_client: None, - project_id: 0, + project_id: REMOTE_SERVER_PROJECT_ID, + _user_settings_watcher: user_settings_watcher, _global_task_config_watcher: Self::subscribe_to_global_task_file_changes( fs.clone(), paths::tasks_file().clone(), @@ -803,6 +830,24 @@ impl SettingsObserver { Ok(()) } + async fn handle_update_user_settings( + _: Entity, + envelope: TypedEnvelope, + cx: AsyncApp, + ) -> anyhow::Result<()> { + let new_settings = serde_json::from_str::(&envelope.payload.contents) + .with_context(|| { + format!("deserializing {} user settings", envelope.payload.contents) + })?; + cx.update_global(|settings_store: &mut SettingsStore, cx| { + settings_store + .set_raw_user_settings(new_settings, cx) + .context("setting new user settings")?; + anyhow::Ok(()) + })??; + Ok(()) + } + fn on_worktree_store_event( &mut self, _: Entity, @@ -1089,7 +1134,7 @@ impl SettingsObserver { project_id: self.project_id, worktree_id: remote_worktree_id.to_proto(), path: directory.to_proto(), - content: file_content, + content: file_content.clone(), kind: Some(local_settings_kind_to_proto(kind).into()), }) .log_err(); diff --git a/crates/proto/proto/worktree.proto b/crates/proto/proto/worktree.proto index 67bd1925b509c6fc7727fa5cf6338e6cc00a4ae0..19a61cc4bc8d3b04103afe3a6c6b799ab92461e3 100644 --- a/crates/proto/proto/worktree.proto +++ b/crates/proto/proto/worktree.proto @@ -150,3 +150,8 @@ enum LocalSettingsKind { Editorconfig = 2; Debug = 3; } + +message UpdateUserSettings { + uint64 project_id = 1; + string 
contents = 2; +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 2222bdec082759cb75ffcdb2c7a95435f36eba11..4133b4b5eea6f14e2c9359f7318f192a8566d809 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -397,7 +397,9 @@ message Envelope { LspQuery lsp_query = 365; LspQueryResponse lsp_query_response = 366; - ToggleLspLogs toggle_lsp_logs = 367; // current max + ToggleLspLogs toggle_lsp_logs = 367; + + UpdateUserSettings update_user_settings = 368; // current max } reserved 87 to 88; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 04495fb898b1d9bdbf229bb69e1e44b8afa6d1fb..8f4e836b20ae5bae43617e10391f75c3a069a82f 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -278,6 +278,7 @@ messages!( (UpdateUserChannels, Foreground), (UpdateWorktree, Foreground), (UpdateWorktreeSettings, Foreground), + (UpdateUserSettings, Background), (UpdateRepository, Foreground), (RemoveRepository, Foreground), (UsersResponse, Foreground), @@ -583,6 +584,7 @@ entity_messages!( UpdateRepository, RemoveRepository, UpdateWorktreeSettings, + UpdateUserSettings, LspExtExpandMacro, LspExtOpenDocs, LspExtRunnables, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 353857f5871551a20315f638aa3d9653b3ed2848..c0ccaf900d18ee176bab7193c2bfb65b8555318d 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -280,7 +280,8 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo AllLanguageSettings::get_global(cx) .language(None, Some(&"Rust".into()), cx) .language_servers, - ["..."] // local settings are ignored + ["from-local-settings"], + "User language settings should be synchronized with the server settings" ) }); @@ -300,7 +301,8 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo 
AllLanguageSettings::get_global(cx) .language(None, Some(&"Rust".into()), cx) .language_servers, - ["from-server-settings".to_string()] + ["from-server-settings".to_string()], + "Server language settings should take precedence over the user settings" ) }); diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index cb671a72d9beab0983536571e81fcd78f3df21c8..4aef536f0a45b5ea943f861da2be94ab7c2c21c4 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -918,29 +918,33 @@ fn initialize_settings( }); let (mut tx, rx) = watch::channel(None); + let mut node_settings = None; cx.observe_global::(move |cx| { - let settings = &ProjectSettings::get_global(cx).node; - log::info!("Got new node settings: {:?}", settings); - let options = NodeBinaryOptions { - allow_path_lookup: !settings.ignore_system_version, - // TODO: Implement this setting - allow_binary_download: true, - use_paths: settings.path.as_ref().map(|node_path| { - let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref()); - let npm_path = settings - .npm_path - .as_ref() - .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref())); - ( - node_path.clone(), - npm_path.unwrap_or_else(|| { - let base_path = PathBuf::new(); - node_path.parent().unwrap_or(&base_path).join("npm") - }), - ) - }), - }; - tx.send(Some(options)).log_err(); + let new_node_settings = &ProjectSettings::get_global(cx).node; + if Some(new_node_settings) != node_settings.as_ref() { + log::info!("Got new node settings: {new_node_settings:?}"); + let options = NodeBinaryOptions { + allow_path_lookup: !new_node_settings.ignore_system_version, + // TODO: Implement this setting + allow_binary_download: true, + use_paths: new_node_settings.path.as_ref().map(|node_path| { + let node_path = PathBuf::from(shellexpand::tilde(node_path).as_ref()); + let npm_path = new_node_settings + .npm_path + .as_ref() + .map(|path| PathBuf::from(shellexpand::tilde(&path).as_ref())); + ( + 
node_path.clone(), + npm_path.unwrap_or_else(|| { + let base_path = PathBuf::new(); + node_path.parent().unwrap_or(&base_path).join("npm") + }), + ) + }), + }; + node_settings = Some(new_node_settings.clone()); + tx.send(Some(options)).ok(); + } }) .detach(); diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 60eb132ad8b4f6419f463f32b1874ea97be07ec1..72df08d14fb61536d147b4d1fb8b9a2466f5f0aa 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -467,6 +467,13 @@ impl SettingsStore { &self.raw_user_settings } + /// Replaces current settings with the values from the given JSON. + pub fn set_raw_user_settings(&mut self, new_settings: Value, cx: &mut App) -> Result<()> { + self.raw_user_settings = new_settings; + self.recompute_values(None, cx)?; + Ok(()) + } + /// Get the configured settings profile names. pub fn configured_settings_profiles(&self) -> impl Iterator { self.raw_user_settings @@ -525,20 +532,6 @@ impl SettingsStore { } } - pub async fn load_global_settings(fs: &Arc) -> Result { - match fs.load(paths::global_settings_file()).await { - result @ Ok(_) => result, - Err(err) => { - if let Some(e) = err.downcast_ref::() - && e.kind() == std::io::ErrorKind::NotFound - { - return Ok("{}".to_string()); - } - Err(err) - } - } - } - fn update_settings_file_inner( &self, fs: Arc, From 5f03202b5cda0baff80212af07f80454c8aca1cd Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 4 Sep 2025 15:19:02 -0400 Subject: [PATCH 011/109] settings ui: Create settings key trait (#37489) This PR separates out the associated constant `KEY` from the `Settings` trait into a new trait `SettingsKey`. 
This allows for the key trait to be derived using attributes to specify the path so that the new `SettingsUi` derive macro can use the same attributes to determine top level settings paths thereby removing the need to duplicate the path in both `Settings::KEY` and `#[settings_ui(path = "...")]` Co-authored-by: Ben Kunkle Release Notes: - N/A --------- Co-authored-by: Ben Kunkle --- Cargo.lock | 2 + crates/agent_servers/src/settings.rs | 7 +- crates/agent_settings/src/agent_settings.rs | 9 +- crates/agent_ui/src/slash_command_settings.rs | 7 +- crates/audio/src/audio_settings.rs | 9 +- crates/auto_update/src/auto_update.rs | 7 +- crates/call/src/call_settings.rs | 7 +- crates/client/src/client.rs | 17 +-- crates/collab_ui/src/panel_settings.rs | 22 ++- crates/dap/src/debugger_settings.rs | 9 +- crates/editor/src/editor_settings.rs | 7 +- .../extension_host/src/extension_settings.rs | 7 +- crates/extensions_ui/src/extensions_ui.rs | 2 +- .../file_finder/src/file_finder_settings.rs | 7 +- crates/git_hosting_providers/src/settings.rs | 7 +- crates/git_ui/src/git_panel_settings.rs | 7 +- crates/go_to_line/src/cursor_position.rs | 7 +- .../image_viewer/src/image_viewer_settings.rs | 7 +- crates/journal/src/journal.rs | 7 +- crates/language/src/language_settings.rs | 10 +- crates/language_models/src/settings.rs | 9 +- crates/onboarding/src/ai_setup_page.rs | 10 +- crates/onboarding/src/basics_page.rs | 2 +- .../src/outline_panel_settings.rs | 7 +- crates/project/src/project.rs | 79 +++++++---- crates/project/src/project_settings.rs | 9 +- .../src/project_panel_settings.rs | 7 +- .../recent_projects/src/remote_connections.rs | 7 +- crates/repl/src/jupyter_settings.rs | 7 +- crates/settings/src/base_keymap_setting.rs | 19 ++- crates/settings/src/settings.rs | 6 +- crates/settings/src/settings_store.rs | 97 +++++++++---- .../src/settings_ui_macros.rs | 134 +++++++++++++++++- crates/terminal/src/terminal_settings.rs | 7 +- crates/theme/src/settings.rs | 7 +- 
crates/title_bar/src/title_bar_settings.rs | 11 +- crates/vim/src/test/vim_test_context.rs | 8 +- crates/vim/src/vim.rs | 11 +- crates/vim_mode_setting/Cargo.toml | 2 + .../vim_mode_setting/src/vim_mode_setting.rs | 78 +++++++--- crates/workspace/src/item.rs | 12 +- crates/workspace/src/workspace_settings.rs | 12 +- crates/worktree/src/worktree_settings.rs | 7 +- crates/zlog_settings/src/zlog_settings.rs | 18 ++- 44 files changed, 474 insertions(+), 256 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1a15d11e664f808d4ba68d61a3ad7a6c557c2420..a99c59a1890080ac220b669b26864d859d2ad377 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17995,6 +17995,8 @@ version = "0.1.0" dependencies = [ "anyhow", "gpui", + "schemars", + "serde", "settings", "workspace-hack", ] diff --git a/crates/agent_servers/src/settings.rs b/crates/agent_servers/src/settings.rs index 693d7d7b7014b3abbecfbe592bac67210b336872..167753296a1a489128ba916f114f4895c15afcf9 100644 --- a/crates/agent_servers/src/settings.rs +++ b/crates/agent_servers/src/settings.rs @@ -6,13 +6,14 @@ use collections::HashMap; use gpui::{App, SharedString}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; pub fn init(cx: &mut App) { AllAgentServersSettings::register(cx); } -#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug, SettingsUi)] +#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "agent_servers")] pub struct AllAgentServersSettings { pub gemini: Option, pub claude: Option, @@ -75,8 +76,6 @@ pub struct CustomAgentServerSettings { } impl settings::Settings for AllAgentServersSettings { - const KEY: Option<&'static str> = Some("agent_servers"); - type FileContent = Self; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/agent_settings/src/agent_settings.rs 
b/crates/agent_settings/src/agent_settings.rs index 8aebdcd288c8451d9bc391f1fc1598d6098d55af..8c4a190e1c3135b5bbfbc90544bb92db7a6bdd22 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -8,7 +8,7 @@ use gpui::{App, Pixels, SharedString}; use language_model::LanguageModel; use schemars::{JsonSchema, json_schema}; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use std::borrow::Cow; pub use crate::agent_profile::*; @@ -223,7 +223,8 @@ impl AgentSettingsContent { } } -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default, SettingsUi)] +#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default, SettingsUi, SettingsKey)] +#[settings_key(key = "agent", fallback_key = "assistant")] pub struct AgentSettingsContent { /// Whether the Agent is enabled. /// @@ -399,10 +400,6 @@ pub struct ContextServerPresetContent { } impl Settings for AgentSettings { - const KEY: Option<&'static str> = Some("agent"); - - const FALLBACK_KEY: Option<&'static str> = Some("assistant"); - const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]); type FileContent = AgentSettingsContent; diff --git a/crates/agent_ui/src/slash_command_settings.rs b/crates/agent_ui/src/slash_command_settings.rs index c54a10ed49a77d395c4968e551b1cd30ad1c6e07..9580ffef0f317fbe726c57041fad4f0fa438e143 100644 --- a/crates/agent_ui/src/slash_command_settings.rs +++ b/crates/agent_ui/src/slash_command_settings.rs @@ -2,10 +2,11 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; /// Settings for slash commands. 
-#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi)] +#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(key = "slash_commands")] pub struct SlashCommandSettings { /// Settings for the `/cargo-workspace` slash command. #[serde(default)] @@ -21,8 +22,6 @@ pub struct CargoWorkspaceCommandSettings { } impl Settings for SlashCommandSettings { - const KEY: Option<&'static str> = Some("slash_commands"); - type FileContent = Self; fn load(sources: SettingsSources, _cx: &mut App) -> Result { diff --git a/crates/audio/src/audio_settings.rs b/crates/audio/src/audio_settings.rs index d30d950273f2138f3bd54c573513373574f1ce43..168519030bcbd4a422965580ddbe01121934278d 100644 --- a/crates/audio/src/audio_settings.rs +++ b/crates/audio/src/audio_settings.rs @@ -2,9 +2,9 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] pub struct AudioSettings { /// Opt into the new audio system. #[serde(rename = "experimental.rodio_audio", default)] @@ -12,8 +12,9 @@ pub struct AudioSettings { } /// Configuration of audio in Zed. 
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] #[serde(default)] +#[settings_key(key = "audio")] pub struct AudioSettingsContent { /// Whether to use the experimental audio system #[serde(rename = "experimental.rodio_audio", default)] @@ -21,8 +22,6 @@ pub struct AudioSettingsContent { } impl Settings for AudioSettings { - const KEY: Option<&'static str> = Some("audio"); - type FileContent = AudioSettingsContent; fn load(sources: SettingsSources, _cx: &mut App) -> Result { diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index f0ae3fdb1cfef667a9f737aa6545a42046a9d322..f5d4533a9ee042e62752f26b989bc75561c534ae 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -10,7 +10,7 @@ use paths::remote_servers_dir; use release_channel::{AppCommitSha, ReleaseChannel}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsStore, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsStore, SettingsUi}; use smol::{fs, io::AsyncReadExt}; use smol::{fs::File, process::Command}; use std::{ @@ -118,13 +118,12 @@ struct AutoUpdateSetting(bool); /// Whether or not to automatically check for updates. 
/// /// Default: true -#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize, SettingsUi)] +#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize, SettingsUi, SettingsKey)] #[serde(transparent)] +#[settings_key(key = "auto_update")] struct AutoUpdateSettingContent(bool); impl Settings for AutoUpdateSetting { - const KEY: Option<&'static str> = Some("auto_update"); - type FileContent = AutoUpdateSettingContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/call/src/call_settings.rs b/crates/call/src/call_settings.rs index 7b0838e3a96185c1e4c33b8116fbd6a41b35f3dc..b0677e3c3bcb5112fdd9ad2abc4bf188b225aeac 100644 --- a/crates/call/src/call_settings.rs +++ b/crates/call/src/call_settings.rs @@ -2,7 +2,7 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Deserialize, Debug)] pub struct CallSettings { @@ -11,7 +11,8 @@ pub struct CallSettings { } /// Configuration of voice calls in Zed. -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "calls")] pub struct CallSettingsContent { /// Whether the microphone should be muted when joining a channel or a call. 
/// @@ -25,8 +26,6 @@ pub struct CallSettingsContent { } impl Settings for CallSettings { - const KEY: Option<&'static str> = Some("calls"); - type FileContent = CallSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 85f6aeade69cc04c5f58b72258ac062157094460..cb8185c7ed326ed7d45726a99077c53903118316 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -31,7 +31,7 @@ use release_channel::{AppVersion, ReleaseChannel}; use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use std::{ any::TypeId, convert::TryFrom, @@ -96,7 +96,8 @@ actions!( ] ); -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct ClientSettingsContent { server_url: Option, } @@ -107,8 +108,6 @@ pub struct ClientSettings { } impl Settings for ClientSettings { - const KEY: Option<&'static str> = None; - type FileContent = ClientSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { @@ -122,7 +121,8 @@ impl Settings for ClientSettings { fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {} } -#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct ProxySettingsContent { proxy: Option, } @@ -133,8 +133,6 @@ pub struct ProxySettings { } impl Settings for ProxySettings { - const KEY: Option<&'static str> = None; - type FileContent = ProxySettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { @@ -527,7 +525,8 @@ pub 
struct TelemetrySettings { } /// Control what info is collected by Zed. -#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Default, Clone, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "telemetry")] pub struct TelemetrySettingsContent { /// Send debug info like crash reports. /// @@ -540,8 +539,6 @@ pub struct TelemetrySettingsContent { } impl settings::Settings for TelemetrySettings { - const KEY: Option<&'static str> = Some("telemetry"); - type FileContent = TelemetrySettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs index 64f0a9366df7cdef1f2c05809752fb1cf912111b..bae118d819c2e38e7b77e5aa841c084e4c45d6e8 100644 --- a/crates/collab_ui/src/panel_settings.rs +++ b/crates/collab_ui/src/panel_settings.rs @@ -1,7 +1,7 @@ use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use workspace::dock::DockPosition; #[derive(Deserialize, Debug)] @@ -27,7 +27,8 @@ pub struct ChatPanelSettings { pub default_width: Pixels, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "chat_panel")] pub struct ChatPanelSettingsContent { /// When to show the panel button in the status bar. 
/// @@ -43,14 +44,16 @@ pub struct ChatPanelSettingsContent { pub default_width: Option, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, SettingsKey)] +#[settings_key(key = "notification_panel")] pub struct NotificationPanelSettings { pub button: bool, pub dock: DockPosition, pub default_width: Pixels, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "collaboration_panel")] pub struct PanelSettingsContent { /// Whether to show the panel button in the status bar. /// @@ -66,7 +69,8 @@ pub struct PanelSettingsContent { pub default_width: Option, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "message_editor")] pub struct MessageEditorSettings { /// Whether to automatically replace emoji shortcodes with emoji characters. /// For example: typing `:wave:` gets replaced with `👋`. 
@@ -76,8 +80,6 @@ pub struct MessageEditorSettings { } impl Settings for CollaborationPanelSettings { - const KEY: Option<&'static str> = Some("collaboration_panel"); - type FileContent = PanelSettingsContent; fn load( @@ -91,8 +93,6 @@ impl Settings for CollaborationPanelSettings { } impl Settings for ChatPanelSettings { - const KEY: Option<&'static str> = Some("chat_panel"); - type FileContent = ChatPanelSettingsContent; fn load( @@ -106,8 +106,6 @@ impl Settings for ChatPanelSettings { } impl Settings for NotificationPanelSettings { - const KEY: Option<&'static str> = Some("notification_panel"); - type FileContent = PanelSettingsContent; fn load( @@ -121,8 +119,6 @@ impl Settings for NotificationPanelSettings { } impl Settings for MessageEditorSettings { - const KEY: Option<&'static str> = Some("message_editor"); - type FileContent = MessageEditorSettings; fn load( diff --git a/crates/dap/src/debugger_settings.rs b/crates/dap/src/debugger_settings.rs index 929bff747e8685ec9a4b36fa9db63d12a769faa2..8d53fdea8649f1c62fa74cc6f0ddd6aec6ecff6d 100644 --- a/crates/dap/src/debugger_settings.rs +++ b/crates/dap/src/debugger_settings.rs @@ -2,7 +2,7 @@ use dap_types::SteppingGranularity; use gpui::{App, Global}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)] #[serde(rename_all = "snake_case")] @@ -12,11 +12,12 @@ pub enum DebugPanelDockPosition { Right, } -#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy, SettingsUi)] +#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy, SettingsUi, SettingsKey)] #[serde(default)] // todo(settings_ui) @ben: I'm pretty sure not having the fields be optional here is a bug, // it means the defaults will override previously set values if a single key is missing -#[settings_ui(group = "Debugger", path 
= "debugger")] +#[settings_ui(group = "Debugger")] +#[settings_key(key = "debugger")] pub struct DebuggerSettings { /// Determines the stepping granularity. /// @@ -64,8 +65,6 @@ impl Default for DebuggerSettings { } impl Settings for DebuggerSettings { - const KEY: Option<&'static str> = Some("debugger"); - type FileContent = Self; fn load(sources: SettingsSources, _: &mut App) -> anyhow::Result { diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 44cb0749760e9e3af91bc837df0ef0589e251703..d74244131e6635c7b9eda6ace0723ced96b0e041 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -6,7 +6,7 @@ use language::CursorShape; use project::project_settings::DiagnosticSeverity; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi, VsCodeSettings}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi, VsCodeSettings}; use util::serde::default_true; /// Imports from the VSCode settings at @@ -431,8 +431,9 @@ pub enum SnippetSortOrder { None, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] #[settings_ui(group = "Editor")] +#[settings_key(None)] pub struct EditorSettingsContent { /// Whether the cursor blinks in the editor. 
/// @@ -777,8 +778,6 @@ impl EditorSettings { } impl Settings for EditorSettings { - const KEY: Option<&'static str> = None; - type FileContent = EditorSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> anyhow::Result { diff --git a/crates/extension_host/src/extension_settings.rs b/crates/extension_host/src/extension_settings.rs index 6bd760795cec6d1c4208770f1355e8ac7a34eb95..fa5a613c55a76a0b5660b114d49acc17fcf79120 100644 --- a/crates/extension_host/src/extension_settings.rs +++ b/crates/extension_host/src/extension_settings.rs @@ -3,10 +3,11 @@ use collections::HashMap; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use std::sync::Arc; -#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi)] +#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct ExtensionSettings { /// The extensions that should be automatically installed by Zed. 
/// @@ -38,8 +39,6 @@ impl ExtensionSettings { } impl Settings for ExtensionSettings { - const KEY: Option<&'static str> = None; - type FileContent = Self; fn load(sources: SettingsSources, _cx: &mut App) -> Result { diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index fd504764b65826ea74e092ea4c11d5576fa51524..0b925dceb1544d97a77082881626bc1e97f3d1b0 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -1345,7 +1345,7 @@ impl ExtensionsPage { this.update_settings::( selection, cx, - |setting, value| *setting = Some(value), + |setting, value| setting.vim_mode = Some(value), ); }), )), diff --git a/crates/file_finder/src/file_finder_settings.rs b/crates/file_finder/src/file_finder_settings.rs index 007af53b1144ed4caa7985d75cdf4707f13ed13e..6a6b98b8ea3e1c7e7f0e3cc0385fdd7f413b659f 100644 --- a/crates/file_finder/src/file_finder_settings.rs +++ b/crates/file_finder/src/file_finder_settings.rs @@ -1,7 +1,7 @@ use anyhow::Result; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Deserialize, Debug, Clone, Copy, PartialEq)] pub struct FileFinderSettings { @@ -11,7 +11,8 @@ pub struct FileFinderSettings { pub include_ignored: Option, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "file_finder")] pub struct FileFinderSettingsContent { /// Whether to show file icons in the file finder. 
/// @@ -42,8 +43,6 @@ pub struct FileFinderSettingsContent { } impl Settings for FileFinderSettings { - const KEY: Option<&'static str> = Some("file_finder"); - type FileContent = FileFinderSettingsContent; fn load(sources: SettingsSources, _: &mut gpui::App) -> Result { diff --git a/crates/git_hosting_providers/src/settings.rs b/crates/git_hosting_providers/src/settings.rs index 34e3805a39ea8a13a6a2f79552a6a917c4597692..3249981db91015479bab728484341519db357683 100644 --- a/crates/git_hosting_providers/src/settings.rs +++ b/crates/git_hosting_providers/src/settings.rs @@ -5,7 +5,7 @@ use git::GitHostingProviderRegistry; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsStore, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsStore, SettingsUi}; use url::Url; use util::ResultExt as _; @@ -78,7 +78,8 @@ pub struct GitHostingProviderConfig { pub name: String, } -#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct GitHostingProviderSettings { /// The list of custom Git hosting providers. 
#[serde(default)] @@ -86,8 +87,6 @@ pub struct GitHostingProviderSettings { } impl Settings for GitHostingProviderSettings { - const KEY: Option<&'static str> = None; - type FileContent = Self; fn load(sources: settings::SettingsSources, _: &mut App) -> Result { diff --git a/crates/git_ui/src/git_panel_settings.rs b/crates/git_ui/src/git_panel_settings.rs index 39d6540db52046845521a23c0290be4e6e595492..be207314acd82446566dffd2eb58339974f177ff 100644 --- a/crates/git_ui/src/git_panel_settings.rs +++ b/crates/git_ui/src/git_panel_settings.rs @@ -2,7 +2,7 @@ use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use workspace::dock::DockPosition; #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -36,7 +36,8 @@ pub enum StatusStyle { LabelColor, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "git_panel")] pub struct GitPanelSettingsContent { /// Whether to show the panel button in the status bar. 
/// @@ -90,8 +91,6 @@ pub struct GitPanelSettings { } impl Settings for GitPanelSettings { - const KEY: Option<&'static str> = Some("git_panel"); - type FileContent = GitPanelSettingsContent; fn load( diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 5840993ece84b1e8098ee341395e7f77fb791ace..6af8c79fe9cc4ed0be0d7cb466753fa939355eec 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -2,7 +2,7 @@ use editor::{Editor, EditorSettings, MultiBufferSnapshot}; use gpui::{App, Entity, FocusHandle, Focusable, Subscription, Task, WeakEntity}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use std::{fmt::Write, num::NonZeroU32, time::Duration}; use text::{Point, Selection}; use ui::{ @@ -301,13 +301,12 @@ pub(crate) enum LineIndicatorFormat { Long, } -#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize, SettingsUi)] +#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize, SettingsUi, SettingsKey)] #[serde(transparent)] +#[settings_key(key = "line_indicator_format")] pub(crate) struct LineIndicatorFormatContent(LineIndicatorFormat); impl Settings for LineIndicatorFormat { - const KEY: Option<&'static str> = Some("line_indicator_format"); - type FileContent = LineIndicatorFormatContent; fn load(sources: SettingsSources, _: &mut App) -> anyhow::Result { diff --git a/crates/image_viewer/src/image_viewer_settings.rs b/crates/image_viewer/src/image_viewer_settings.rs index 4949b266b4e03c7089d4bc25e2a223a0ce64a081..510de69b522fbb07cb8eedba43edfe3a95e4a591 100644 --- a/crates/image_viewer/src/image_viewer_settings.rs +++ b/crates/image_viewer/src/image_viewer_settings.rs @@ -1,10 +1,11 @@ use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, 
SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; /// The settings for the image viewer. -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default, SettingsUi)] +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default, SettingsUi, SettingsKey)] +#[settings_key(key = "image_viewer")] pub struct ImageViewerSettings { /// The unit to use for displaying image file sizes. /// @@ -24,8 +25,6 @@ pub enum ImageFileSizeUnit { } impl Settings for ImageViewerSettings { - const KEY: Option<&'static str> = Some("image_viewer"); - type FileContent = Self; fn load(sources: SettingsSources, _: &mut App) -> anyhow::Result { diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs index ffa24571c88a0f0e06252565261b1a6d285d098c..5cdfa6c1df034deaf06e1c99ea99415757b84c29 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -5,7 +5,7 @@ use editor::{Editor, SelectionEffects}; use gpui::{App, AppContext as _, Context, Window, actions}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use std::{ fs::OpenOptions, path::{Path, PathBuf}, @@ -22,7 +22,8 @@ actions!( ); /// Settings specific to journaling -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(key = "journal")] pub struct JournalSettings { /// The path of the directory where journal entries are stored. 
/// @@ -52,8 +53,6 @@ pub enum HourFormat { } impl settings::Settings for JournalSettings { - const KEY: Option<&'static str> = Some("journal"); - type FileContent = Self; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index f04b83bc7336143672647a07107fa27bc55f5823..3443ccf592a4138edb61959f0dd82bdb8cc8d418 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -17,7 +17,8 @@ use serde::{ }; use settings::{ - ParameterizedJsonSchema, Settings, SettingsLocation, SettingsSources, SettingsStore, SettingsUi, + ParameterizedJsonSchema, Settings, SettingsKey, SettingsLocation, SettingsSources, + SettingsStore, SettingsUi, }; use shellexpand; use std::{borrow::Cow, num::NonZeroU32, path::Path, slice, sync::Arc}; @@ -292,7 +293,10 @@ pub struct CopilotSettings { } /// The settings for all languages. -#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive( + Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey, +)] +#[settings_key(None)] pub struct AllLanguageSettingsContent { /// The settings for enabling/disabling features. 
#[serde(default)] @@ -1213,8 +1217,6 @@ impl InlayHintKind { } impl settings::Settings for AllLanguageSettings { - const KEY: Option<&'static str> = None; - type FileContent = AllLanguageSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index 8b7ab5fc2547bd0b014238739f1b940dad831f66..cfe66c91a36d4da562cba84363f79bd1d5b4e1ce 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -5,7 +5,7 @@ use collections::HashMap; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use crate::provider::{ self, @@ -46,7 +46,10 @@ pub struct AllLanguageModelSettings { pub zed_dot_dev: ZedDotDevSettings, } -#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, SettingsUi)] +#[derive( + Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, SettingsUi, SettingsKey, +)] +#[settings_key(key = "language_models")] pub struct AllLanguageModelSettingsContent { pub anthropic: Option, pub bedrock: Option, @@ -145,8 +148,6 @@ pub struct OpenRouterSettingsContent { } impl settings::Settings for AllLanguageModelSettings { - const KEY: Option<&'static str> = Some("language_models"); - const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]); type FileContent = AllLanguageModelSettingsContent; diff --git a/crates/onboarding/src/ai_setup_page.rs b/crates/onboarding/src/ai_setup_page.rs index 54c49bc72a49309002421c4f8ac3544c86e4dc69..3631ad00dfb8662d5d4142a4cbd11186c1b1b137 100644 --- a/crates/onboarding/src/ai_setup_page.rs +++ b/crates/onboarding/src/ai_setup_page.rs @@ -264,13 +264,9 @@ pub(crate) fn render_ai_setup_page( ); let fs = ::global(cx); - update_settings_file::( - fs, - cx, - move |ai_settings: &mut Option, _| { - 
*ai_settings = Some(enabled); - }, - ); + update_settings_file::(fs, cx, move |ai_settings, _| { + ai_settings.disable_ai = Some(enabled); + }); }, ) .tab_index({ diff --git a/crates/onboarding/src/basics_page.rs b/crates/onboarding/src/basics_page.rs index 59ec437dcf8d11209e9c73020f1b51e40aa56cce..aef9dcca86ce49a70f1a508c0a43614737a653c7 100644 --- a/crates/onboarding/src/basics_page.rs +++ b/crates/onboarding/src/basics_page.rs @@ -388,7 +388,7 @@ fn render_vim_mode_switch(tab_index: &mut isize, cx: &mut App) -> impl IntoEleme } }; update_settings_file::(fs.clone(), cx, move |setting, _| { - *setting = Some(vim_mode); + setting.vim_mode = Some(vim_mode); }); telemetry::event!( diff --git a/crates/outline_panel/src/outline_panel_settings.rs b/crates/outline_panel/src/outline_panel_settings.rs index 48c6621e3509c1eda69a6a5e92602ba2ab12a484..dc123f2ba5fb38dd80b72aee8fc6ad6a000be23d 100644 --- a/crates/outline_panel/src/outline_panel_settings.rs +++ b/crates/outline_panel/src/outline_panel_settings.rs @@ -2,7 +2,7 @@ use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -61,7 +61,8 @@ pub struct IndentGuidesSettingsContent { pub show: Option, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "outline_panel")] pub struct OutlinePanelSettingsContent { /// Whether to show the outline panel button in the status bar. 
/// @@ -116,8 +117,6 @@ pub struct OutlinePanelSettingsContent { } impl Settings for OutlinePanelSettings { - const KEY: Option<&'static str> = Some("outline_panel"); - type FileContent = OutlinePanelSettingsContent; fn load( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 4adebabc5a03636ca81fbc3b04a277c2d6d03a66..1e2e52c120f95a7c7540cd6f916d2d401f411af2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -28,6 +28,7 @@ use context_server_store::ContextServerStore; pub use environment::{EnvironmentErrorMessage, ProjectEnvironmentEvent}; use git::repository::get_git_committer; use git_store::{Repository, RepositoryId}; +use schemars::JsonSchema; pub mod search_history; mod yarn; @@ -94,7 +95,10 @@ use rpc::{ }; use search::{SearchInputKind, SearchQuery, SearchResult}; use search_history::SearchHistory; -use settings::{InvalidSettingsError, Settings, SettingsLocation, SettingsSources, SettingsStore}; +use settings::{ + InvalidSettingsError, Settings, SettingsKey, SettingsLocation, SettingsSources, SettingsStore, + SettingsUi, +}; use smol::channel::Receiver; use snippet::Snippet; use snippet_provider::SnippetProvider; @@ -968,10 +972,26 @@ pub struct DisableAiSettings { pub disable_ai: bool, } -impl settings::Settings for DisableAiSettings { - const KEY: Option<&'static str> = Some("disable_ai"); +#[derive( + Copy, + Clone, + PartialEq, + Eq, + Debug, + Default, + serde::Serialize, + serde::Deserialize, + SettingsUi, + SettingsKey, + JsonSchema, +)] +#[settings_key(None)] +pub struct DisableAiSettingContent { + pub disable_ai: Option, +} - type FileContent = Option; +impl settings::Settings for DisableAiSettings { + type FileContent = DisableAiSettingContent; fn load(sources: SettingsSources, _: &mut App) -> Result { // For security reasons, settings can only make AI restrictions MORE strict, not less. 
@@ -984,7 +1004,7 @@ impl settings::Settings for DisableAiSettings { .iter() .chain(sources.user.iter()) .chain(sources.server.iter()) - .any(|disabled| **disabled == Some(true)); + .any(|disabled| disabled.disable_ai == Some(true)); Ok(Self { disable_ai }) } @@ -5550,10 +5570,15 @@ mod disable_ai_settings_tests { #[gpui::test] async fn test_disable_ai_settings_security(cx: &mut TestAppContext) { + fn disable_setting(value: Option) -> DisableAiSettingContent { + DisableAiSettingContent { disable_ai: value } + } cx.update(|cx| { // Test 1: Default is false (AI enabled) let sources = SettingsSources { - default: &Some(false), + default: &DisableAiSettingContent { + disable_ai: Some(false), + }, global: None, extensions: None, user: None, @@ -5567,10 +5592,10 @@ mod disable_ai_settings_tests { assert!(!settings.disable_ai, "Default should allow AI"); // Test 2: Global true, local false -> still disabled (local cannot re-enable) - let global_true = Some(true); - let local_false = Some(false); + let global_true = disable_setting(Some(true)); + let local_false = disable_setting(Some(false)); let sources = SettingsSources { - default: &Some(false), + default: &disable_setting(Some(false)), global: None, extensions: None, user: Some(&global_true), @@ -5587,10 +5612,10 @@ mod disable_ai_settings_tests { ); // Test 3: Global false, local true -> disabled (local can make more restrictive) - let global_false = Some(false); - let local_true = Some(true); + let global_false = disable_setting(Some(false)); + let local_true = disable_setting(Some(true)); let sources = SettingsSources { - default: &Some(false), + default: &disable_setting(Some(false)), global: None, extensions: None, user: Some(&global_false), @@ -5604,10 +5629,10 @@ mod disable_ai_settings_tests { assert!(settings.disable_ai, "Local true can override global false"); // Test 4: Server can only make more restrictive (set to true) - let user_false = Some(false); - let server_true = Some(true); + let user_false = 
disable_setting(Some(false)); + let server_true = disable_setting(Some(true)); let sources = SettingsSources { - default: &Some(false), + default: &disable_setting(Some(false)), global: None, extensions: None, user: Some(&user_false), @@ -5624,10 +5649,10 @@ mod disable_ai_settings_tests { ); // Test 5: Server false cannot override user true - let user_true = Some(true); - let server_false = Some(false); + let user_true = disable_setting(Some(true)); + let server_false = disable_setting(Some(false)); let sources = SettingsSources { - default: &Some(false), + default: &disable_setting(Some(false)), global: None, extensions: None, user: Some(&user_true), @@ -5644,12 +5669,12 @@ mod disable_ai_settings_tests { ); // Test 6: Multiple local settings, any true disables AI - let global_false = Some(false); - let local_false3 = Some(false); - let local_true2 = Some(true); - let local_false4 = Some(false); + let global_false = disable_setting(Some(false)); + let local_false3 = disable_setting(Some(false)); + let local_true2 = disable_setting(Some(true)); + let local_false4 = disable_setting(Some(false)); let sources = SettingsSources { - default: &Some(false), + default: &disable_setting(Some(false)), global: None, extensions: None, user: Some(&global_false), @@ -5663,11 +5688,11 @@ mod disable_ai_settings_tests { assert!(settings.disable_ai, "Any local true should disable AI"); // Test 7: All three sources can independently disable AI - let user_false2 = Some(false); - let server_false2 = Some(false); - let local_true3 = Some(true); + let user_false2 = disable_setting(Some(false)); + let server_false2 = disable_setting(Some(false)); + let local_true3 = disable_setting(Some(true)); let sources = SettingsSources { - default: &Some(false), + default: &disable_setting(Some(false)), global: None, extensions: None, user: Some(&user_false2), diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 
57969ec9938602b477293aa3033a31bc8b3deae1..694e244e63e2b2861d640ec32ce0a1f5c50be52f 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -18,8 +18,8 @@ use rpc::{ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ - InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources, - SettingsStore, SettingsUi, parse_json_with_comments, watch_config_file, + InvalidSettingsError, LocalSettingsKind, Settings, SettingsKey, SettingsLocation, + SettingsSources, SettingsStore, SettingsUi, parse_json_with_comments, watch_config_file, }; use std::{ collections::BTreeMap, @@ -36,7 +36,8 @@ use crate::{ worktree_store::{WorktreeStore, WorktreeStoreEvent}, }; -#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct ProjectSettings { /// Configuration for language servers. 
/// @@ -568,8 +569,6 @@ impl Default for SessionSettings { } impl Settings for ProjectSettings { - const KEY: Option<&'static str> = None; - type FileContent = Self; fn load(sources: SettingsSources, _: &mut App) -> anyhow::Result { diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index db9b2b85d545e85a0cff3ec13a8f75e28dac88fa..6c812c294663d1d6fe7915d201f9e8925fa943ab 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -2,7 +2,7 @@ use editor::ShowScrollbar; use gpui::Pixels; use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)] #[serde(rename_all = "snake_case")] @@ -92,7 +92,8 @@ pub enum ShowDiagnostics { All, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "project_panel")] pub struct ProjectPanelSettingsContent { /// Whether to show the project panel button in the status bar. 
/// @@ -168,8 +169,6 @@ pub struct ProjectPanelSettingsContent { } impl Settings for ProjectPanelSettings { - const KEY: Option<&'static str> = Some("project_panel"); - type FileContent = ProjectPanelSettingsContent; fn load( diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index a7f915301f42850b03be951f596a8542842a6877..3e6810239c80c72d74624bcc243157290fcd93fa 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -21,7 +21,7 @@ use remote::{ }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use theme::ThemeSettings; use ui::{ ActiveTheme, Color, CommonAnimationExt, Context, Icon, IconName, IconSize, InteractiveElement, @@ -121,15 +121,14 @@ pub struct SshProject { pub paths: Vec, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct RemoteSettingsContent { pub ssh_connections: Option>, pub read_ssh_config: Option, } impl Settings for SshSettings { - const KEY: Option<&'static str> = None; - type FileContent = RemoteSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/repl/src/jupyter_settings.rs b/crates/repl/src/jupyter_settings.rs index 6f3d6b1db631267e9b41ae7598d6e573387f2ac6..c89736a03dc6d77dd89bb33c4990b25149189a41 100644 --- a/crates/repl/src/jupyter_settings.rs +++ b/crates/repl/src/jupyter_settings.rs @@ -4,7 +4,7 @@ use editor::EditorSettings; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Debug, Default)] pub struct JupyterSettings { @@ -20,7 
+20,8 @@ impl JupyterSettings { } } -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] +#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "jupyter")] pub struct JupyterSettingsContent { /// Default kernels to select for each language. /// @@ -37,8 +38,6 @@ impl Default for JupyterSettingsContent { } impl Settings for JupyterSettings { - const KEY: Option<&'static str> = Some("jupyter"); - type FileContent = JupyterSettingsContent; fn load( diff --git a/crates/settings/src/base_keymap_setting.rs b/crates/settings/src/base_keymap_setting.rs index fb5b445b49a1fdbfac34ce8bc1a3d17d8241e009..a6bfeecbc3c01eb5309221443d1b9905b99dcd5b 100644 --- a/crates/settings/src/base_keymap_setting.rs +++ b/crates/settings/src/base_keymap_setting.rs @@ -1,10 +1,10 @@ use std::fmt::{Display, Formatter}; -use crate as settings; +use crate::{self as settings}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources, VsCodeSettings}; -use settings_ui_macros::SettingsUi; +use settings_ui_macros::{SettingsKey, SettingsUi}; /// Base key bindings scheme. Base keymaps can be overridden with user keymaps. 
/// @@ -101,16 +101,25 @@ impl BaseKeymap { } #[derive( - Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default, SettingsUi, + Copy, + Clone, + Debug, + Serialize, + Deserialize, + JsonSchema, + PartialEq, + Eq, + Default, + SettingsUi, + SettingsKey, )] // extracted so that it can be an option, and still work with derive(SettingsUi) +#[settings_key(None)] pub struct BaseKeymapSetting { pub base_keymap: Option, } impl Settings for BaseKeymap { - const KEY: Option<&'static str> = None; - type FileContent = BaseKeymapSetting; fn load( diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 7e567cc085101713b0f6b100d0b47f6bf4c3531f..8a50b1afe5d0c68365efe0652421937f6dad2783 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -21,12 +21,12 @@ pub use keymap_file::{ pub use settings_file::*; pub use settings_json::*; pub use settings_store::{ - InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources, - SettingsStore, + InvalidSettingsError, LocalSettingsKind, Settings, SettingsKey, SettingsLocation, + SettingsSources, SettingsStore, }; pub use settings_ui_core::*; // Re-export the derive macro -pub use settings_ui_macros::SettingsUi; +pub use settings_ui_macros::{SettingsKey, SettingsUi}; pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource}; #[derive(Clone, Debug, PartialEq)] diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 72df08d14fb61536d147b4d1fb8b9a2466f5f0aa..cc0ebf10cd004ce660572d7ea3a44ec945a47432 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -36,17 +36,19 @@ use crate::{ settings_ui_core::SettingsUi, update_value_in_json_text, }; -/// A value that can be defined as a user setting. -/// -/// Settings can be loaded from a combination of multiple JSON files. 
-pub trait Settings: 'static + Send + Sync { +pub trait SettingsKey: 'static + Send + Sync { /// The name of a key within the JSON file from which this setting should /// be deserialized. If this is `None`, then the setting will be deserialized /// from the root object. const KEY: Option<&'static str>; const FALLBACK_KEY: Option<&'static str> = None; +} +/// A value that can be defined as a user setting. +/// +/// Settings can be loaded from a combination of multiple JSON files. +pub trait Settings: 'static + Send + Sync { /// The name of the keys in the [`FileContent`](Self::FileContent) that should /// always be written to a settings file, even if their value matches the default /// value. @@ -57,8 +59,19 @@ pub trait Settings: 'static + Send + Sync { const PRESERVED_KEYS: Option<&'static [&'static str]> = None; /// The type that is stored in an individual JSON file. - type FileContent: Clone + Default + Serialize + DeserializeOwned + JsonSchema + SettingsUi; - + type FileContent: Clone + + Default + + Serialize + + DeserializeOwned + + JsonSchema + + SettingsUi + + SettingsKey; + + /* + * let path = Settings + * + * + */ /// The logic for combining together values from one or more JSON files into the /// final value for this setting. 
/// @@ -71,7 +84,7 @@ pub trait Settings: 'static + Send + Sync { Self: Sized; fn missing_default() -> anyhow::Error { - anyhow::anyhow!("missing default") + anyhow::anyhow!("missing default for: {}", std::any::type_name::()) } /// Use [the helpers in the vscode_import module](crate::vscode_import) to apply known @@ -1393,7 +1406,7 @@ impl Debug for SettingsStore { impl AnySettingValue for SettingValue { fn key(&self) -> Option<&'static str> { - T::KEY + T::FileContent::KEY } fn setting_type_name(&self) -> &'static str { @@ -1445,16 +1458,21 @@ impl AnySettingValue for SettingValue { mut json: &Value, ) -> (Option<&'static str>, Result) { let mut key = None; - if let Some(k) = T::KEY { + if let Some(k) = T::FileContent::KEY { if let Some(value) = json.get(k) { json = value; key = Some(k); - } else if let Some((k, value)) = T::FALLBACK_KEY.and_then(|k| Some((k, json.get(k)?))) { + } else if let Some((k, value)) = + T::FileContent::FALLBACK_KEY.and_then(|k| Some((k, json.get(k)?))) + { json = value; key = Some(k); } else { let value = T::FileContent::default(); - return (T::KEY, Ok(DeserializedSetting(Box::new(value)))); + return ( + T::FileContent::KEY, + Ok(DeserializedSetting(Box::new(value))), + ); } } let value = serde_path_to_error::deserialize::<_, T::FileContent>(json) @@ -1498,6 +1516,7 @@ impl AnySettingValue for SettingValue { } } } + self.global_value .as_ref() .unwrap_or_else(|| panic!("no default value for setting {}", self.setting_type_name())) @@ -1570,7 +1589,7 @@ mod tests { // This is so the SettingsUi macro can still work properly use crate as settings; use serde_derive::Deserialize; - use settings_ui_macros::SettingsUi; + use settings_ui_macros::{SettingsKey, SettingsUi}; use unindent::Unindent; #[gpui::test] @@ -2120,7 +2139,8 @@ mod tests { staff: bool, } - #[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi)] + #[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] + #[settings_key(key = 
"user")] struct UserSettingsContent { name: Option, age: Option, @@ -2128,7 +2148,6 @@ mod tests { } impl Settings for UserSettings { - const KEY: Option<&'static str> = Some("user"); type FileContent = UserSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { @@ -2143,12 +2162,37 @@ mod tests { #[derive(Debug, Deserialize, PartialEq)] struct TurboSetting(bool); + #[derive( + Copy, + Clone, + PartialEq, + Eq, + Debug, + Default, + serde::Serialize, + serde::Deserialize, + SettingsUi, + SettingsKey, + JsonSchema, + )] + #[serde(default)] + #[settings_key(None)] + pub struct TurboSettingContent { + turbo: Option, + } + impl Settings for TurboSetting { - const KEY: Option<&'static str> = Some("turbo"); - type FileContent = bool; + type FileContent = TurboSettingContent; fn load(sources: SettingsSources, _: &mut App) -> Result { - sources.json_merge() + Ok(Self( + sources + .user + .or(sources.server) + .unwrap_or(sources.default) + .turbo + .unwrap_or_default(), + )) } fn import_from_vscode(_vscode: &VsCodeSettings, _current: &mut Self::FileContent) {} @@ -2162,15 +2206,14 @@ mod tests { key2: String, } - #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] + #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] + #[settings_key(None)] struct MultiKeySettingsJson { key1: Option, key2: Option, } impl Settings for MultiKeySettings { - const KEY: Option<&'static str> = None; - type FileContent = MultiKeySettingsJson; fn load(sources: SettingsSources, _: &mut App) -> Result { @@ -2200,15 +2243,16 @@ mod tests { Hour24, } - #[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema, SettingsUi)] + #[derive( + Clone, Default, Debug, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey, + )] + #[settings_key(key = "journal")] struct JournalSettingsJson { pub path: Option, pub hour_format: Option, } impl Settings for JournalSettings { - const KEY: Option<&'static str> = Some("journal"); 
- type FileContent = JournalSettingsJson; fn load(sources: SettingsSources, _: &mut App) -> Result { @@ -2288,7 +2332,10 @@ mod tests { ); } - #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] + #[derive( + Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey, + )] + #[settings_key(None)] struct LanguageSettings { #[serde(default)] languages: HashMap, @@ -2301,8 +2348,6 @@ mod tests { } impl Settings for LanguageSettings { - const KEY: Option<&'static str> = None; - type FileContent = Self; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/settings_ui_macros/src/settings_ui_macros.rs b/crates/settings_ui_macros/src/settings_ui_macros.rs index c98705d5f8d4de3f42b4756a32353123f5779fbc..1895083508a6a606f4dd9889529719aa12ea0b10 100644 --- a/crates/settings_ui_macros/src/settings_ui_macros.rs +++ b/crates/settings_ui_macros/src/settings_ui_macros.rs @@ -43,10 +43,9 @@ pub fn derive_settings_ui(input: proc_macro::TokenStream) -> proc_macro::TokenSt let lit: LitStr = meta.input.parse()?; group_name = Some(lit.value()); } else if meta.path.is_ident("path") { - // todo(settings_ui) try get KEY from Settings if possible, and once we do, - // if can get key from settings, throw error if path also passed + // todo(settings_ui) rely entirely on settings_key, remove path attribute if path_name.is_some() { - return Err(meta.error("Only one 'path' can be specified")); + return Err(meta.error("Only one 'path' can be specified, either with `path` in `settings_ui` or with `settings_key`")); } meta.input.parse::()?; let lit: LitStr = meta.input.parse()?; @@ -55,6 +54,12 @@ pub fn derive_settings_ui(input: proc_macro::TokenStream) -> proc_macro::TokenSt Ok(()) }) .unwrap_or_else(|e| panic!("in #[settings_ui] attribute: {}", e)); + } else if let Some(settings_key) = parse_setting_key_attr(attr) { + // todo(settings_ui) either remove fallback key or handle it here + if path_name.is_some() && 
settings_key.key.is_some() { + panic!("Both 'path' and 'settings_key' are specified. Must specify only one"); + } + path_name = settings_key.key; } } @@ -212,3 +217,126 @@ fn generate_ui_item_body( }, } } + +struct SettingsKey { + key: Option, + fallback_key: Option, +} + +fn parse_setting_key_attr(attr: &syn::Attribute) -> Option { + if !attr.path().is_ident("settings_key") { + return None; + } + + let mut settings_key = SettingsKey { + key: None, + fallback_key: None, + }; + + let mut found_none = false; + + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("None") { + found_none = true; + } else if meta.path.is_ident("key") { + if settings_key.key.is_some() { + return Err(meta.error("Only one 'group' path can be specified")); + } + meta.input.parse::()?; + let lit: LitStr = meta.input.parse()?; + settings_key.key = Some(lit.value()); + } else if meta.path.is_ident("fallback_key") { + if found_none { + return Err(meta.error("Cannot specify 'fallback_key' and 'None'")); + } + + if settings_key.fallback_key.is_some() { + return Err(meta.error("Only one 'fallback_key' can be specified")); + } + + meta.input.parse::()?; + let lit: LitStr = meta.input.parse()?; + settings_key.fallback_key = Some(lit.value()); + } + Ok(()) + }) + .unwrap_or_else(|e| panic!("in #[settings_key] attribute: {}", e)); + + if found_none && settings_key.fallback_key.is_some() { + panic!("in #[settings_key] attribute: Cannot specify 'None' and 'fallback_key'"); + } + if found_none && settings_key.key.is_some() { + panic!("in #[settings_key] attribute: Cannot specify 'None' and 'key'"); + } + if !found_none && settings_key.key.is_none() { + panic!("in #[settings_key] attribute: 'key' must be specified"); + } + + return Some(settings_key); +} + +#[proc_macro_derive(SettingsKey, attributes(settings_key))] +pub fn derive_settings_key(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let name = &input.ident; + + // Handle 
generic parameters if present + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + + let mut settings_key = Option::::None; + + for attr in &input.attrs { + let parsed_settings_key = parse_setting_key_attr(attr); + if parsed_settings_key.is_some() && settings_key.is_some() { + panic!("Duplicate #[settings_key] attribute"); + } + settings_key = parsed_settings_key; + } + + let Some(SettingsKey { key, fallback_key }) = settings_key else { + panic!("Missing #[settings_key] attribute"); + }; + + let key = key.map_or_else(|| quote! {None}, |key| quote! {Some(#key)}); + let fallback_key = fallback_key.map_or_else( + || quote! {None}, + |fallback_key| quote! {Some(#fallback_key)}, + ); + + let expanded = quote! { + impl #impl_generics settings::SettingsKey for #name #ty_generics #where_clause { + const KEY: Option<&'static str> = #key; + + const FALLBACK_KEY: Option<&'static str> = #fallback_key; + }; + }; + + proc_macro::TokenStream::from(expanded) +} + +#[cfg(test)] +mod tests { + use syn::{Attribute, parse_quote}; + + use super::*; + + #[test] + fn test_extract_key() { + let input: Attribute = parse_quote!( + #[settings_key(key = "my_key")] + ); + let settings_key = parse_setting_key_attr(&input).unwrap(); + assert_eq!(settings_key.key, Some("my_key".to_string())); + assert_eq!(settings_key.fallback_key, None); + } + + #[test] + fn test_empty_key() { + let input: Attribute = parse_quote!( + #[settings_key(None)] + ); + let settings_key = parse_setting_key_attr(&input).unwrap(); + assert_eq!(settings_key.key, None); + assert_eq!(settings_key.fallback_key, None); + } +} diff --git a/crates/terminal/src/terminal_settings.rs b/crates/terminal/src/terminal_settings.rs index c3051e089c68e3df0733c9e6cf7c8a42f56e742d..0ab92a0f26d35710da7fd0a2e88542a98c7affed 100644 --- a/crates/terminal/src/terminal_settings.rs +++ b/crates/terminal/src/terminal_settings.rs @@ -6,7 +6,7 @@ use gpui::{AbsoluteLength, App, FontFallbacks, FontFeatures, FontWeight, 
Pixels, use schemars::JsonSchema; use serde_derive::{Deserialize, Serialize}; -use settings::{SettingsSources, SettingsUi}; +use settings::{SettingsKey, SettingsSources, SettingsUi}; use std::path::PathBuf; use task::Shell; use theme::FontFamilyName; @@ -135,7 +135,8 @@ pub enum ActivateScript { Pyenv, } -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(key = "terminal")] pub struct TerminalSettingsContent { /// What shell to use when opening a terminal. /// @@ -253,8 +254,6 @@ pub struct TerminalSettingsContent { } impl settings::Settings for TerminalSettings { - const KEY: Option<&'static str> = Some("terminal"); - type FileContent = TerminalSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> anyhow::Result { diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 825176a2a0b5e35c60606d0922cef37fe91caea7..8409c60b22b03b8d917b84ae20229dc2db63fe4a 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -13,7 +13,7 @@ use gpui::{ use refineable::Refineable; use schemars::{JsonSchema, json_schema}; use serde::{Deserialize, Serialize}; -use settings::{ParameterizedJsonSchema, Settings, SettingsSources, SettingsUi}; +use settings::{ParameterizedJsonSchema, Settings, SettingsKey, SettingsSources, SettingsUi}; use std::sync::Arc; use util::ResultExt as _; use util::schemars::replace_subschema; @@ -366,7 +366,8 @@ impl IconThemeSelection { } /// Settings for rendering text in UI and text buffers. -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct ThemeSettingsContent { /// The default font size for text in the UI. 
#[serde(default)] @@ -818,8 +819,6 @@ fn clamp_font_weight(weight: f32) -> FontWeight { } impl settings::Settings for ThemeSettings { - const KEY: Option<&'static str> = None; - type FileContent = ThemeSettingsContent; fn load(sources: SettingsSources, cx: &mut App) -> Result { diff --git a/crates/title_bar/src/title_bar_settings.rs b/crates/title_bar/src/title_bar_settings.rs index 0dc301f7eef6789bf1c0a2ad51cb63dff77d0337..38e529098bd3e97a11ecefac684c1734302f4261 100644 --- a/crates/title_bar/src/title_bar_settings.rs +++ b/crates/title_bar/src/title_bar_settings.rs @@ -1,7 +1,7 @@ use db::anyhow; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Copy, Clone, Deserialize, Debug)] pub struct TitleBarSettings { @@ -14,8 +14,11 @@ pub struct TitleBarSettings { pub show_menus: bool, } -#[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)] -#[settings_ui(group = "Title Bar", path = "title_bar")] +#[derive( + Copy, Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey, +)] +#[settings_ui(group = "Title Bar")] +#[settings_key(key = "title_bar")] pub struct TitleBarSettingsContent { /// Whether to show the branch icon beside branch switcher in the title bar. 
/// @@ -48,8 +51,6 @@ pub struct TitleBarSettingsContent { } impl Settings for TitleBarSettings { - const KEY: Option<&'static str> = Some("title_bar"); - type FileContent = TitleBarSettingsContent; fn load(sources: SettingsSources, _: &mut gpui::App) -> anyhow::Result diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index ef9588acae181bad2b079d7c89458458bb851a64..4f1173a188b6d3113234c79f02a55d2c34cf12d9 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -68,7 +68,7 @@ impl VimTestContext { pub fn init_keybindings(enabled: bool, cx: &mut App) { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(enabled)); + store.update_user_settings::(cx, |s| s.vim_mode = Some(enabled)); }); let default_key_bindings = settings::KeymapFile::load_asset_allow_partial_failure( "keymaps/default-macos.json", @@ -134,7 +134,7 @@ impl VimTestContext { pub fn enable_vim(&mut self) { self.cx.update(|_, cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(true)); + store.update_user_settings::(cx, |s| s.vim_mode = Some(true)); }); }) } @@ -142,7 +142,7 @@ impl VimTestContext { pub fn disable_vim(&mut self) { self.cx.update(|_, cx| { SettingsStore::update_global(cx, |store, cx| { - store.update_user_settings::(cx, |s| *s = Some(false)); + store.update_user_settings::(cx, |s| s.vim_mode = Some(false)); }); }) } @@ -151,7 +151,7 @@ impl VimTestContext { self.cx.update(|_, cx| { SettingsStore::update_global(cx, |store, cx| { store.update_user_settings::(cx, |s| { - *s = Some(true) + s.helix_mode = Some(true) }); }); }) diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 5a4ac425183e1843db7075c0f5054a16f82948f9..f4f8de2e7800732bb0a278bbc37928c58002ec7d 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -39,7 +39,9 @@ use object::Object; use schemars::JsonSchema; use 
serde::Deserialize; use serde_derive::Serialize; -use settings::{Settings, SettingsSources, SettingsStore, SettingsUi, update_settings_file}; +use settings::{ + Settings, SettingsKey, SettingsSources, SettingsStore, SettingsUi, update_settings_file, +}; use state::{Mode, Operator, RecordedSelection, SearchState, VimGlobals}; use std::{mem, ops::Range, sync::Arc}; use surrounds::SurroundsType; @@ -247,7 +249,7 @@ pub fn init(cx: &mut App) { let fs = workspace.app_state().fs.clone(); let currently_enabled = Vim::enabled(cx); update_settings_file::(fs, cx, move |setting, _| { - *setting = Some(!currently_enabled) + setting.vim_mode = Some(!currently_enabled) }) }); @@ -1785,7 +1787,8 @@ struct VimSettings { pub cursor_shape: CursorShapeSettings, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(key = "vim")] struct VimSettingsContent { pub default_mode: Option, pub toggle_relative_line_numbers: Option, @@ -1824,8 +1827,6 @@ impl From for Mode { } impl Settings for VimSettings { - const KEY: Option<&'static str> = Some("vim"); - type FileContent = VimSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/vim_mode_setting/Cargo.toml b/crates/vim_mode_setting/Cargo.toml index fbb7f30b4c2a03aca48ad5db26283c33aedb885b..61d265b958b10fac700bd78577ac5fefb19b7d09 100644 --- a/crates/vim_mode_setting/Cargo.toml +++ b/crates/vim_mode_setting/Cargo.toml @@ -14,5 +14,7 @@ path = "src/vim_mode_setting.rs" [dependencies] anyhow.workspace = true gpui.workspace = true +schemars.workspace = true +serde.workspace = true settings.workspace = true workspace-hack.workspace = true diff --git a/crates/vim_mode_setting/src/vim_mode_setting.rs b/crates/vim_mode_setting/src/vim_mode_setting.rs index 7fb39ef4f6f10370f1a0fb2cf83dcb3a88b80d81..660520a307dbef1e73174aa5449417d766c04235 100644 --- 
a/crates/vim_mode_setting/src/vim_mode_setting.rs +++ b/crates/vim_mode_setting/src/vim_mode_setting.rs @@ -6,7 +6,8 @@ use anyhow::Result; use gpui::App; -use settings::{Settings, SettingsSources, SettingsUi}; +use schemars::JsonSchema; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; /// Initializes the `vim_mode_setting` crate. pub fn init(cx: &mut App) { @@ -14,25 +15,40 @@ pub fn init(cx: &mut App) { HelixModeSetting::register(cx); } -/// Whether or not to enable Vim mode. -/// -/// Default: false -#[derive(SettingsUi)] pub struct VimModeSetting(pub bool); -impl Settings for VimModeSetting { - const KEY: Option<&'static str> = Some("vim_mode"); +#[derive( + Copy, + Clone, + PartialEq, + Eq, + Debug, + Default, + serde::Serialize, + serde::Deserialize, + SettingsUi, + SettingsKey, + JsonSchema, +)] +#[settings_key(None)] +pub struct VimModeSettingContent { + /// Whether or not to enable Vim mode. + /// + /// Default: false + pub vim_mode: Option, +} - type FileContent = Option; +impl Settings for VimModeSetting { + type FileContent = VimModeSettingContent; fn load(sources: SettingsSources, _: &mut App) -> Result { Ok(Self( sources .user - .or(sources.server) - .copied() - .flatten() - .unwrap_or(sources.default.ok_or_else(Self::missing_default)?), + .and_then(|mode| mode.vim_mode) + .or(sources.server.and_then(|mode| mode.vim_mode)) + .or(sources.default.vim_mode) + .ok_or_else(Self::missing_default)?, )) } @@ -41,25 +57,41 @@ impl Settings for VimModeSetting { } } -/// Whether or not to enable Helix mode. 
-/// -/// Default: false -#[derive(SettingsUi)] +#[derive(Debug)] pub struct HelixModeSetting(pub bool); -impl Settings for HelixModeSetting { - const KEY: Option<&'static str> = Some("helix_mode"); +#[derive( + Copy, + Clone, + PartialEq, + Eq, + Debug, + Default, + serde::Serialize, + serde::Deserialize, + SettingsUi, + SettingsKey, + JsonSchema, +)] +#[settings_key(None)] +pub struct HelixModeSettingContent { + /// Whether or not to enable Helix mode. + /// + /// Default: false + pub helix_mode: Option, +} - type FileContent = Option; +impl Settings for HelixModeSetting { + type FileContent = HelixModeSettingContent; fn load(sources: SettingsSources, _: &mut App) -> Result { Ok(Self( sources .user - .or(sources.server) - .copied() - .flatten() - .unwrap_or(sources.default.ok_or_else(Self::missing_default)?), + .and_then(|mode| mode.helix_mode) + .or(sources.server.and_then(|mode| mode.helix_mode)) + .or(sources.default.helix_mode) + .ok_or_else(Self::missing_default)?, )) } diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index f37be0f154f736b021b0fcf5f29cf26074e3299f..23fbec470c4d2e305bf7b51679bbe56f6dfeaa95 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -17,7 +17,7 @@ use gpui::{ use project::{Project, ProjectEntryId, ProjectPath}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsLocation, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsLocation, SettingsSources, SettingsUi}; use smallvec::SmallVec; use std::{ any::{Any, TypeId}, @@ -101,7 +101,8 @@ pub enum ActivateOnClose { LeftNeighbour, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(key = "tabs")] pub struct ItemSettingsContent { /// Whether to show the Git file status on a tab item. 
/// @@ -130,7 +131,8 @@ pub struct ItemSettingsContent { show_close_button: Option, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(key = "preview_tabs")] pub struct PreviewTabsSettingsContent { /// Whether to show opened editors as preview tabs. /// Preview tabs do not stay open, are reused until explicitly set to be kept open opened (via double-click or editing) and show file names in italic. @@ -148,8 +150,6 @@ pub struct PreviewTabsSettingsContent { } impl Settings for ItemSettings { - const KEY: Option<&'static str> = Some("tabs"); - type FileContent = ItemSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { @@ -187,8 +187,6 @@ impl Settings for ItemSettings { } impl Settings for PreviewTabsSettings { - const KEY: Option<&'static str> = Some("preview_tabs"); - type FileContent = PreviewTabsSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 1a7e548e4eda1f41e36c6ad0883cdd57be8828d7..8868f3190575ac4b861e0619732890f477d83b69 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -6,7 +6,7 @@ use collections::HashMap; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; #[derive(Deserialize)] pub struct WorkspaceSettings { @@ -118,7 +118,8 @@ pub enum RestoreOnStartupBehavior { LastSession, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct WorkspaceSettingsContent { /// Active pane styling settings. 
pub active_pane_modifiers: Option, @@ -223,7 +224,8 @@ pub struct TabBarSettings { pub show_tab_bar_buttons: bool, } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(key = "tab_bar")] pub struct TabBarSettingsContent { /// Whether or not to show the tab bar in the editor. /// @@ -282,8 +284,6 @@ pub struct CenteredLayoutSettings { } impl Settings for WorkspaceSettings { - const KEY: Option<&'static str> = None; - type FileContent = WorkspaceSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { @@ -373,8 +373,6 @@ impl Settings for WorkspaceSettings { } impl Settings for TabBarSettings { - const KEY: Option<&'static str> = Some("tab_bar"); - type FileContent = TabBarSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> Result { diff --git a/crates/worktree/src/worktree_settings.rs b/crates/worktree/src/worktree_settings.rs index 6a8e2b5d89b0201b81f45817adb439fe85e24d91..41eb3ab6f6aa971d44009c1cbb00567a4f3448ea 100644 --- a/crates/worktree/src/worktree_settings.rs +++ b/crates/worktree/src/worktree_settings.rs @@ -4,7 +4,7 @@ use anyhow::Context as _; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsSources, SettingsUi}; use util::paths::PathMatcher; #[derive(Clone, PartialEq, Eq)] @@ -31,7 +31,8 @@ impl WorktreeSettings { } } -#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey)] +#[settings_key(None)] pub struct WorktreeSettingsContent { /// Completely ignore files matching globs from `file_scan_exclusions`. Overrides /// `file_scan_inclusions`. 
@@ -65,8 +66,6 @@ pub struct WorktreeSettingsContent { } impl Settings for WorktreeSettings { - const KEY: Option<&'static str> = None; - type FileContent = WorktreeSettingsContent; fn load(sources: SettingsSources, _: &mut App) -> anyhow::Result { diff --git a/crates/zlog_settings/src/zlog_settings.rs b/crates/zlog_settings/src/zlog_settings.rs index 0cdc784489b47d89388edc9ed20aed6f3c2f9959..dd74fc574ff23dc78beca1feafeb34d874a68c22 100644 --- a/crates/zlog_settings/src/zlog_settings.rs +++ b/crates/zlog_settings/src/zlog_settings.rs @@ -3,7 +3,7 @@ use anyhow::Result; use gpui::App; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsStore, SettingsUi}; +use settings::{Settings, SettingsKey, SettingsStore, SettingsUi}; pub fn init(cx: &mut App) { ZlogSettings::register(cx); @@ -15,15 +15,25 @@ pub fn init(cx: &mut App) { .detach(); } -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)] +#[derive( + Clone, + Debug, + Default, + Serialize, + Deserialize, + PartialEq, + Eq, + JsonSchema, + SettingsUi, + SettingsKey, +)] +#[settings_key(key = "log")] pub struct ZlogSettings { #[serde(default, flatten)] pub scopes: std::collections::HashMap, } impl Settings for ZlogSettings { - const KEY: Option<&'static str> = Some("log"); - type FileContent = Self; fn load(sources: settings::SettingsSources, _: &mut App) -> Result From c2fa9d79814c2124da0d4f8a0c3dfcf075505ac0 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 4 Sep 2025 15:25:52 -0400 Subject: [PATCH 012/109] docs: Add configuration example for `simple-completion-language-server` (#37566) This PR adds a configuration example for the `simple-completion-language-server`. We show the user how to re-enable the `feature_paths` option, as we're now disabling it by default (https://github.com/zed-industries/zed/pull/37565). 
Release Notes: - N/A --- docs/src/snippets.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/src/snippets.md b/docs/src/snippets.md index 3514d08340e8f1d04287ffde0150281149625476..b5d09c6c37b6f869c11dbbc524a8515fde7d4142 100644 --- a/docs/src/snippets.md +++ b/docs/src/snippets.md @@ -40,4 +40,20 @@ To create JSX snippets you have to use `javascript.json` snippets file, instead ## See also +The `feature_paths` option in `simple-completion-language-server` is disabled by default. + +If you want to enable it you can add the following to your `settings.json`: + +```json +{ + "lsp": { + "snippet-completion-server": { + "settings": { + "feature_paths": true + } + } + } +} +``` + For more configuration information, see the [`simple-completion-language-server` instructions](https://github.com/zed-industries/simple-completion-language-server/tree/main). From ccae033d8519cffa7f61a265f05837f2ecf599de Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 4 Sep 2025 22:34:23 +0300 Subject: [PATCH 013/109] Make fallback open picker more intuitive (#37564) Closes https://github.com/zed-industries/zed/issues/34991 Before, the picker did not allow to open the current directory that was just completed: image pressing `enter` here would open `assets`; pressing `tab` would append the `assets/` segment to the query. Only backspace, removing `/` would allow to open the current directory. After: image The first item is now a placeholder for opening the current directory with `enter`. Any time a fuzzy query is appended, the placeholder goes away; `tab` selects the entry below the placeholder. 
Release Notes: - Made fallback open picker more intuitive --------- Co-authored-by: Peter Tripp Co-authored-by: David Kleingeld --- crates/file_finder/src/open_path_prompt.rs | 66 +++++++++- .../file_finder/src/open_path_prompt_tests.rs | 118 ++++++++++++------ 2 files changed, 146 insertions(+), 38 deletions(-) diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 4625872e46c690701b304351c6648a8e380f181a..51e8f5c437ab1aa86433f91022a01e8a2e09f664 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -1,7 +1,7 @@ use crate::file_finder_settings::FileFinderSettings; use file_icons::FileIcons; use futures::channel::oneshot; -use fuzzy::{StringMatch, StringMatchCandidate}; +use fuzzy::{CharBag, StringMatch, StringMatchCandidate}; use gpui::{HighlightStyle, StyledText, Task}; use picker::{Picker, PickerDelegate}; use project::{DirectoryItem, DirectoryLister}; @@ -125,6 +125,13 @@ impl OpenPathDelegate { DirectoryState::None { .. 
} => Vec::new(), } } + + fn current_dir(&self) -> &'static str { + match self.path_style { + PathStyle::Posix => "./", + PathStyle::Windows => ".\\", + } + } } #[derive(Debug)] @@ -233,6 +240,7 @@ impl PickerDelegate for OpenPathDelegate { cx: &mut Context>, ) -> Task<()> { let lister = &self.lister; + let input_is_empty = query.is_empty(); let (dir, suffix) = get_dir_and_suffix(query, self.path_style); let query = match &self.directory_state { @@ -263,6 +271,7 @@ impl PickerDelegate for OpenPathDelegate { let cancel_flag = self.cancel_flag.clone(); let parent_path_is_root = self.prompt_root == dir; + let current_dir = self.current_dir(); cx.spawn_in(window, async move |this, cx| { if let Some(query) = query { let paths = query.await; @@ -353,10 +362,38 @@ impl PickerDelegate for OpenPathDelegate { return; }; + let mut max_id = 0; if !suffix.starts_with('.') { - new_entries.retain(|entry| !entry.path.string.starts_with('.')); + new_entries.retain(|entry| { + max_id = max_id.max(entry.path.id); + !entry.path.string.starts_with('.') + }); } + if suffix.is_empty() { + let should_prepend_with_current_dir = this + .read_with(cx, |picker, _| { + !input_is_empty + && !matches!( + picker.delegate.directory_state, + DirectoryState::Create { .. 
} + ) + }) + .unwrap_or(false); + if should_prepend_with_current_dir { + new_entries.insert( + 0, + CandidateInfo { + path: StringMatchCandidate { + id: max_id + 1, + string: current_dir.to_string(), + char_bag: CharBag::from(current_dir), + }, + is_dir: true, + }, + ); + } + this.update(cx, |this, cx| { this.delegate.selected_index = 0; this.delegate.string_matches = new_entries @@ -485,6 +522,10 @@ impl PickerDelegate for OpenPathDelegate { _: &mut Context>, ) -> Option { let candidate = self.get_entry(self.selected_index)?; + if candidate.path.string.is_empty() || candidate.path.string == self.current_dir() { + return None; + } + let path_style = self.path_style; Some( maybe!({ @@ -629,12 +670,18 @@ impl PickerDelegate for OpenPathDelegate { DirectoryState::None { .. } => Vec::new(), }; + let is_current_dir_candidate = candidate.path.string == self.current_dir(); + let file_icon = maybe!({ if !settings.file_icons { return None; } let icon = if candidate.is_dir { - FileIcons::get_folder_icon(false, cx)? + if is_current_dir_candidate { + return Some(Icon::new(IconName::ReplyArrowRight).color(Color::Muted)); + } else { + FileIcons::get_folder_icon(false, cx)? + } } else { let path = path::Path::new(&candidate.path.string); FileIcons::get_icon(path, cx)? 
@@ -652,6 +699,8 @@ impl PickerDelegate for OpenPathDelegate { .child(HighlightedLabel::new( if parent_path == &self.prompt_root { format!("{}{}", self.prompt_root, candidate.path.string) + } else if is_current_dir_candidate { + "open this directory".to_string() } else { candidate.path.string }, @@ -747,6 +796,17 @@ impl PickerDelegate for OpenPathDelegate { fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { Arc::from(format!("[directory{MAIN_SEPARATOR_STR}]filename.ext")) } + + fn separators_after_indices(&self) -> Vec { + let Some(m) = self.string_matches.first() else { + return Vec::new(); + }; + if m.string == self.current_dir() { + vec![0] + } else { + Vec::new() + } + } } fn path_candidates( diff --git a/crates/file_finder/src/open_path_prompt_tests.rs b/crates/file_finder/src/open_path_prompt_tests.rs index a69ac6992dc280fd6537b16087302c2fbb9f8f4c..1f47c4e80adc598c505cf130519623fd6e578035 100644 --- a/crates/file_finder/src/open_path_prompt_tests.rs +++ b/crates/file_finder/src/open_path_prompt_tests.rs @@ -43,12 +43,17 @@ async fn test_open_path_prompt(cx: &mut TestAppContext) { insert_query(query, &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["root"]); + #[cfg(not(windows))] + let expected_separator = "./"; + #[cfg(windows)] + let expected_separator = ".\\"; + // If the query ends with a slash, the picker should show the contents of the directory. let query = path!("/root/"); insert_query(query, &picker, cx).await; assert_eq!( collect_match_candidates(&picker, cx), - vec!["a1", "a2", "a3", "dir1", "dir2"] + vec![expected_separator, "a1", "a2", "a3", "dir1", "dir2"] ); // Show candidates for the query "a". 
@@ -72,7 +77,7 @@ async fn test_open_path_prompt(cx: &mut TestAppContext) { insert_query(query, &picker, cx).await; assert_eq!( collect_match_candidates(&picker, cx), - vec!["c", "d1", "d2", "d3", "dir3", "dir4"] + vec![expected_separator, "c", "d1", "d2", "d3", "dir3", "dir4"] ); // Show candidates for the query "d". @@ -116,71 +121,86 @@ async fn test_open_path_prompt_completion(cx: &mut TestAppContext) { // Confirm completion for the query "/root", since it's a directory, it should add a trailing slash. let query = path!("/root"); insert_query(query, &picker, cx).await; - assert_eq!(confirm_completion(query, 0, &picker, cx), path!("/root/")); + assert_eq!( + confirm_completion(query, 0, &picker, cx).unwrap(), + path!("/root/") + ); // Confirm completion for the query "/root/", selecting the first candidate "a", since it's a file, it should not add a trailing slash. let query = path!("/root/"); insert_query(query, &picker, cx).await; - assert_eq!(confirm_completion(query, 0, &picker, cx), path!("/root/a")); + assert_eq!( + confirm_completion(query, 0, &picker, cx), + None, + "First entry is `./` and when we confirm completion, it is tabbed below" + ); + assert_eq!( + confirm_completion(query, 1, &picker, cx).unwrap(), + path!("/root/a"), + "Second entry is the first entry of a directory that we want to be completed" + ); // Confirm completion for the query "/root/", selecting the second candidate "dir1", since it's a directory, it should add a trailing slash. 
let query = path!("/root/"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 1, &picker, cx), + confirm_completion(query, 2, &picker, cx).unwrap(), path!("/root/dir1/") ); let query = path!("/root/a"); insert_query(query, &picker, cx).await; - assert_eq!(confirm_completion(query, 0, &picker, cx), path!("/root/a")); + assert_eq!( + confirm_completion(query, 0, &picker, cx).unwrap(), + path!("/root/a") + ); let query = path!("/root/d"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 1, &picker, cx), + confirm_completion(query, 1, &picker, cx).unwrap(), path!("/root/dir2/") ); let query = path!("/root/dir2"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 0, &picker, cx), + confirm_completion(query, 0, &picker, cx).unwrap(), path!("/root/dir2/") ); let query = path!("/root/dir2/"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 0, &picker, cx), + confirm_completion(query, 1, &picker, cx).unwrap(), path!("/root/dir2/c") ); let query = path!("/root/dir2/"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 2, &picker, cx), + confirm_completion(query, 3, &picker, cx).unwrap(), path!("/root/dir2/dir3/") ); let query = path!("/root/dir2/d"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 0, &picker, cx), + confirm_completion(query, 0, &picker, cx).unwrap(), path!("/root/dir2/d") ); let query = path!("/root/dir2/d"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 1, &picker, cx), + confirm_completion(query, 1, &picker, cx).unwrap(), path!("/root/dir2/dir3/") ); let query = path!("/root/dir2/di"); insert_query(query, &picker, cx).await; assert_eq!( - confirm_completion(query, 1, &picker, cx), + confirm_completion(query, 1, &picker, cx).unwrap(), path!("/root/dir2/dir4/") ); } @@ -211,42 +231,63 @@ async fn test_open_path_prompt_on_windows(cx: &mut 
TestAppContext) { insert_query(query, &picker, cx).await; assert_eq!( collect_match_candidates(&picker, cx), - vec!["a", "dir1", "dir2"] + vec![".\\", "a", "dir1", "dir2"] + ); + assert_eq!( + confirm_completion(query, 0, &picker, cx), + None, + "First entry is `.\\` and when we confirm completion, it is tabbed below" + ); + assert_eq!( + confirm_completion(query, 1, &picker, cx).unwrap(), + "C:/root/a", + "Second entry is the first entry of a directory that we want to be completed" ); - assert_eq!(confirm_completion(query, 0, &picker, cx), "C:/root/a"); let query = "C:\\root/"; insert_query(query, &picker, cx).await; assert_eq!( collect_match_candidates(&picker, cx), - vec!["a", "dir1", "dir2"] + vec![".\\", "a", "dir1", "dir2"] + ); + assert_eq!( + confirm_completion(query, 1, &picker, cx).unwrap(), + "C:\\root/a" ); - assert_eq!(confirm_completion(query, 0, &picker, cx), "C:\\root/a"); let query = "C:\\root\\"; insert_query(query, &picker, cx).await; assert_eq!( collect_match_candidates(&picker, cx), - vec!["a", "dir1", "dir2"] + vec![".\\", "a", "dir1", "dir2"] + ); + assert_eq!( + confirm_completion(query, 1, &picker, cx).unwrap(), + "C:\\root\\a" ); - assert_eq!(confirm_completion(query, 0, &picker, cx), "C:\\root\\a"); // Confirm completion for the query "C:/root/d", selecting the second candidate "dir2", since it's a directory, it should add a trailing slash. 
let query = "C:/root/d"; insert_query(query, &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]); - assert_eq!(confirm_completion(query, 1, &picker, cx), "C:/root/dir2\\"); + assert_eq!( + confirm_completion(query, 1, &picker, cx).unwrap(), + "C:/root/dir2\\" + ); let query = "C:\\root/d"; insert_query(query, &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]); - assert_eq!(confirm_completion(query, 0, &picker, cx), "C:\\root/dir1\\"); + assert_eq!( + confirm_completion(query, 0, &picker, cx).unwrap(), + "C:\\root/dir1\\" + ); let query = "C:\\root\\d"; insert_query(query, &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]); assert_eq!( - confirm_completion(query, 0, &picker, cx), + confirm_completion(query, 0, &picker, cx).unwrap(), "C:\\root\\dir1\\" ); } @@ -276,20 +317,29 @@ async fn test_open_path_prompt_on_windows_with_remote(cx: &mut TestAppContext) { insert_query(query, &picker, cx).await; assert_eq!( collect_match_candidates(&picker, cx), - vec!["a", "dir1", "dir2"] + vec!["./", "a", "dir1", "dir2"] + ); + assert_eq!( + confirm_completion(query, 1, &picker, cx).unwrap(), + "/root/a" ); - assert_eq!(confirm_completion(query, 0, &picker, cx), "/root/a"); // Confirm completion for the query "/root/d", selecting the second candidate "dir2", since it's a directory, it should add a trailing slash. 
let query = "/root/d"; insert_query(query, &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]); - assert_eq!(confirm_completion(query, 1, &picker, cx), "/root/dir2/"); + assert_eq!( + confirm_completion(query, 1, &picker, cx).unwrap(), + "/root/dir2/" + ); let query = "/root/d"; insert_query(query, &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["dir1", "dir2"]); - assert_eq!(confirm_completion(query, 0, &picker, cx), "/root/dir1/"); + assert_eq!( + confirm_completion(query, 0, &picker, cx).unwrap(), + "/root/dir1/" + ); } #[gpui::test] @@ -396,15 +446,13 @@ fn confirm_completion( select: usize, picker: &Entity>, cx: &mut VisualTestContext, -) -> String { - picker - .update_in(cx, |f, window, cx| { - if f.delegate.selected_index() != select { - f.delegate.set_selected_index(select, window, cx); - } - f.delegate.confirm_completion(query.to_string(), window, cx) - }) - .unwrap() +) -> Option { + picker.update_in(cx, |f, window, cx| { + if f.delegate.selected_index() != select { + f.delegate.set_selected_index(select, window, cx); + } + f.delegate.confirm_completion(query.to_string(), window, cx) + }) } fn collect_match_candidates( From 4c32d5bf138171a52b00dc5f2be233c718c800ae Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 4 Sep 2025 15:35:48 -0400 Subject: [PATCH 014/109] snippets: Disable `feature_paths` by default (#37565) This PR updates the default configuration of the `snippets` extension to disable suggesting paths (`feature_paths`). 
If users want to enable it, it can be done via the settings: ```json { "lsp": { "snippet-completion-server": { "settings": { "feature_paths": true } } } } ``` Release Notes: - N/A --- extensions/snippets/src/snippets.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/extensions/snippets/src/snippets.rs b/extensions/snippets/src/snippets.rs index 05e1ebca38ddfa576795e6040ccd2b3dde20cc3e..1efe4c234002b5c8b3d26b9bdb0b30095e212ea6 100644 --- a/extensions/snippets/src/snippets.rs +++ b/extensions/snippets/src/snippets.rs @@ -120,7 +120,9 @@ impl zed::Extension for SnippetExtension { "snippets_first": true, "feature_words": false, "feature_snippets": true, - "feature_paths": true + // We disable `feature_paths` by default, because it's bad UX to assume that any `/` that is typed + // is the start of a path. + "feature_paths": false }) }); Ok(Some(settings)) From 1b865a60f854eb7dbc0aad98718d1785adceb8b6 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 4 Sep 2025 16:08:49 -0400 Subject: [PATCH 015/109] snippets: Bump to v0.0.6 (#37567) This PR bumps the snippets extension to v0.0.6. 
Changes: - https://github.com/zed-industries/zed/pull/37565 Release Notes: - N/A --- Cargo.lock | 2 +- extensions/snippets/Cargo.toml | 2 +- extensions/snippets/extension.toml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a99c59a1890080ac220b669b26864d859d2ad377..c5e6c8588137b87e00b15e0655a53cdefc518d4f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20642,7 +20642,7 @@ dependencies = [ [[package]] name = "zed_snippets" -version = "0.0.5" +version = "0.0.6" dependencies = [ "serde_json", "zed_extension_api 0.1.0", diff --git a/extensions/snippets/Cargo.toml b/extensions/snippets/Cargo.toml index 80a3d4f31ebd628f03b077c727527b5aa0057ebf..ab5ac7244a3acbe25246588f4fe4ad1a35f1964f 100644 --- a/extensions/snippets/Cargo.toml +++ b/extensions/snippets/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_snippets" -version = "0.0.5" +version = "0.0.6" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/snippets/extension.toml b/extensions/snippets/extension.toml index c2b4178a614220872bca37e4c2a12f4b16bba82f..01dc587d77af8a9ca074b49a247ef8f6cfffb516 100644 --- a/extensions/snippets/extension.toml +++ b/extensions/snippets/extension.toml @@ -1,9 +1,9 @@ id = "snippets" name = "Snippets" description = "Support for language-agnostic snippets, provided by simple-completion-language-server" -version = "0.0.5" +version = "0.0.6" schema_version = 1 -authors = [] +authors = ["Zed Industries "] repository = "https://github.com/zed-industries/zed" [language_servers.snippet-completion-server] From e982cb824a94abc32392861c0753373a8df1684e Mon Sep 17 00:00:00 2001 From: morgankrey Date: Thu, 4 Sep 2025 15:57:00 -0500 Subject: [PATCH 016/109] docs: Claude Authentication (#37573) Release Notes: - N/A --- docs/src/ai/agent-panel.md | 2 +- docs/src/ai/external-agents.md | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md 
index ce91ca3401d07aba552b1ca007b3809e301071de..b6b748e2f58493cd62abbd3c6e7dc443182e992f 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -23,7 +23,7 @@ From this point on, you can interact with the many supported features outlined b > Note that for external agents, like [Gemini CLI](./external-agents.md#gemini-cli) or [Claude Code](./external-agents.md#claude-code), some of the features outlined below are _not_ currently supported—for example, _restoring threads from history_, _checkpoints_, _token usage display_, _model selection_, and others. All of them should hopefully be supported in the future. -### Creating New Threads +### Creating New Threads {#new-thread} By default, the Agent Panel uses Zed's first-party agent. diff --git a/docs/src/ai/external-agents.md b/docs/src/ai/external-agents.md index 963e41d42f53ad68ef70de3466913b71b11bd38e..e05849ef1aa54c8ea2c3fff09c8008f58b8a01b7 100644 --- a/docs/src/ai/external-agents.md +++ b/docs/src/ai/external-agents.md @@ -85,6 +85,12 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your ] ``` +### Authentication + +As of version `0.202.7` (stable) and `0.203.2` (preview), authentication to Zed's Claude Code installation is decoupled entirely from Zed's agent. That is to say, an Anthropic API key added via the [Zed Agent's settings](./llm-providers.md#anthropic) will _not_ be utilized by Claude Code for authentication and billing. + +To ensure you're using your billing method of choice, [open a new Claude Code thread](./agent-panel.md#new-thread)`. Then, run `/login`, and authenticate either via API key, or via `Log in with Claude Code` to use a Claude Pro/Max subscription. + #### Installation The first time you create a Claude Code thread, Zed will install [@zed-industries/claude-code-acp](https://github.com/zed-industries/claude-code-acp). This installation is only available to Zed and is kept up to date as you use the agent. 
From 3c0183fa5e8fda26d300974a19ae229e251ed4fa Mon Sep 17 00:00:00 2001 From: morgankrey Date: Thu, 4 Sep 2025 16:14:57 -0500 Subject: [PATCH 017/109] Extraneous backtick (#37576) Release Notes: - N/A --- docs/src/ai/external-agents.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/ai/external-agents.md b/docs/src/ai/external-agents.md index e05849ef1aa54c8ea2c3fff09c8008f58b8a01b7..bc7768c6081ad7a32eb1fd780750a48c4b9200f0 100644 --- a/docs/src/ai/external-agents.md +++ b/docs/src/ai/external-agents.md @@ -89,7 +89,7 @@ If you'd like to bind this to a keyboard shortcut, you can do so by editing your As of version `0.202.7` (stable) and `0.203.2` (preview), authentication to Zed's Claude Code installation is decoupled entirely from Zed's agent. That is to say, an Anthropic API key added via the [Zed Agent's settings](./llm-providers.md#anthropic) will _not_ be utilized by Claude Code for authentication and billing. -To ensure you're using your billing method of choice, [open a new Claude Code thread](./agent-panel.md#new-thread)`. Then, run `/login`, and authenticate either via API key, or via `Log in with Claude Code` to use a Claude Pro/Max subscription. +To ensure you're using your billing method of choice, [open a new Claude Code thread](./agent-panel.md#new-thread). Then, run `/login`, and authenticate either via API key, or via `Log in with Claude Code` to use a Claude Pro/Max subscription. #### Installation From c7902478c18a42138c6129aa3fc50aa0165337c8 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Thu, 4 Sep 2025 18:16:25 -0400 Subject: [PATCH 018/109] acp: Pass project environment to external agent servers (#37568) Closes #37469 Release Notes: - agent: The project shell environment is now passed to external agent processes. 
Co-authored-by: Richard Feldman Co-authored-by: Nia Espera --- crates/agent_servers/src/claude.rs | 9 +++++++++ crates/agent_servers/src/gemini.rs | 14 ++++++++++---- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index 48d3e33775d98dfe89801813c6926ff40f48ed87..15352ce216f52dfd7a9f372a43c0ec401eb540af 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -80,8 +80,15 @@ impl AgentServer for ClaudeCode { let settings = cx.read_global(|settings: &SettingsStore, _| { settings.get::(None).claude.clone() }); + let project = delegate.project().clone(); cx.spawn(async move |cx| { + let mut project_env = project + .update(cx, |project, cx| { + project.directory_environment(root_dir.as_path().into(), cx) + })? + .await + .unwrap_or_default(); let mut command = if let Some(settings) = settings { settings.command } else { @@ -97,6 +104,8 @@ impl AgentServer for ClaudeCode { })? .await? }; + project_env.extend(command.env.take().unwrap_or_default()); + command.env = Some(project_env); command .env diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index b58ad703cda496c4413f30decbfa5e0b1d1b0735..7e40d85767b7ed407a22ece55580bee7317a5e6d 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -41,12 +41,19 @@ impl AgentServer for Gemini { let settings = cx.read_global(|settings: &SettingsStore, _| { settings.get::(None).gemini.clone() }); + let project = delegate.project().clone(); cx.spawn(async move |cx| { let ignore_system_version = settings .as_ref() .and_then(|settings| settings.ignore_system_version) .unwrap_or(true); + let mut project_env = project + .update(cx, |project, cx| { + project.directory_environment(root_dir.as_path().into(), cx) + })? 
+ .await + .unwrap_or_default(); let mut command = if let Some(settings) = settings && let Some(command) = settings.custom_command() { @@ -67,13 +74,12 @@ impl AgentServer for Gemini { if !command.args.contains(&ACP_ARG.into()) { command.args.push(ACP_ARG.into()); } - if let Some(api_key) = cx.update(GoogleLanguageModelProvider::api_key)?.await.ok() { - command - .env - .get_or_insert_default() + project_env .insert("GEMINI_API_KEY".to_owned(), api_key.key); } + project_env.extend(command.env.take().unwrap_or_default()); + command.env = Some(project_env); let root_dir_exists = fs.is_dir(&root_dir).await; anyhow::ensure!( From 0cb8a8983cef1f3e015fa0f2fc37e8325f3d201d Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 4 Sep 2025 18:30:48 -0400 Subject: [PATCH 019/109] settings ui: Improve setting proc macro and add scroll to UI (#37581) This PR improves the settings_ui proc macro by taking into account more serde attributes 1. rename_all 2. rename 3. flatten We also pass field documentation to the UI layer now too. This allows ui elements to have more information like the switch field description. We got the scrollbar working and started getting language settings to show up. 
Release Notes: - N/A --------- Co-authored-by: Ben Kunkle --- crates/editor/src/editor_settings.rs | 1 + crates/language/src/language_settings.rs | 38 ++-- crates/settings/src/settings_ui_core.rs | 9 + crates/settings_ui/src/settings_ui.rs | 123 ++++++++----- .../src/settings_ui_macros.rs | 170 +++++++++++++++--- 5 files changed, 258 insertions(+), 83 deletions(-) diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index d74244131e6635c7b9eda6ace0723ced96b0e041..7f4d024e57c4831aa4c512e6dcb3a9ab35d4f610 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -748,6 +748,7 @@ pub struct ScrollbarAxesContent { #[derive( Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi, )] +#[settings_ui(group = "Gutter")] pub struct GutterContent { /// Whether to show line numbers in the gutter. /// diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 3443ccf592a4138edb61959f0dd82bdb8cc8d418..cb519e32eca964cee4a742085714b233a424dd3c 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -208,7 +208,9 @@ impl LanguageSettings { } /// The provider that supplies edit predictions. -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)] +#[derive( + Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi, +)] #[serde(rename_all = "snake_case")] pub enum EditPredictionProvider { None, @@ -231,13 +233,14 @@ impl EditPredictionProvider { /// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot) /// or [Supermaven](https://supermaven.com). -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug, Default, SettingsUi)] pub struct EditPredictionSettings { /// The provider that supplies edit predictions. 
pub provider: EditPredictionProvider, /// A list of globs representing files that edit predictions should be disabled for. /// This list adds to a pre-existing, sensible default set of globs. /// Any additional ones you add are combined with them. + #[settings_ui(skip)] pub disabled_globs: Vec, /// Configures how edit predictions are displayed in the buffer. pub mode: EditPredictionsMode, @@ -269,7 +272,9 @@ pub struct DisabledGlob { } /// The mode in which edit predictions should be displayed. -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema)] +#[derive( + Copy, Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi, +)] #[serde(rename_all = "snake_case")] pub enum EditPredictionsMode { /// If provider supports it, display inline when holding modifier key (e.g., alt). @@ -282,13 +287,15 @@ pub enum EditPredictionsMode { Eager, } -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug, Default, SettingsUi)] pub struct CopilotSettings { /// HTTP/HTTPS proxy to use for Copilot. + #[settings_ui(skip)] pub proxy: Option, /// Disable certificate verification for proxy (not recommended). pub proxy_no_verify: Option, /// Enterprise URI for Copilot. + #[settings_ui(skip)] pub enterprise_uri: Option, } @@ -297,6 +304,7 @@ pub struct CopilotSettings { Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi, SettingsKey, )] #[settings_key(None)] +#[settings_ui(group = "Default Language Settings")] pub struct AllLanguageSettingsContent { /// The settings for enabling/disabling features. #[serde(default)] @@ -309,10 +317,12 @@ pub struct AllLanguageSettingsContent { pub defaults: LanguageSettingsContent, /// The settings for individual languages. #[serde(default)] + #[settings_ui(skip)] pub languages: LanguageToSettingsMap, /// Settings for associating file extensions and filenames /// with languages. 
#[serde(default)] + #[settings_ui(skip)] pub file_types: HashMap, Vec>, } @@ -345,7 +355,7 @@ inventory::submit! { } /// Controls how completions are processed for this language. -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)] #[serde(rename_all = "snake_case")] pub struct CompletionSettings { /// Controls how words are completed. @@ -420,7 +430,7 @@ fn default_3() -> usize { } /// The settings for a particular language. -#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema, SettingsUi)] pub struct LanguageSettingsContent { /// How many columns a tab should occupy. /// @@ -617,12 +627,13 @@ pub enum RewrapBehavior { } /// The contents of the edit prediction settings. -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, SettingsUi)] pub struct EditPredictionSettingsContent { /// A list of globs representing files that edit predictions should be disabled for. /// This list adds to a pre-existing, sensible default set of globs. /// Any additional ones you add are combined with them. #[serde(default)] + #[settings_ui(skip)] pub disabled_globs: Option>, /// The mode used to display edit predictions in the buffer. /// Provider support required. @@ -637,12 +648,13 @@ pub struct EditPredictionSettingsContent { pub enabled_in_text_threads: bool, } -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, PartialEq, SettingsUi)] pub struct CopilotSettingsContent { /// HTTP/HTTPS proxy to use for Copilot. 
/// /// Default: none #[serde(default)] + #[settings_ui(skip)] pub proxy: Option, /// Disable certificate verification for the proxy (not recommended). /// @@ -653,19 +665,21 @@ pub struct CopilotSettingsContent { /// /// Default: none #[serde(default)] + #[settings_ui(skip)] pub enterprise_uri: Option, } /// The settings for enabling/disabling features. -#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, SettingsUi)] #[serde(rename_all = "snake_case")] +#[settings_ui(group = "Features")] pub struct FeaturesContent { /// Determines which edit prediction provider to use. pub edit_prediction_provider: Option, } /// Controls the soft-wrapping behavior in the editor. -#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] +#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)] #[serde(rename_all = "snake_case")] pub enum SoftWrap { /// Prefer a single line generally, unless an overly long line is encountered. @@ -934,7 +948,9 @@ pub enum Formatter { } /// The settings for indent guides. -#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] +#[derive( + Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, SettingsUi, +)] pub struct IndentGuideSettings { /// Whether to display indent guides in the editor. 
/// diff --git a/crates/settings/src/settings_ui_core.rs b/crates/settings/src/settings_ui_core.rs index 9086d3c7454465e8abcaf2d30d01a4f928e4ddef..896a8bc038bdd8e495cdb6161212f9c722d54f14 100644 --- a/crates/settings/src/settings_ui_core.rs +++ b/crates/settings/src/settings_ui_core.rs @@ -19,6 +19,7 @@ pub trait SettingsUi { path: None, title: "None entry", item: SettingsUiItem::None, + documentation: None, } } } @@ -29,6 +30,8 @@ pub struct SettingsUiEntry { pub path: Option<&'static str>, /// What is displayed for the text for this entry pub title: &'static str, + /// documentation for this entry. Constructed from the documentation comment above the struct or field + pub documentation: Option<&'static str>, pub item: SettingsUiItem, } @@ -54,6 +57,7 @@ pub enum SettingsUiItemSingle { pub struct SettingsValue { pub title: &'static str, + pub documentation: Option<&'static str>, pub path: SmallVec<[&'static str; 1]>, pub value: Option, pub default_value: T, @@ -128,7 +132,9 @@ pub enum NumType { U64 = 0, U32 = 1, F32 = 2, + USIZE = 3, } + pub static NUM_TYPE_NAMES: std::sync::LazyLock<[&'static str; NumType::COUNT]> = std::sync::LazyLock::new(|| NumType::ALL.map(NumType::type_name)); pub static NUM_TYPE_IDS: std::sync::LazyLock<[TypeId; NumType::COUNT]> = @@ -143,6 +149,7 @@ impl NumType { NumType::U64 => TypeId::of::(), NumType::U32 => TypeId::of::(), NumType::F32 => TypeId::of::(), + NumType::USIZE => TypeId::of::(), } } @@ -151,6 +158,7 @@ impl NumType { NumType::U64 => std::any::type_name::(), NumType::U32 => std::any::type_name::(), NumType::F32 => std::any::type_name::(), + NumType::USIZE => std::any::type_name::(), } } } @@ -175,3 +183,4 @@ numeric_stepper_for_num_type!(u64, U64); numeric_stepper_for_num_type!(u32, U32); // todo(settings_ui) is there a better ui for f32? 
numeric_stepper_for_num_type!(f32, F32); +numeric_stepper_for_num_type!(usize, USIZE); diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index f316a318785c7f56d465c2d39e6b6ea9bbbd1bfa..d736f0e174ba13d368794d8f5b623a44845d561b 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -1,14 +1,13 @@ mod appearance_settings_controls; use std::any::TypeId; -use std::collections::VecDeque; use std::ops::{Not, Range}; use anyhow::Context as _; use command_palette_hooks::CommandPaletteFilter; use editor::EditorSettingsControls; use feature_flags::{FeatureFlag, FeatureFlagViewExt}; -use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, ReadGlobal, actions}; +use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, ReadGlobal, ScrollHandle, actions}; use settings::{ NumType, SettingsStore, SettingsUiEntry, SettingsUiItem, SettingsUiItemDynamic, SettingsUiItemGroup, SettingsUiItemSingle, SettingsValue, @@ -138,6 +137,7 @@ impl Item for SettingsPage { struct UiEntry { title: &'static str, path: Option<&'static str>, + documentation: Option<&'static str>, _depth: usize, // a // b < a descendant range < a total descendant range @@ -195,6 +195,7 @@ fn build_tree_item( tree.push(UiEntry { title: entry.title, path: entry.path, + documentation: entry.documentation, _depth: depth, descendant_range: index + 1..index + 1, total_descendant_range: index + 1..index + 1, @@ -354,32 +355,29 @@ fn render_content( tree: &SettingsUiTree, window: &mut Window, cx: &mut Context, -) -> impl IntoElement { - let Some(active_entry) = tree.entries.get(tree.active_entry_index) else { - return div() - .size_full() - .child(Label::new(SharedString::new_static("No settings found")).color(Color::Error)); - }; - let mut content = v_flex().size_full().gap_4().overflow_hidden(); +) -> Div { + let content = v_flex().size_full().gap_4(); let mut path = smallvec::smallvec![]; - if let Some(active_entry_path) = 
active_entry.path { - path.push(active_entry_path); - } - let mut entry_index_queue = VecDeque::new(); - - if let Some(child_index) = active_entry.first_descendant_index() { - entry_index_queue.push_back(child_index); - let mut index = child_index; - while let Some(next_sibling_index) = tree.entries[index].next_sibling { - entry_index_queue.push_back(next_sibling_index); - index = next_sibling_index; - } - }; - while let Some(index) = entry_index_queue.pop_front() { + fn render_recursive( + tree: &SettingsUiTree, + index: usize, + path: &mut SmallVec<[&'static str; 1]>, + mut element: Div, + window: &mut Window, + cx: &mut App, + ) -> Div { + let Some(child) = tree.entries.get(index) else { + return element.child( + Label::new(SharedString::new_static("No settings found")).color(Color::Error), + ); + }; + + element = + element.child(Label::new(SharedString::new_static(child.title)).size(LabelSize::Large)); + // todo(settings_ui): subgroups? - let child = &tree.entries[index]; let mut pushed_path = false; if let Some(child_path) = child.path { path.push(child_path); @@ -388,37 +386,56 @@ fn render_content( let settings_value = settings_value_from_settings_and_path( path.clone(), child.title, + child.documentation, // PERF: how to structure this better? 
There feels like there's a way to avoid the clone // and every value lookup SettingsStore::global(cx).raw_user_settings(), SettingsStore::global(cx).raw_default_settings(), ); if let Some(select_descendant) = child.select_descendant { - let selected_descendant = select_descendant(settings_value.read(), cx); - if let Some(descendant_index) = - child.nth_descendant_index(&tree.entries, selected_descendant) - { - entry_index_queue.push_front(descendant_index); + let selected_descendant = child + .nth_descendant_index(&tree.entries, select_descendant(settings_value.read(), cx)); + if let Some(descendant_index) = selected_descendant { + element = render_recursive(&tree, descendant_index, path, element, window, cx); } } + if let Some(child_render) = child.render.as_ref() { + element = element.child(div().child(render_item_single( + settings_value, + child_render, + window, + cx, + ))); + } else if let Some(child_index) = child.first_descendant_index() { + let mut index = Some(child_index); + while let Some(sub_child_index) = index { + element = render_recursive(tree, sub_child_index, path, element, window, cx); + index = tree.entries[sub_child_index].next_sibling; + } + } else { + element = + element.child(div().child(Label::new("// skipped (for now)").color(Color::Muted))) + } + if pushed_path { path.pop(); } - let Some(child_render) = child.render.as_ref() else { - continue; - }; - content = content.child( - div() - .child(Label::new(SharedString::new_static(child.title)).size(LabelSize::Large)) - .child(render_item_single(settings_value, child_render, window, cx)), - ); + return element; } - return content; + return render_recursive( + tree, + tree.active_entry_index, + &mut path, + content, + window, + cx, + ); } impl Render for SettingsPage { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let scroll_handle = window.use_state(cx, |_, _| ScrollHandle::new()); div() .grid() .grid_cols(16) @@ -427,15 +444,19 @@ impl Render for 
SettingsPage { .size_full() .child( div() + .id("settings-ui-nav") .col_span(2) .h_full() .child(render_nav(&self.settings_tree, window, cx)), ) - .child(div().col_span(4).h_full().child(render_content( - &self.settings_tree, - window, - cx, - ))) + .child( + div().col_span(6).h_full().child( + render_content(&self.settings_tree, window, cx) + .id("settings-ui-content") + .track_scroll(scroll_handle.read(cx)) + .overflow_y_scroll(), + ), + ) } } @@ -530,6 +551,7 @@ fn downcast_any_item( let deserialized_setting_value = SettingsValue { title: settings_value.title, path: settings_value.path, + documentation: settings_value.documentation, value, default_value, }; @@ -586,6 +608,17 @@ fn render_any_numeric_stepper( window, cx, ), + NumType::USIZE => render_numeric_stepper::( + downcast_any_item(settings_value), + usize::saturating_sub, + usize::saturating_add, + |n| { + serde_json::Number::try_from(n) + .context("Failed to convert usize to serde_json::Number") + }, + window, + cx, + ), } } @@ -640,7 +673,7 @@ fn render_switch_field( SwitchField::new( id, SharedString::new_static(value.title), - None, + value.documentation.map(SharedString::new_static), match value.read() { true => ToggleState::Selected, false => ToggleState::Unselected, @@ -731,6 +764,7 @@ fn render_toggle_button_group( fn settings_value_from_settings_and_path( path: SmallVec<[&'static str; 1]>, title: &'static str, + documentation: Option<&'static str>, user_settings: &serde_json::Value, default_settings: &serde_json::Value, ) -> SettingsValue { @@ -743,6 +777,7 @@ fn settings_value_from_settings_and_path( let settings_value = SettingsValue { default_value, value, + documentation, path: path.clone(), // todo(settings_ui) is title required inside SettingsValue? 
title, diff --git a/crates/settings_ui_macros/src/settings_ui_macros.rs b/crates/settings_ui_macros/src/settings_ui_macros.rs index 1895083508a6a606f4dd9889529719aa12ea0b10..076f9c0f04e2963e9f4732a1fc7177f9ab85c723 100644 --- a/crates/settings_ui_macros/src/settings_ui_macros.rs +++ b/crates/settings_ui_macros/src/settings_ui_macros.rs @@ -1,3 +1,5 @@ +use std::ops::Not; + use heck::{ToSnakeCase as _, ToTitleCase as _}; use proc_macro2::TokenStream; use quote::{ToTokens, quote}; @@ -63,12 +65,19 @@ pub fn derive_settings_ui(input: proc_macro::TokenStream) -> proc_macro::TokenSt } } + let doc_str = parse_documentation_from_attrs(&input.attrs); + let ui_item_fn_body = generate_ui_item_body(group_name.as_ref(), path_name.as_ref(), &input); // todo(settings_ui): make group name optional, repurpose group as tag indicating item is group, and have "title" tag for custom title let title = group_name.unwrap_or(input.ident.to_string().to_title_case()); - let ui_entry_fn_body = map_ui_item_to_entry(path_name.as_deref(), &title, quote! { Self }); + let ui_entry_fn_body = map_ui_item_to_entry( + path_name.as_deref(), + &title, + doc_str.as_deref(), + quote! { Self }, + ); let expanded = quote! { impl #impl_generics settings::SettingsUi for #name #ty_generics #where_clause { @@ -111,14 +120,22 @@ fn option_inner_type(ty: TokenStream) -> Option { return Some(ty.to_token_stream()); } -fn map_ui_item_to_entry(path: Option<&str>, title: &str, ty: TokenStream) -> TokenStream { +fn map_ui_item_to_entry( + path: Option<&str>, + title: &str, + doc_str: Option<&str>, + ty: TokenStream, +) -> TokenStream { let ty = extract_type_from_option(ty); + // todo(settings_ui): does quote! just work with options? let path = path.map_or_else(|| quote! {None}, |path| quote! {Some(#path)}); + let doc_str = doc_str.map_or_else(|| quote! {None}, |doc_str| quote! {Some(#doc_str)}); quote! 
{ settings::SettingsUiEntry { title: #title, path: #path, item: #ty::settings_ui_item(), + documentation: #doc_str, } } } @@ -134,6 +151,7 @@ fn generate_ui_item_body( settings::SettingsUiItem::None }, (Some(_), _, Data::Struct(data_struct)) => { + let struct_serde_attrs = parse_serde_attributes(&input.attrs); let fields = data_struct .fields .iter() @@ -153,48 +171,37 @@ fn generate_ui_item_body( }) }) .map(|field| { + let field_serde_attrs = parse_serde_attributes(&field.attrs); + let name = field.ident.clone().expect("tuple fields").to_string(); + let doc_str = parse_documentation_from_attrs(&field.attrs); + ( - field.ident.clone().expect("tuple fields").to_string(), + name.to_title_case(), + doc_str, + field_serde_attrs.flatten.not().then(|| { + struct_serde_attrs.apply_rename_to_field(&field_serde_attrs, &name) + }), field.ty.to_token_stream(), ) }) // todo(settings_ui): Re-format field name as nice title, and support setting different title with attr - .map(|(name, ty)| map_ui_item_to_entry(Some(&name), &name.to_title_case(), ty)); + .map(|(title, doc_str, path, ty)| { + map_ui_item_to_entry(path.as_deref(), &title, doc_str.as_deref(), ty) + }); quote! 
{ settings::SettingsUiItem::Group(settings::SettingsUiItemGroup{ items: vec![#(#fields),*] }) } } (None, _, Data::Enum(data_enum)) => { - let mut lowercase = false; - let mut snake_case = false; - for attr in &input.attrs { - if attr.path().is_ident("serde") { - attr.parse_nested_meta(|meta| { - if meta.path.is_ident("rename_all") { - meta.input.parse::()?; - let lit = meta.input.parse::()?.value(); - lowercase = lit == "lowercase"; - snake_case = lit == "snake_case"; - } - Ok(()) - }) - .ok(); - } - } + let serde_attrs = parse_serde_attributes(&input.attrs); let length = data_enum.variants.len(); let variants = data_enum.variants.iter().map(|variant| { let string = variant.ident.clone().to_string(); let title = string.to_title_case(); - let string = if lowercase { - string.to_lowercase() - } else if snake_case { - string.to_snake_case() - } else { - string - }; + let string = serde_attrs.rename_all.apply(&string); (string, title) }); @@ -218,6 +225,113 @@ fn generate_ui_item_body( } } +struct SerdeOptions { + rename_all: SerdeRenameAll, + rename: Option, + flatten: bool, + _alias: Option, // todo(settings_ui) +} + +#[derive(PartialEq)] +enum SerdeRenameAll { + Lowercase, + SnakeCase, + None, +} + +impl SerdeRenameAll { + fn apply(&self, name: &str) -> String { + match self { + SerdeRenameAll::Lowercase => name.to_lowercase(), + SerdeRenameAll::SnakeCase => name.to_snake_case(), + SerdeRenameAll::None => name.to_string(), + } + } +} + +impl SerdeOptions { + fn apply_rename_to_field(&self, field_options: &Self, name: &str) -> String { + // field renames take precedence over struct rename all cases + if let Some(rename) = &field_options.rename { + return rename.clone(); + } + return self.rename_all.apply(name); + } +} + +fn parse_serde_attributes(attrs: &[syn::Attribute]) -> SerdeOptions { + let mut options = SerdeOptions { + rename_all: SerdeRenameAll::None, + rename: None, + flatten: false, + _alias: None, + }; + + for attr in attrs { + if 
!attr.path().is_ident("serde") { + continue; + } + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("rename_all") { + meta.input.parse::()?; + let lit = meta.input.parse::()?.value(); + + if options.rename_all != SerdeRenameAll::None { + return Err(meta.error("duplicate `rename_all` attribute")); + } else if lit == "lowercase" { + options.rename_all = SerdeRenameAll::Lowercase; + } else if lit == "snake_case" { + options.rename_all = SerdeRenameAll::SnakeCase; + } else { + return Err(meta.error(format!("invalid `rename_all` attribute: {}", lit))); + } + // todo(settings_ui): Other options? + } else if meta.path.is_ident("flatten") { + options.flatten = true; + } else if meta.path.is_ident("rename") { + if options.rename.is_some() { + return Err(meta.error("Can only have one rename attribute")); + } + + meta.input.parse::()?; + let lit = meta.input.parse::()?.value(); + options.rename = Some(lit); + } + Ok(()) + }) + .unwrap(); + } + + return options; +} + +fn parse_documentation_from_attrs(attrs: &[syn::Attribute]) -> Option { + let mut doc_str = Option::::None; + for attr in attrs { + if attr.path().is_ident("doc") { + // /// ... + // becomes + // #[doc = "..."] + use syn::{Expr::Lit, ExprLit, Lit::Str, Meta, MetaNameValue}; + if let Meta::NameValue(MetaNameValue { + value: + Lit(ExprLit { + lit: Str(ref lit_str), + .. + }), + .. 
+ }) = attr.meta + { + let doc = lit_str.value(); + let doc_str = doc_str.get_or_insert_default(); + doc_str.push_str(doc.trim()); + doc_str.push('\n'); + } + } + } + return doc_str; +} + struct SettingsKey { key: Option, fallback_key: Option, @@ -290,7 +404,7 @@ pub fn derive_settings_key(input: proc_macro::TokenStream) -> proc_macro::TokenS if parsed_settings_key.is_some() && settings_key.is_some() { panic!("Duplicate #[settings_key] attribute"); } - settings_key = parsed_settings_key; + settings_key = settings_key.or(parsed_settings_key); } let Some(SettingsKey { key, fallback_key }) = settings_key else { From a6605270365e7db0ced654c290d62643d6d84672 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Thu, 4 Sep 2025 18:26:37 -0600 Subject: [PATCH 020/109] Make entry_for_path return a reference instead of cloning (#37591) Release Notes: - N/A --- crates/agent_ui/src/acp/message_editor.rs | 7 +++---- crates/agent_ui/src/acp/thread_view.rs | 14 ++++++-------- crates/agent_ui/src/context_picker.rs | 3 ++- crates/project/src/project.rs | 2 +- crates/project/src/worktree_store.rs | 3 +-- crates/repl/src/notebook/notebook_ui.rs | 7 ++++--- crates/search/src/project_search.rs | 1 + 7 files changed, 18 insertions(+), 19 deletions(-) diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index da121bb7a486d80f15125d2ecc526b3b01e059d3..4f57c6161d8f5fae3aa0b5762ed85e49dfd20b43 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -493,14 +493,13 @@ impl MessageEditor { let Some(entry) = self.project.read(cx).entry_for_path(&project_path, cx) else { return Task::ready(Err(anyhow!("project entry not found"))); }; - let Some(worktree) = self.project.read(cx).worktree_for_entry(entry.id, cx) else { + let directory_path = entry.path.clone(); + let worktree_id = project_path.worktree_id; + let Some(worktree) = self.project.read(cx).worktree_for_id(worktree_id, cx) else { return 
Task::ready(Err(anyhow!("worktree not found"))); }; let project = self.project.clone(); cx.spawn(async move |_, cx| { - let directory_path = entry.path.clone(); - - let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?; let file_paths = worktree.read_with(cx, |worktree, _cx| { collect_files_in_path(worktree, &directory_path) })?; diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index b4d56ad05be1a66e9740c2432a9bd08b1adfee0e..441b4aa06fdad65fce079ea36dc3d2e59cf4644f 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -4070,15 +4070,15 @@ impl AcpThreadView { MentionUri::PastedImage => {} MentionUri::Directory { abs_path } => { let project = workspace.project(); - let Some(entry) = project.update(cx, |project, cx| { + let Some(entry_id) = project.update(cx, |project, cx| { let path = project.find_project_path(abs_path, cx)?; - project.entry_for_path(&path, cx) + project.entry_for_path(&path, cx).map(|entry| entry.id) }) else { return; }; project.update(cx, |_, cx| { - cx.emit(project::Event::RevealInProjectPanel(entry.id)); + cx.emit(project::Event::RevealInProjectPanel(entry_id)); }); } MentionUri::Symbol { @@ -4091,11 +4091,9 @@ impl AcpThreadView { line_range, } => { let project = workspace.project(); - let Some((path, _)) = project.update(cx, |project, cx| { - let path = project.find_project_path(path, cx)?; - let entry = project.entry_for_path(&path, cx)?; - Some((path, entry)) - }) else { + let Some(path) = + project.update(cx, |project, cx| project.find_project_path(path, cx)) + else { return; }; diff --git a/crates/agent_ui/src/context_picker.rs b/crates/agent_ui/src/context_picker.rs index 405b5ed90ba1606ef97b8b048b959bfc354bc5cd..b225fbf34058604cfb3f306a9cee14f69bb5edaa 100644 --- a/crates/agent_ui/src/context_picker.rs +++ b/crates/agent_ui/src/context_picker.rs @@ -987,7 +987,8 @@ impl MentionLink { .read(cx) .project() .read(cx) - 
.entry_for_path(&project_path, cx)?; + .entry_for_path(&project_path, cx)? + .clone(); Some(MentionLink::File(project_path, entry)) } Self::SYMBOL => { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 1e2e52c120f95a7c7540cd6f916d2d401f411af2..66924f159a0a97dce558d742ca3ee80456542305 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4352,7 +4352,7 @@ impl Project { self.active_entry } - pub fn entry_for_path(&self, path: &ProjectPath, cx: &App) -> Option { + pub fn entry_for_path<'a>(&'a self, path: &ProjectPath, cx: &'a App) -> Option<&'a Entry> { self.worktree_store.read(cx).entry_for_path(path, cx) } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 1eeeefc40ad09012e5d280c0821052cd6f8db098..b37e1ef8026b643444b3ca0ba67cdb953a959a36 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -203,11 +203,10 @@ impl WorktreeStore { }) } - pub fn entry_for_path(&self, path: &ProjectPath, cx: &App) -> Option { + pub fn entry_for_path<'a>(&'a self, path: &ProjectPath, cx: &'a App) -> Option<&'a Entry> { self.worktree_for_id(path.worktree_id, cx)? .read(cx) .entry_for_path(&path.path) - .cloned() } pub fn create_worktree( diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index 325d262d9eddc164093f088d0e4790d0fa581167..081c474cdad86a5340520ef09345bd456f55b5ba 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -594,9 +594,10 @@ impl project::ProjectItem for NotebookItem { }; let id = project - .update(cx, |project, cx| project.entry_for_path(&path, cx))? - .context("Entry not found")? - .id; + .update(cx, |project, cx| { + project.entry_for_path(&path, cx).map(|entry| entry.id) + })? 
+ .context("Entry not found")?; cx.new(|_| NotebookItem { path: abs_path, diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 4a2dbf31fc96b43db34bd9977fafb09cc5ad60d1..33458a3a88fb717ba047b57564c8804f7ebea928 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -3201,6 +3201,7 @@ pub mod tests { .read(cx) .entry_for_path(&(worktree_id, "a").into(), cx) .expect("no entry for /a/ directory") + .clone() }); assert!(a_dir_entry.is_dir()); window From fded3fbcdb6861e0913bda8c86adaab6256ed254 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Thu, 4 Sep 2025 19:15:59 -0600 Subject: [PATCH 021/109] zeta: Scope edit prediction event history to current project (#37595) This change also causes Zeta to not do anything for editors that are not associated with a project. In practice, this shouldn't affect any behavior - those editors shouldn't have edit predictions anyway. Release Notes: - Edit Prediction: Requests no longer include recent edits from other projects (other Zed windows). 
--- .../zed/src/zed/edit_prediction_registry.rs | 3 +- crates/zeta/src/zeta.rs | 347 ++++++------------ crates/zeta_cli/src/main.rs | 58 +-- 3 files changed, 149 insertions(+), 259 deletions(-) diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 7b8b98018e6d6c608574ab81e912e8a98e363046..4f009ccb0b1197f11b034ac48b89dd37b6f41278 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -207,9 +207,10 @@ fn assign_edit_prediction_provider( if let Some(buffer) = &singleton_buffer && buffer.read(cx).file().is_some() + && let Some(project) = editor.project() { zeta.update(cx, |zeta, cx| { - zeta.register_buffer(buffer, cx); + zeta.register_buffer(buffer, project, cx); }); } diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index e0cfd23dd26cd7ea49181b5aabc16f00f4fd826a..3851d16755783209fd9da4f468a494779a7d9fe7 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -35,12 +35,13 @@ use language_model::{LlmApiToken, RefreshLlmTokenListener}; use project::{Project, ProjectPath}; use release_channel::AppVersion; use settings::WorktreeId; +use std::collections::hash_map; +use std::mem; use std::str::FromStr; use std::{ cmp, fmt::Write, future::Future, - mem, ops::Range, path::Path, rc::Rc, @@ -211,9 +212,8 @@ impl std::fmt::Debug for EditPrediction { } pub struct Zeta { + projects: HashMap, client: Arc, - events: VecDeque, - registered_buffers: HashMap, shown_completions: VecDeque, rated_completions: HashSet, data_collection_choice: Entity, @@ -225,6 +225,11 @@ pub struct Zeta { license_detection_watchers: HashMap>, } +struct ZetaProject { + events: VecDeque, + registered_buffers: HashMap, +} + impl Zeta { pub fn global(cx: &mut App) -> Option> { cx.try_global::().map(|global| global.0.clone()) @@ -255,7 +260,9 @@ impl Zeta { } pub fn clear_history(&mut self) { - self.events.clear(); + for zeta_project in 
self.projects.values_mut() { + zeta_project.events.clear(); + } } pub fn usage(&self, cx: &App) -> Option { @@ -269,11 +276,10 @@ impl Zeta { let data_collection_choice = cx.new(|_| data_collection_choice); Self { + projects: HashMap::default(), client, - events: VecDeque::new(), shown_completions: VecDeque::new(), rated_completions: HashSet::default(), - registered_buffers: HashMap::default(), data_collection_choice, llm_token: LlmApiToken::default(), _llm_token_subscription: cx.subscribe( @@ -294,12 +300,35 @@ impl Zeta { } } - fn push_event(&mut self, event: Event) { + fn get_or_init_zeta_project( + &mut self, + project: &Entity, + cx: &mut Context, + ) -> &mut ZetaProject { + let project_id = project.entity_id(); + match self.projects.entry(project_id) { + hash_map::Entry::Occupied(entry) => entry.into_mut(), + hash_map::Entry::Vacant(entry) => { + cx.observe_release(project, move |this, _, _cx| { + this.projects.remove(&project_id); + }) + .detach(); + entry.insert(ZetaProject { + events: VecDeque::with_capacity(MAX_EVENT_COUNT), + registered_buffers: HashMap::default(), + }) + } + } + } + + fn push_event(zeta_project: &mut ZetaProject, event: Event) { + let events = &mut zeta_project.events; + if let Some(Event::BufferChange { new_snapshot: last_new_snapshot, timestamp: last_timestamp, .. - }) = self.events.back_mut() + }) = events.back_mut() { // Coalesce edits for the same buffer when they happen one after the other. let Event::BufferChange { @@ -318,50 +347,65 @@ impl Zeta { } } - self.events.push_back(event); - if self.events.len() >= MAX_EVENT_COUNT { + if events.len() >= MAX_EVENT_COUNT { // These are halved instead of popping to improve prompt caching. 
- self.events.drain(..MAX_EVENT_COUNT / 2); + events.drain(..MAX_EVENT_COUNT / 2); } - } - - pub fn register_buffer(&mut self, buffer: &Entity, cx: &mut Context) { - let buffer_id = buffer.entity_id(); - let weak_buffer = buffer.downgrade(); - - if let std::collections::hash_map::Entry::Vacant(entry) = - self.registered_buffers.entry(buffer_id) - { - let snapshot = buffer.read(cx).snapshot(); - entry.insert(RegisteredBuffer { - snapshot, - _subscriptions: [ - cx.subscribe(buffer, move |this, buffer, event, cx| { - this.handle_buffer_event(buffer, event, cx); - }), - cx.observe_release(buffer, move |this, _buffer, _cx| { - this.registered_buffers.remove(&weak_buffer.entity_id()); - }), - ], - }); - }; + events.push_back(event); } - fn handle_buffer_event( + pub fn register_buffer( &mut self, - buffer: Entity, - event: &language::BufferEvent, + buffer: &Entity, + project: &Entity, cx: &mut Context, ) { - if let language::BufferEvent::Edited = event { - self.report_changes_for_buffer(&buffer, cx); + let zeta_project = self.get_or_init_zeta_project(project, cx); + Self::register_buffer_impl(zeta_project, buffer, project, cx); + } + + fn register_buffer_impl<'a>( + zeta_project: &'a mut ZetaProject, + buffer: &Entity, + project: &Entity, + cx: &mut Context, + ) -> &'a mut RegisteredBuffer { + let buffer_id = buffer.entity_id(); + match zeta_project.registered_buffers.entry(buffer_id) { + hash_map::Entry::Occupied(entry) => entry.into_mut(), + hash_map::Entry::Vacant(entry) => { + let snapshot = buffer.read(cx).snapshot(); + let project_entity_id = project.entity_id(); + entry.insert(RegisteredBuffer { + snapshot, + _subscriptions: [ + cx.subscribe(buffer, { + let project = project.downgrade(); + move |this, buffer, event, cx| { + if let language::BufferEvent::Edited = event + && let Some(project) = project.upgrade() + { + this.report_changes_for_buffer(&buffer, &project, cx); + } + } + }), + cx.observe_release(buffer, move |this, _buffer, _cx| { + let Some(zeta_project) 
= this.projects.get_mut(&project_entity_id) + else { + return; + }; + zeta_project.registered_buffers.remove(&buffer_id); + }), + ], + }) + } } } fn request_completion_impl( &mut self, - project: Option<&Entity>, + project: &Entity, buffer: &Entity, cursor: language::Anchor, can_collect_data: bool, @@ -376,16 +420,14 @@ impl Zeta { { let buffer = buffer.clone(); let buffer_snapshotted_at = Instant::now(); - let snapshot = self.report_changes_for_buffer(&buffer, cx); + let snapshot = self.report_changes_for_buffer(&buffer, project, cx); let zeta = cx.entity(); - let events = self.events.clone(); + let events = self.get_or_init_zeta_project(project, cx).events.clone(); let client = self.client.clone(); let llm_token = self.llm_token.clone(); let app_version = AppVersion::global(cx); - let git_info = if let (true, Some(project), Some(file)) = - (can_collect_data, project, snapshot.file()) - { + let git_info = if let (true, Some(file)) = (can_collect_data, snapshot.file()) { git_info_for_file(project, &ProjectPath::from_file(file.as_ref(), cx), cx) } else { None @@ -512,163 +554,10 @@ impl Zeta { }) } - // Generates several example completions of various states to fill the Zeta completion modal - #[cfg(any(test, feature = "test-support"))] - pub fn fill_with_fake_completions(&mut self, cx: &mut Context) -> Task<()> { - use language::Point; - - let test_buffer_text = indoc::indoc! 
{r#"a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line - And maybe a short line - - Then a few lines - - and then another - "#}; - - let project = None; - let buffer = cx.new(|cx| Buffer::local(test_buffer_text, cx)); - let position = buffer.read(cx).anchor_before(Point::new(1, 0)); - - let completion_tasks = vec![ - self.fake_completion( - project, - &buffer, - position, - PredictEditsResponse { - request_id: Uuid::parse_str("e7861db5-0cea-4761-b1c5-ad083ac53a80").unwrap(), - output_excerpt: format!("{EDITABLE_REGION_START_MARKER} -a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line -[here's an edit] -And maybe a short line -Then a few lines -and then another -{EDITABLE_REGION_END_MARKER} - ", ), - }, - cx, - ), - self.fake_completion( - project, - &buffer, - position, - PredictEditsResponse { - request_id: Uuid::parse_str("077c556a-2c49-44e2-bbc6-dafc09032a5e").unwrap(), - output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} -a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line -And maybe a short line -[and another edit] -Then a few lines -and then another -{EDITABLE_REGION_END_MARKER} - "#), - }, - cx, - ), - self.fake_completion( - project, - &buffer, - position, - PredictEditsResponse { - request_id: Uuid::parse_str("df8c7b23-3d1d-4f99-a306-1f6264a41277").unwrap(), - output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} -a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line -And maybe a short line - -Then a few lines - -and then another -{EDITABLE_REGION_END_MARKER} - "#), - }, - cx, - ), - self.fake_completion( - project, - &buffer, - position, - PredictEditsResponse { - request_id: Uuid::parse_str("c743958d-e4d8-44a8-aa5b-eb1e305c5f5c").unwrap(), - output_excerpt: 
format!(r#"{EDITABLE_REGION_START_MARKER} -a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line -And maybe a short line - -Then a few lines - -and then another -{EDITABLE_REGION_END_MARKER} - "#), - }, - cx, - ), - self.fake_completion( - project, - &buffer, - position, - PredictEditsResponse { - request_id: Uuid::parse_str("ff5cd7ab-ad06-4808-986e-d3391e7b8355").unwrap(), - output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} -a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line -And maybe a short line -Then a few lines -[a third completion] -and then another -{EDITABLE_REGION_END_MARKER} - "#), - }, - cx, - ), - self.fake_completion( - project, - &buffer, - position, - PredictEditsResponse { - request_id: Uuid::parse_str("83cafa55-cdba-4b27-8474-1865ea06be94").unwrap(), - output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} -a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line -And maybe a short line -and then another -[fourth completion example] -{EDITABLE_REGION_END_MARKER} - "#), - }, - cx, - ), - self.fake_completion( - project, - &buffer, - position, - PredictEditsResponse { - request_id: Uuid::parse_str("d5bd3afd-8723-47c7-bd77-15a3a926867b").unwrap(), - output_excerpt: format!(r#"{EDITABLE_REGION_START_MARKER} -a longggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg line -And maybe a short line -Then a few lines -and then another -[fifth and final completion] -{EDITABLE_REGION_END_MARKER} - "#), - }, - cx, - ), - ]; - - cx.spawn(async move |zeta, cx| { - for task in completion_tasks { - task.await.unwrap(); - } - - zeta.update(cx, |zeta, _cx| { - zeta.shown_completions.get_mut(2).unwrap().edits = Arc::new([]); - zeta.shown_completions.get_mut(3).unwrap().edits = 
Arc::new([]); - }) - .ok(); - }) - } - #[cfg(any(test, feature = "test-support"))] pub fn fake_completion( &mut self, - project: Option<&Entity>, + project: &Entity, buffer: &Entity, position: language::Anchor, response: PredictEditsResponse, @@ -683,7 +572,7 @@ and then another pub fn request_completion( &mut self, - project: Option<&Entity>, + project: &Entity, buffer: &Entity, position: language::Anchor, can_collect_data: bool, @@ -1043,23 +932,23 @@ and then another fn report_changes_for_buffer( &mut self, buffer: &Entity, + project: &Entity, cx: &mut Context, ) -> BufferSnapshot { - self.register_buffer(buffer, cx); + let zeta_project = self.get_or_init_zeta_project(project, cx); + let registered_buffer = Self::register_buffer_impl(zeta_project, buffer, project, cx); - let registered_buffer = self - .registered_buffers - .get_mut(&buffer.entity_id()) - .unwrap(); let new_snapshot = buffer.read(cx).snapshot(); - if new_snapshot.version != registered_buffer.snapshot.version { let old_snapshot = mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone()); - self.push_event(Event::BufferChange { - old_snapshot, - new_snapshot: new_snapshot.clone(), - timestamp: Instant::now(), - }); + Self::push_event( + zeta_project, + Event::BufferChange { + old_snapshot, + new_snapshot: new_snapshot.clone(), + timestamp: Instant::now(), + }, + ); } new_snapshot @@ -1140,7 +1029,7 @@ pub struct GatherContextOutput { } pub fn gather_context( - project: Option<&Entity>, + project: &Entity, full_path_str: String, snapshot: &BufferSnapshot, cursor_point: language::Point, @@ -1149,8 +1038,7 @@ pub fn gather_context( git_info: Option, cx: &App, ) -> Task> { - let local_lsp_store = - project.and_then(|project| project.read(cx).lsp_store().read(cx).as_local()); + let local_lsp_store = project.read(cx).lsp_store().read(cx).as_local(); let diagnostic_groups: Vec<(String, serde_json::Value)> = if can_collect_data && let Some(local_lsp_store) = local_lsp_store { snapshot @@ -1540,6 
+1428,9 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { if self.zeta.read(cx).update_required { return; } + let Some(project) = project else { + return; + }; if self .zeta @@ -1578,13 +1469,7 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { let completion_request = this.update(cx, |this, cx| { this.last_request_timestamp = Instant::now(); this.zeta.update(cx, |zeta, cx| { - zeta.request_completion( - project.as_ref(), - &buffer, - position, - can_collect_data, - cx, - ) + zeta.request_completion(&project, &buffer, position, can_collect_data, cx) }) }); @@ -1762,7 +1647,6 @@ fn tokens_for_bytes(bytes: usize) -> usize { #[cfg(test)] mod tests { - use client::UserStore; use client::test::FakeServer; use clock::FakeSystemClock; use cloud_api_types::{CreateLlmTokenResponse, LlmToken}; @@ -1771,6 +1655,7 @@ mod tests { use indoc::indoc; use language::Point; use settings::SettingsStore; + use util::path; use super::*; @@ -1897,6 +1782,7 @@ mod tests { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); client::init_settings(cx); + Project::init_settings(cx); }); let edits = edits_for_prediction( @@ -1961,6 +1847,7 @@ mod tests { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); client::init_settings(cx); + Project::init_settings(cx); }); let buffer_content = "lorem\n"; @@ -2010,13 +1897,14 @@ mod tests { }); // Construct the fake server to authenticate. 
let _server = FakeServer::for_client(42, &client, cx).await; - let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - let zeta = cx.new(|cx| Zeta::new(client, user_store.clone(), cx)); - + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); + + let zeta = cx.new(|cx| Zeta::new(client, project.read(cx).user_store(), cx)); let completion_task = zeta.update(cx, |zeta, cx| { - zeta.request_completion(None, &buffer, cursor, false, cx) + zeta.request_completion(&project, &buffer, cursor, false, cx) }); let completion = completion_task.await.unwrap().unwrap(); @@ -2074,14 +1962,15 @@ mod tests { }); // Construct the fake server to authenticate. let _server = FakeServer::for_client(42, &client, cx).await; - let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - let zeta = cx.new(|cx| Zeta::new(client, user_store.clone(), cx)); - + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); + + let zeta = cx.new(|cx| Zeta::new(client, project.read(cx).user_store(), cx)); let completion_task = zeta.update(cx, |zeta, cx| { - zeta.request_completion(None, &buffer, cursor, false, cx) + zeta.request_completion(&project, &buffer, cursor, false, cx) }); let completion = completion_task.await.unwrap().unwrap(); diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index 5b2d4cf615be67d9493d617ae7de38fdc8fa4b2f..e66eeed80920a0c31c5c06e119e17d418fbc294c 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -10,7 +10,7 
@@ use language::Bias; use language::Buffer; use language::Point; use language_model::LlmApiToken; -use project::{Project, ProjectPath}; +use project::{Project, ProjectPath, Worktree}; use release_channel::AppVersion; use reqwest_client::ReqwestClient; use std::path::{Path, PathBuf}; @@ -129,15 +129,33 @@ async fn get_context( return Err(anyhow!("Absolute paths are not supported in --cursor")); } - let (project, _lsp_open_handle, buffer) = if use_language_server { - let (project, lsp_open_handle, buffer) = - open_buffer_with_language_server(&worktree_path, &cursor.path, app_state, cx).await?; - (Some(project), Some(lsp_open_handle), buffer) + let project = cx.update(|cx| { + Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + cx, + ) + })?; + + let worktree = project + .update(cx, |project, cx| { + project.create_worktree(&worktree_path, true, cx) + })? + .await?; + + let (_lsp_open_handle, buffer) = if use_language_server { + let (lsp_open_handle, buffer) = + open_buffer_with_language_server(&project, &worktree, &cursor.path, cx).await?; + (Some(lsp_open_handle), buffer) } else { let abs_path = worktree_path.join(&cursor.path); let content = smol::fs::read_to_string(&abs_path).await?; let buffer = cx.new(|cx| Buffer::local(content, cx))?; - (None, None, buffer) + (None, buffer) }; let worktree_name = worktree_path @@ -177,7 +195,7 @@ async fn get_context( let mut gather_context_output = cx .update(|cx| { gather_context( - project.as_ref(), + &project, full_path_str, &snapshot, clipped_cursor, @@ -198,29 +216,11 @@ async fn get_context( } pub async fn open_buffer_with_language_server( - worktree_path: &Path, + project: &Entity, + worktree: &Entity, path: &Path, - app_state: &Arc, cx: &mut AsyncApp, -) -> Result<(Entity, Entity>, Entity)> { - let project = cx.update(|cx| { - Project::local( - app_state.client.clone(), - 
app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - cx, - ) - })?; - - let worktree = project - .update(cx, |project, cx| { - project.create_worktree(worktree_path, true, cx) - })? - .await?; - +) -> Result<(Entity>, Entity)> { let project_path = worktree.read_with(cx, |worktree, _cx| ProjectPath { worktree_id: worktree.id(), path: path.to_path_buf().into(), @@ -237,7 +237,7 @@ pub async fn open_buffer_with_language_server( let log_prefix = path.to_string_lossy().to_string(); wait_for_lang_server(&project, &buffer, log_prefix, cx).await?; - Ok((project, lsp_open_handle, buffer)) + Ok((lsp_open_handle, buffer)) } // TODO: Dedupe with similar function in crates/eval/src/instance.rs From 57c6dbd71e483646cd0409894547e97f664ebed3 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Fri, 5 Sep 2025 09:10:50 +0530 Subject: [PATCH 022/109] linux: Fix IME positioning on scaled display on Wayland (#37600) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Removes IME bounds scaling on Wayland since it uses logical pixels, unlike X11. We now scale only on X11. Windows and macOS don’t use these bounds for IME anyway. Release Notes: - Fixed an issue where the IME popover could appear outside the window or fail to show on Wayland. 
--- crates/gpui/src/platform.rs | 8 +++---- .../gpui/src/platform/linux/wayland/client.rs | 6 ++--- .../gpui/src/platform/linux/wayland/window.rs | 7 +++--- crates/gpui/src/platform/linux/x11/client.rs | 22 +++++++++---------- crates/gpui/src/platform/linux/x11/window.rs | 7 +++--- crates/gpui/src/platform/mac/window.rs | 9 ++++---- crates/gpui/src/platform/test/window.rs | 6 ++--- crates/gpui/src/platform/windows/window.rs | 2 +- crates/gpui/src/window.rs | 4 +--- 9 files changed, 34 insertions(+), 37 deletions(-) diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index d3425c8835bb474ffbed6bc79371340d569d1bfb..444b60ac154424c423c3cd6a827b22cd7024694f 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -39,9 +39,9 @@ use crate::{ Action, AnyWindowHandle, App, AsyncWindowContext, BackgroundExecutor, Bounds, DEFAULT_WINDOW_SIZE, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun, ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput, - Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, ScaledPixels, Scene, - ShapedGlyph, ShapedRun, SharedString, Size, SvgRenderer, SvgSize, SystemWindowTab, Task, - TaskLabel, Window, WindowControlArea, hash, point, px, size, + Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, ShapedGlyph, + ShapedRun, SharedString, Size, SvgRenderer, SvgSize, SystemWindowTab, Task, TaskLabel, Window, + WindowControlArea, hash, point, px, size, }; use anyhow::Result; use async_task::Runnable; @@ -548,7 +548,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle { fn set_client_inset(&self, _inset: Pixels) {} fn gpu_specs(&self) -> Option; - fn update_ime_position(&self, _bounds: Bounds); + fn update_ime_position(&self, _bounds: Bounds); #[cfg(any(test, feature = "test-support"))] fn as_test(&mut self) -> Option<&mut TestWindow> { diff --git 
a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index 189cfa19545f052cf8ebc75b89c1f955d3396859..8596bddc8dd821426982d618f661d6da621096bb 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -75,8 +75,8 @@ use crate::{ FileDropEvent, ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, LinuxCommon, LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay, - PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScaledPixels, ScrollDelta, - ScrollWheelEvent, Size, TouchPhase, WindowParams, point, px, size, + PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScrollDelta, ScrollWheelEvent, + Size, TouchPhase, WindowParams, point, px, size, }; use crate::{ SharedString, @@ -323,7 +323,7 @@ impl WaylandClientStatePtr { } } - pub fn update_ime_position(&self, bounds: Bounds) { + pub fn update_ime_position(&self, bounds: Bounds) { let client = self.get_client(); let mut state = client.borrow_mut(); if state.composing || state.text_input.is_none() || state.pre_edit_text.is_some() { diff --git a/crates/gpui/src/platform/linux/wayland/window.rs b/crates/gpui/src/platform/linux/wayland/window.rs index 7570c58c09e8d5c63091174fa51bc30c54c005e1..76dd89c940c615d726af1cf5922be226d91dfd41 100644 --- a/crates/gpui/src/platform/linux/wayland/window.rs +++ b/crates/gpui/src/platform/linux/wayland/window.rs @@ -25,9 +25,8 @@ use crate::scene::Scene; use crate::{ AnyWindowHandle, Bounds, Decorations, Globals, GpuSpecs, Modifiers, Output, Pixels, PlatformDisplay, PlatformInput, Point, PromptButton, PromptLevel, RequestFrameOptions, - ResizeEdge, ScaledPixels, Size, Tiling, WaylandClientStatePtr, WindowAppearance, - WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowControls, WindowDecorations, - WindowParams, px, size, 
+ ResizeEdge, Size, Tiling, WaylandClientStatePtr, WindowAppearance, WindowBackgroundAppearance, + WindowBounds, WindowControlArea, WindowControls, WindowDecorations, WindowParams, px, size, }; use crate::{ Capslock, @@ -1078,7 +1077,7 @@ impl PlatformWindow for WaylandWindow { } } - fn update_ime_position(&self, bounds: Bounds) { + fn update_ime_position(&self, bounds: Bounds) { let state = self.borrow(); state.client.update_ime_position(bounds); } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 9a43bd64706ec21905b18b8837af2ddc785cba87..42c59701d3ee644b99bc8bb58002b429265c1a45 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -62,8 +62,7 @@ use crate::{ AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, DisplayId, FileDropEvent, Keystroke, LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, Platform, PlatformDisplay, PlatformInput, PlatformKeyboardLayout, Point, RequestFrameOptions, - ScaledPixels, ScrollDelta, Size, TouchPhase, WindowParams, X11Window, - modifiers_from_xinput_info, point, px, + ScrollDelta, Size, TouchPhase, WindowParams, X11Window, modifiers_from_xinput_info, point, px, }; /// Value for DeviceId parameters which selects all devices. 
@@ -252,7 +251,7 @@ impl X11ClientStatePtr { } } - pub fn update_ime_position(&self, bounds: Bounds) { + pub fn update_ime_position(&self, bounds: Bounds) { let Some(client) = self.get_client() else { return; }; @@ -270,6 +269,7 @@ impl X11ClientStatePtr { state.ximc = Some(ximc); return; }; + let scaled_bounds = bounds.scale(state.scale_factor); let ic_attributes = ximc .build_ic_attributes() .push( @@ -282,8 +282,8 @@ impl X11ClientStatePtr { b.push( xim::AttributeName::SpotLocation, xim::Point { - x: u32::from(bounds.origin.x + bounds.size.width) as i16, - y: u32::from(bounds.origin.y + bounds.size.height) as i16, + x: u32::from(scaled_bounds.origin.x + scaled_bounds.size.width) as i16, + y: u32::from(scaled_bounds.origin.y + scaled_bounds.size.height) as i16, }, ); }) @@ -703,14 +703,14 @@ impl X11Client { state.xim_handler = Some(xim_handler); return; }; - if let Some(area) = window.get_ime_area() { + if let Some(scaled_area) = window.get_ime_area() { ic_attributes = ic_attributes.nested_list(xim::AttributeName::PreeditAttributes, |b| { b.push( xim::AttributeName::SpotLocation, xim::Point { - x: u32::from(area.origin.x + area.size.width) as i16, - y: u32::from(area.origin.y + area.size.height) as i16, + x: u32::from(scaled_area.origin.x + scaled_area.size.width) as i16, + y: u32::from(scaled_area.origin.y + scaled_area.size.height) as i16, }, ); }); @@ -1351,7 +1351,7 @@ impl X11Client { drop(state); window.handle_ime_preedit(text); - if let Some(area) = window.get_ime_area() { + if let Some(scaled_area) = window.get_ime_area() { let ic_attributes = ximc .build_ic_attributes() .push( @@ -1364,8 +1364,8 @@ impl X11Client { b.push( xim::AttributeName::SpotLocation, xim::Point { - x: u32::from(area.origin.x + area.size.width) as i16, - y: u32::from(area.origin.y + area.size.height) as i16, + x: u32::from(scaled_area.origin.x + scaled_area.size.width) as i16, + y: u32::from(scaled_area.origin.y + scaled_area.size.height) as i16, }, ); }) diff --git 
a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index 6af943b31761dc26b2cde4090cad4ce6574dd5c9..79a43837252f7dc702b43176d2f06172a3acec18 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -1019,8 +1019,9 @@ impl X11WindowStatePtr { } } - pub fn get_ime_area(&self) -> Option> { + pub fn get_ime_area(&self) -> Option> { let mut state = self.state.borrow_mut(); + let scale_factor = state.scale_factor; let mut bounds: Option> = None; if let Some(mut input_handler) = state.input_handler.take() { drop(state); @@ -1030,7 +1031,7 @@ impl X11WindowStatePtr { let mut state = self.state.borrow_mut(); state.input_handler = Some(input_handler); }; - bounds + bounds.map(|b| b.scale(scale_factor)) } pub fn set_bounds(&self, bounds: Bounds) -> anyhow::Result<()> { @@ -1618,7 +1619,7 @@ impl PlatformWindow for X11Window { } } - fn update_ime_position(&self, bounds: Bounds) { + fn update_ime_position(&self, bounds: Bounds) { let mut state = self.0.state.borrow_mut(); let client = state.client.clone(); drop(state); diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 686cfb314e58c4e10e916a07931fb5f4248ea54e..1230a704062ba835bceb5db5d2ecf05b688e34df 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -4,10 +4,9 @@ use crate::{ ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformWindow, Point, PromptButton, PromptLevel, RequestFrameOptions, - ScaledPixels, SharedString, Size, SystemWindowTab, Timer, WindowAppearance, - WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowKind, WindowParams, - dispatch_get_main_queue, dispatch_sys::dispatch_async_f, platform::PlatformInputHandler, point, - px, size, + SharedString, Size, 
SystemWindowTab, Timer, WindowAppearance, WindowBackgroundAppearance, + WindowBounds, WindowControlArea, WindowKind, WindowParams, dispatch_get_main_queue, + dispatch_sys::dispatch_async_f, platform::PlatformInputHandler, point, px, size, }; use block::ConcreteBlock; use cocoa::{ @@ -1480,7 +1479,7 @@ impl PlatformWindow for MacWindow { None } - fn update_ime_position(&self, _bounds: Bounds) { + fn update_ime_position(&self, _bounds: Bounds) { let executor = self.0.lock().executor.clone(); executor .spawn(async move { diff --git a/crates/gpui/src/platform/test/window.rs b/crates/gpui/src/platform/test/window.rs index e15bd7aeecec5932eb6386bd47d168eda906dd63..9e87f4504ddd61e34b645ea69ea394c4940f9d55 100644 --- a/crates/gpui/src/platform/test/window.rs +++ b/crates/gpui/src/platform/test/window.rs @@ -1,8 +1,8 @@ use crate::{ AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GpuSpecs, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, - Point, PromptButton, RequestFrameOptions, ScaledPixels, Size, TestPlatform, TileId, - WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowParams, + Point, PromptButton, RequestFrameOptions, Size, TestPlatform, TileId, WindowAppearance, + WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowParams, }; use collections::HashMap; use parking_lot::Mutex; @@ -289,7 +289,7 @@ impl PlatformWindow for TestWindow { unimplemented!() } - fn update_ime_position(&self, _bounds: Bounds) {} + fn update_ime_position(&self, _bounds: Bounds) {} fn gpu_specs(&self) -> Option { None diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index 9d001da822315c76aa9a16b010a38407c5730386..aa907c8d734973fc4fc795b6d8ebf7654d1b40de 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -839,7 +839,7 @@ impl PlatformWindow for WindowsWindow { 
self.0.state.borrow().renderer.gpu_specs().log_err() } - fn update_ime_position(&self, _bounds: Bounds) { + fn update_ime_position(&self, _bounds: Bounds) { // There is no such thing on Windows. } } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 0ec73c4b0040e6c65cd8819ecf5d20a9ec1900d0..61d15cb3ed41751ce08c00599bbe28fc0c0cadb2 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -4096,9 +4096,7 @@ impl Window { self.on_next_frame(|window, cx| { if let Some(mut input_handler) = window.platform_window.take_input_handler() { if let Some(bounds) = input_handler.selected_bounds(window, cx) { - window - .platform_window - .update_ime_position(bounds.scale(window.scale_factor())); + window.platform_window.update_ime_position(bounds); } window.platform_window.set_input_handler(input_handler); } From 4124bedab796d2ac0a1e57f8b94f72500969797a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Sep 2025 08:54:08 +0200 Subject: [PATCH 023/109] gpui: Skip `test` attribute expansion for rust-analyzer (#37611) The `test` attribute doesn't really matter to rust-analyzer, so we can make use of its cfg to have it think its just the standard test attribute which should make rust-analyzer slightly less resource intensive in zed. It also should prevent some IDE features from possibly failing within tests. Notably this has no effect outside of this repo, as the `rust-analyzer` cfg only takes effect on workspace member crates. Ideally we'd use the ignored proc macro config here but rust-analyzer still doesn't have toml configs working unfortunately. 
Release Notes: - N/A --- crates/gpui/src/gpui.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 3c4ee41c16ab7cfc5e42007291e330282b330ecb..0858cb014e33da354eb8a6488982b913b76d2b52 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -121,6 +121,14 @@ mod seal { pub trait Sealed {} } +// This allows r-a to skip expanding the gpui test macro which should +// reduce resource usage a bit as the test attribute is special cased +// to be treated as a no-op. +#[cfg(rust_analyzer)] +pub use core::prelude::v1::test; +#[cfg(not(rust_analyzer))] +pub use gpui_macros::test; + pub use action::*; pub use anyhow::Result; pub use app::*; @@ -134,7 +142,7 @@ pub use elements::*; pub use executor::*; pub use geometry::*; pub use global::*; -pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action, test}; +pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action}; pub use http_client; pub use input::*; pub use inspector::*; From bed358718b6693fb32e63ea5d6f3c4d41cbf1277 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Sep 2025 09:56:53 +0200 Subject: [PATCH 024/109] agent_ui: Fix index panic in `SlashCommandCompletion::try_parse` (#37612) Release Notes: - N/A --- .../agent_ui/src/acp/completion_provider.rs | 57 +++++++++---------- 1 file changed, 28 insertions(+), 29 deletions(-) diff --git a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/acp/completion_provider.rs index 44e81433ab5a9d904f329e238b24960e2d568750..ecaa9cd45072191177d3fe15ec16d500b34fb489 100644 --- a/crates/agent_ui/src/acp/completion_provider.rs +++ b/crates/agent_ui/src/acp/completion_provider.rs @@ -1025,43 +1025,31 @@ impl SlashCommandCompletion { return None; } - let last_command_start = line.rfind('/')?; - if last_command_start >= line.len() { - return Some(Self::default()); - } - if last_command_start > 0 - && line - .chars() - 
.nth(last_command_start - 1) - .is_some_and(|c| !c.is_whitespace()) + let (prefix, last_command) = line.rsplit_once('/')?; + if prefix.chars().last().is_some_and(|c| !c.is_whitespace()) + || last_command.starts_with(char::is_whitespace) { return None; } - let rest_of_line = &line[last_command_start + 1..]; - - let mut command = None; let mut argument = None; - let mut end = last_command_start + 1; - - if let Some(command_text) = rest_of_line.split_whitespace().next() { - command = Some(command_text.to_string()); - end += command_text.len(); - - // Find the start of arguments after the command - if let Some(args_start) = - rest_of_line[command_text.len()..].find(|c: char| !c.is_whitespace()) - { - let args = &rest_of_line[command_text.len() + args_start..].trim_end(); - if !args.is_empty() { - argument = Some(args.to_string()); - end += args.len() + 1; - } + let mut command = None; + if let Some((command_text, args)) = last_command.split_once(char::is_whitespace) { + if !args.is_empty() { + argument = Some(args.trim_end().to_string()); } - } + command = Some(command_text.to_string()); + } else if !last_command.is_empty() { + command = Some(last_command.to_string()); + }; Some(Self { - source_range: last_command_start + offset_to_line..end + offset_to_line, + source_range: prefix.len() + offset_to_line + ..line + .rfind(|c: char| !c.is_whitespace()) + .unwrap_or_else(|| line.len()) + + 1 + + offset_to_line, command, argument, }) @@ -1180,6 +1168,15 @@ mod tests { }) ); + assert_eq!( + SlashCommandCompletion::try_parse("/拿不到命令 拿不到命令 ", 0), + Some(SlashCommandCompletion { + source_range: 0..30, + command: Some("拿不到命令".to_string()), + argument: Some("拿不到命令".to_string()), + }) + ); + assert_eq!(SlashCommandCompletion::try_parse("Lorem Ipsum", 0), None); assert_eq!(SlashCommandCompletion::try_parse("Lorem /", 0), None); @@ -1187,6 +1184,8 @@ mod tests { assert_eq!(SlashCommandCompletion::try_parse("Lorem /help", 0), None); 
assert_eq!(SlashCommandCompletion::try_parse("Lorem/", 0), None); + + assert_eq!(SlashCommandCompletion::try_parse("/ ", 0), None); } #[test] From ec58adca131ce2232ccd186947213e6255e6987d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Sep 2025 13:16:15 +0200 Subject: [PATCH 025/109] languages: Invoke conda activate in conda environments (#37627) This isn't quite right, but using the env manager path causes conda to scream and I am not yet sure why, either way this is an improvement over the status quo Release Notes: - N/A\ --- crates/languages/src/python.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 5e6f5e414f001209d3b4447ae8326a12953c45ac..06fb49293f838fca2d54de076139ac8c4ebacfc2 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -902,6 +902,13 @@ impl ToolchainLister for PythonToolchainProvider { let env = toolchain.name.as_deref().unwrap_or("default"); activation_script.push(format!("pixi shell -e {env}")) } + Some(PythonEnvironmentKind::Conda) => { + if let Some(name) = &toolchain.name { + activation_script.push(format!("conda activate {name}")); + } else { + activation_script.push("conda activate".to_string()); + } + } Some(PythonEnvironmentKind::Venv | PythonEnvironmentKind::VirtualEnv) => { if let Some(prefix) = &toolchain.prefix { let activate_keyword = match shell { From 16c4fd4fc563eeedc645a50931129908bc3bfb07 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 5 Sep 2025 13:19:57 +0200 Subject: [PATCH 026/109] gpui: move Option -> Result conversion out of closure in App::update_window_id (#37624) Doesn't fix anything, but it seems that we do not need to assert and convert into an error until after the closure run to completion, especially since this is the only error we throw. 
Release Notes: - N/A --- crates/gpui/src/app.rs | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 69d5c0ee4375443ad42a7b25a64a138406ac95a2..8b0b404d1dffbf8a27de1f29437ce9cc2ba63f0f 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1358,12 +1358,7 @@ impl App { F: FnOnce(AnyView, &mut Window, &mut App) -> T, { self.update(|cx| { - let mut window = cx - .windows - .get_mut(id) - .context("window not found")? - .take() - .context("window not found")?; + let mut window = cx.windows.get_mut(id)?.take()?; let root_view = window.root.clone().unwrap(); @@ -1380,15 +1375,14 @@ impl App { true }); } else { - cx.windows - .get_mut(id) - .context("window not found")? - .replace(window); + cx.windows.get_mut(id)?.replace(window); } - Ok(result) + Some(result) }) + .context("window not found") } + /// Creates an `AsyncApp`, which can be cloned and has a static lifetime /// so it can be held across `await` points. pub fn to_async(&self) -> AsyncApp { From e30f45cf64dcac1943cf726fad2ff55f8018057b Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 5 Sep 2025 14:22:32 +0200 Subject: [PATCH 027/109] Syntax tree view improvements (#37570) In an effort to improve the experience while developing extensions and improving themes, this PR updates the syntax tree views behavior slightly. Before, the view would always update to the current active editor whilst being used. This was quite painful for improving extension scheme files, as you would always have to change back and forth between editors to have a view at the relevant syntax tree. With this PR, the syntax tree view will now stay attached to the editor it was opened in, similar to preview views. Once the view is shown, the `UseActiveEditor` will become available in the command palette and enable the user to update the view to the last focused editor. On file close, the view will also be updated accordingly. 
https://github.com/user-attachments/assets/922075e5-9da0-4c1d-9e1a-51e024bf41ea A button is also shown whenever switching is possible. Futhermore, improved the empty state of the view. Lastly, a drive-by cleanup of the `show_action_types` method so there is no need to call `iter()` when calling the method. Release Notes: - The syntax tree view will now stay attached to the buffer it was opened in, similar to the Markdown preview. Use the `UseActiveEditor` action when the view is shown to change it to the last focused editor. --- Cargo.lock | 1 + crates/agent_ui/src/agent_ui.rs | 3 +- .../src/command_palette_hooks.rs | 4 +- crates/copilot/src/copilot.rs | 2 +- crates/language_tools/Cargo.toml | 1 + crates/language_tools/src/syntax_tree_view.rs | 395 +++++++++++++----- crates/settings_ui/src/settings_ui.rs | 2 +- crates/workspace/src/workspace.rs | 6 + crates/zed/src/zed.rs | 3 +- crates/zeta/src/init.rs | 4 +- 10 files changed, 302 insertions(+), 119 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c5e6c8588137b87e00b15e0655a53cdefc518d4f..b0fb3b6f49a90bc92d4dff35a6e76574625cc531 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9247,6 +9247,7 @@ dependencies = [ "anyhow", "client", "collections", + "command_palette_hooks", "copilot", "editor", "futures 0.3.31", diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 93a4a8f748eefc933f809669af841f443888f7ed..e60c0baff99d1f615cbe439aed754a35f2a5c8db 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -337,8 +337,7 @@ fn update_command_palette_filter(cx: &mut App) { ]; filter.show_action_types(edit_prediction_actions.iter()); - filter - .show_action_types([TypeId::of::()].iter()); + filter.show_action_types(&[TypeId::of::()]); } }); } diff --git a/crates/command_palette_hooks/src/command_palette_hooks.rs b/crates/command_palette_hooks/src/command_palette_hooks.rs index df64d53874b4907b3bf586ee7935302c2e6979ae..f1344c5ba6d46fce966ace60d483e3c0fc717f80 
100644 --- a/crates/command_palette_hooks/src/command_palette_hooks.rs +++ b/crates/command_palette_hooks/src/command_palette_hooks.rs @@ -76,7 +76,7 @@ impl CommandPaletteFilter { } /// Hides all actions with the given types. - pub fn hide_action_types(&mut self, action_types: &[TypeId]) { + pub fn hide_action_types<'a>(&mut self, action_types: impl IntoIterator) { for action_type in action_types { self.hidden_action_types.insert(*action_type); self.shown_action_types.remove(action_type); @@ -84,7 +84,7 @@ impl CommandPaletteFilter { } /// Shows all actions with the given types. - pub fn show_action_types<'a>(&mut self, action_types: impl Iterator) { + pub fn show_action_types<'a>(&mut self, action_types: impl IntoIterator) { for action_type in action_types { self.shown_action_types.insert(*action_type); self.hidden_action_types.remove(action_type); diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index d0a57735ab5a0342b245aa8db72e6b021b3943de..61b7a4e18e4e679c29e26185735352737983c4d1 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -1095,7 +1095,7 @@ impl Copilot { _ => { filter.hide_action_types(&signed_in_actions); filter.hide_action_types(&auth_actions); - filter.show_action_types(no_auth_actions.iter()); + filter.show_action_types(&no_auth_actions); } } } diff --git a/crates/language_tools/Cargo.toml b/crates/language_tools/Cargo.toml index b8f85d8d90068be9ad6849528f28522a96206cc8..bbac900cded75e9ca680a1813734f57423ce0ee9 100644 --- a/crates/language_tools/Cargo.toml +++ b/crates/language_tools/Cargo.toml @@ -16,6 +16,7 @@ doctest = false anyhow.workspace = true client.workspace = true collections.workspace = true +command_palette_hooks.workspace = true copilot.workspace = true editor.workspace = true futures.workspace = true diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index 
cf84ac34c4af6d04895ba5d1e22c262a1ef8f03c..5700d8d487e990937597295fb5bab761a46f2ba3 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -1,17 +1,22 @@ +use command_palette_hooks::CommandPaletteFilter; use editor::{Anchor, Editor, ExcerptId, SelectionEffects, scroll::Autoscroll}; use gpui::{ - App, AppContext as _, Context, Div, Entity, EventEmitter, FocusHandle, Focusable, Hsla, - InteractiveElement, IntoElement, MouseButton, MouseDownEvent, MouseMoveEvent, ParentElement, - Render, ScrollStrategy, SharedString, Styled, UniformListScrollHandle, WeakEntity, Window, - actions, div, rems, uniform_list, + App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable, + Hsla, InteractiveElement, IntoElement, MouseButton, MouseDownEvent, MouseMoveEvent, + ParentElement, Render, ScrollStrategy, SharedString, Styled, UniformListScrollHandle, + WeakEntity, Window, actions, div, rems, uniform_list, }; use language::{Buffer, OwnedSyntaxLayer}; -use std::{mem, ops::Range}; +use std::{any::TypeId, mem, ops::Range}; use theme::ActiveTheme; use tree_sitter::{Node, TreeCursor}; -use ui::{ButtonLike, Color, ContextMenu, Label, LabelCommon, PopoverMenu, h_flex}; +use ui::{ + ButtonCommon, ButtonLike, Clickable, Color, ContextMenu, FluentBuilder as _, IconButton, + IconName, Label, LabelCommon, LabelSize, PopoverMenu, StyledExt, Tooltip, h_flex, v_flex, +}; use workspace::{ - SplitDirection, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, + Event as WorkspaceEvent, SplitDirection, ToolbarItemEvent, ToolbarItemLocation, + ToolbarItemView, Workspace, item::{Item, ItemHandle}, }; @@ -19,17 +24,51 @@ actions!( dev, [ /// Opens the syntax tree view for the current file. - OpenSyntaxTreeView + OpenSyntaxTreeView, + ] +); + +actions!( + syntax_tree_view, + [ + /// Update the syntax tree view to show the last focused file. 
+ UseActiveEditor ] ); pub fn init(cx: &mut App) { - cx.observe_new(|workspace: &mut Workspace, _, _| { - workspace.register_action(|workspace, _: &OpenSyntaxTreeView, window, cx| { + let syntax_tree_actions = [TypeId::of::()]; + + CommandPaletteFilter::update_global(cx, |this, _| { + this.hide_action_types(&syntax_tree_actions); + }); + + cx.observe_new(move |workspace: &mut Workspace, _, _| { + workspace.register_action(move |workspace, _: &OpenSyntaxTreeView, window, cx| { + CommandPaletteFilter::update_global(cx, |this, _| { + this.show_action_types(&syntax_tree_actions); + }); + let active_item = workspace.active_item(cx); let workspace_handle = workspace.weak_handle(); - let syntax_tree_view = - cx.new(|cx| SyntaxTreeView::new(workspace_handle, active_item, window, cx)); + let syntax_tree_view = cx.new(|cx| { + cx.on_release(move |view: &mut SyntaxTreeView, cx| { + if view + .workspace_handle + .read_with(cx, |workspace, cx| { + workspace.item_of_type::(cx).is_none() + }) + .unwrap_or_default() + { + CommandPaletteFilter::update_global(cx, |this, _| { + this.hide_action_types(&syntax_tree_actions); + }); + } + }) + .detach(); + + SyntaxTreeView::new(workspace_handle, active_item, window, cx) + }); workspace.split_item( SplitDirection::Right, Box::new(syntax_tree_view), @@ -37,6 +76,13 @@ pub fn init(cx: &mut App) { cx, ) }); + workspace.register_action(|workspace, _: &UseActiveEditor, window, cx| { + if let Some(tree_view) = workspace.item_of_type::(cx) { + tree_view.update(cx, |view, cx| { + view.update_active_editor(&Default::default(), window, cx) + }) + } + }); }) .detach(); } @@ -45,6 +91,9 @@ pub struct SyntaxTreeView { workspace_handle: WeakEntity, editor: Option, list_scroll_handle: UniformListScrollHandle, + /// The last active editor in the workspace. Note that this is specifically not the + /// currently shown editor. 
+ last_active_editor: Option>, selected_descendant_ix: Option, hovered_descendant_ix: Option, focus_handle: FocusHandle, @@ -61,6 +110,14 @@ struct EditorState { _subscription: gpui::Subscription, } +impl EditorState { + fn has_language(&self) -> bool { + self.active_buffer + .as_ref() + .is_some_and(|buffer| buffer.active_layer.is_some()) + } +} + #[derive(Clone)] struct BufferState { buffer: Entity, @@ -79,17 +136,25 @@ impl SyntaxTreeView { workspace_handle: workspace_handle.clone(), list_scroll_handle: UniformListScrollHandle::new(), editor: None, + last_active_editor: None, hovered_descendant_ix: None, selected_descendant_ix: None, focus_handle: cx.focus_handle(), }; - this.workspace_updated(active_item, window, cx); - cx.observe_in( + this.handle_item_updated(active_item, window, cx); + + cx.subscribe_in( &workspace_handle.upgrade().unwrap(), window, - |this, workspace, window, cx| { - this.workspace_updated(workspace.read(cx).active_item(cx), window, cx); + move |this, workspace, event, window, cx| match event { + WorkspaceEvent::ItemAdded { .. 
} | WorkspaceEvent::ActiveItemChanged => { + this.handle_item_updated(workspace.read(cx).active_item(cx), window, cx) + } + WorkspaceEvent::ItemRemoved { item_id } => { + this.handle_item_removed(item_id, window, cx); + } + _ => {} }, ) .detach(); @@ -97,20 +162,56 @@ impl SyntaxTreeView { this } - fn workspace_updated( + fn handle_item_updated( &mut self, active_item: Option>, window: &mut Window, cx: &mut Context, ) { - if let Some(item) = active_item - && item.item_id() != cx.entity_id() - && let Some(editor) = item.act_as::(cx) - { + let Some(editor) = active_item + .filter(|item| item.item_id() != cx.entity_id()) + .and_then(|item| item.act_as::(cx)) + else { + return; + }; + + if let Some(editor_state) = self.editor.as_ref().filter(|state| state.has_language()) { + self.last_active_editor = (editor_state.editor != editor).then_some(editor); + } else { self.set_editor(editor, window, cx); } } + fn handle_item_removed( + &mut self, + item_id: &EntityId, + window: &mut Window, + cx: &mut Context, + ) { + if self + .editor + .as_ref() + .is_some_and(|state| state.editor.entity_id() == *item_id) + { + self.editor = None; + // Try activating the last active editor if there is one + self.update_active_editor(&Default::default(), window, cx); + cx.notify(); + } + } + + fn update_active_editor( + &mut self, + _: &UseActiveEditor, + window: &mut Window, + cx: &mut Context, + ) { + let Some(editor) = self.last_active_editor.take() else { + return; + }; + self.set_editor(editor, window, cx); + } + fn set_editor(&mut self, editor: Entity, window: &mut Window, cx: &mut Context) { if let Some(state) = &self.editor { if state.editor == editor { @@ -294,101 +395,153 @@ impl SyntaxTreeView { .pl(rems(depth as f32)) .hover(|style| style.bg(colors.element_hover)) } -} - -impl Render for SyntaxTreeView { - fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let mut rendered = div().flex_1().bg(cx.theme().colors().editor_background); - if let Some(layer) 
= self - .editor - .as_ref() - .and_then(|editor| editor.active_buffer.as_ref()) - .and_then(|buffer| buffer.active_layer.as_ref()) - { - let layer = layer.clone(); - rendered = rendered.child(uniform_list( - "SyntaxTreeView", - layer.node().descendant_count(), - cx.processor(move |this, range: Range, _, cx| { - let mut items = Vec::new(); - let mut cursor = layer.node().walk(); - let mut descendant_ix = range.start; - cursor.goto_descendant(descendant_ix); - let mut depth = cursor.depth(); - let mut visited_children = false; - while descendant_ix < range.end { - if visited_children { - if cursor.goto_next_sibling() { - visited_children = false; - } else if cursor.goto_parent() { - depth -= 1; - } else { - break; - } - } else { - items.push( - Self::render_node( - &cursor, - depth, - Some(descendant_ix) == this.selected_descendant_ix, + fn compute_items( + &mut self, + layer: &OwnedSyntaxLayer, + range: Range, + cx: &Context, + ) -> Vec
{ + let mut items = Vec::new(); + let mut cursor = layer.node().walk(); + let mut descendant_ix = range.start; + cursor.goto_descendant(descendant_ix); + let mut depth = cursor.depth(); + let mut visited_children = false; + while descendant_ix < range.end { + if visited_children { + if cursor.goto_next_sibling() { + visited_children = false; + } else if cursor.goto_parent() { + depth -= 1; + } else { + break; + } + } else { + items.push( + Self::render_node( + &cursor, + depth, + Some(descendant_ix) == self.selected_descendant_ix, + cx, + ) + .on_mouse_down( + MouseButton::Left, + cx.listener(move |tree_view, _: &MouseDownEvent, window, cx| { + tree_view.update_editor_with_range_for_descendant_ix( + descendant_ix, + window, + cx, + |editor, mut range, window, cx| { + // Put the cursor at the beginning of the node. + mem::swap(&mut range.start, &mut range.end); + + editor.change_selections( + SelectionEffects::scroll(Autoscroll::newest()), + window, + cx, + |selections| { + selections.select_ranges(vec![range]); + }, + ); + }, + ); + }), + ) + .on_mouse_move(cx.listener( + move |tree_view, _: &MouseMoveEvent, window, cx| { + if tree_view.hovered_descendant_ix != Some(descendant_ix) { + tree_view.hovered_descendant_ix = Some(descendant_ix); + tree_view.update_editor_with_range_for_descendant_ix( + descendant_ix, + window, cx, - ) - .on_mouse_down( - MouseButton::Left, - cx.listener(move |tree_view, _: &MouseDownEvent, window, cx| { - tree_view.update_editor_with_range_for_descendant_ix( - descendant_ix, - window, cx, - |editor, mut range, window, cx| { - // Put the cursor at the beginning of the node. 
- mem::swap(&mut range.start, &mut range.end); - - editor.change_selections( - SelectionEffects::scroll(Autoscroll::newest()), - window, cx, - |selections| { - selections.select_ranges(vec![range]); - }, - ); + |editor, range, _, cx| { + editor.clear_background_highlights::(cx); + editor.highlight_background::( + &[range], + |theme| { + theme + .colors() + .editor_document_highlight_write_background }, + cx, ); - }), - ) - .on_mouse_move(cx.listener( - move |tree_view, _: &MouseMoveEvent, window, cx| { - if tree_view.hovered_descendant_ix != Some(descendant_ix) { - tree_view.hovered_descendant_ix = Some(descendant_ix); - tree_view.update_editor_with_range_for_descendant_ix(descendant_ix, window, cx, |editor, range, _, cx| { - editor.clear_background_highlights::( cx); - editor.highlight_background::( - &[range], - |theme| theme.colors().editor_document_highlight_write_background, - cx, - ); - }); - cx.notify(); - } }, - )), - ); - descendant_ix += 1; - if cursor.goto_first_child() { - depth += 1; - } else { - visited_children = true; + ); + cx.notify(); } - } - } - items - }), - ) - .size_full() - .track_scroll(self.list_scroll_handle.clone()) - .text_bg(cx.theme().colors().background).into_any_element()); + }, + )), + ); + descendant_ix += 1; + if cursor.goto_first_child() { + depth += 1; + } else { + visited_children = true; + } + } } + items + } +} - rendered +impl Render for SyntaxTreeView { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + div() + .flex_1() + .bg(cx.theme().colors().editor_background) + .map(|this| { + let editor_state = self.editor.as_ref(); + + if let Some(layer) = editor_state + .and_then(|editor| editor.active_buffer.as_ref()) + .and_then(|buffer| buffer.active_layer.as_ref()) + { + let layer = layer.clone(); + this.child( + uniform_list( + "SyntaxTreeView", + layer.node().descendant_count(), + cx.processor(move |this, range: Range, _, cx| { + this.compute_items(&layer, range, cx) + }), + ) + .size_full() + 
.track_scroll(self.list_scroll_handle.clone()) + .text_bg(cx.theme().colors().background) + .into_any_element(), + ) + } else { + let inner_content = v_flex() + .items_center() + .text_center() + .gap_2() + .max_w_3_5() + .map(|this| { + if editor_state.is_some_and(|state| !state.has_language()) { + this.child(Label::new("Current editor has no associated language")) + .child( + Label::new(concat!( + "Try assigning a language or", + "switching to a different buffer" + )) + .size(LabelSize::Small), + ) + } else { + this.child(Label::new("Not attached to an editor")).child( + Label::new("Focus an editor to show a new tree view") + .size(LabelSize::Small), + ) + } + }); + + this.h_flex() + .size_full() + .justify_center() + .child(inner_content) + } + }) } } @@ -506,6 +659,26 @@ impl SyntaxTreeToolbarItemView { .child(Label::new(active_layer.language.name())) .child(Label::new(format_node_range(active_layer.node()))) } + + fn render_update_button(&mut self, cx: &mut Context) -> Option { + self.tree_view.as_ref().and_then(|view| { + view.update(cx, |view, cx| { + view.last_active_editor.as_ref().map(|editor| { + IconButton::new("syntax-view-update", IconName::RotateCw) + .tooltip({ + let active_tab_name = editor.read_with(cx, |editor, cx| { + editor.tab_content_text(Default::default(), cx) + }); + + Tooltip::text(format!("Update view to '{active_tab_name}'")) + }) + .on_click(cx.listener(|this, _, window, cx| { + this.update_active_editor(&Default::default(), window, cx); + })) + }) + }) + }) + } } fn format_node_range(node: Node) -> String { @@ -522,8 +695,10 @@ fn format_node_range(node: Node) -> String { impl Render for SyntaxTreeToolbarItemView { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - self.render_menu(cx) - .unwrap_or_else(|| PopoverMenu::new("Empty Syntax Tree")) + h_flex() + .gap_1() + .children(self.render_menu(cx)) + .children(self.render_update_button(cx)) } } diff --git a/crates/settings_ui/src/settings_ui.rs 
b/crates/settings_ui/src/settings_ui.rs index d736f0e174ba13d368794d8f5b623a44845d561b..5fea6dfcebb21be4351172ed4d8a17452b5601ba 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -70,7 +70,7 @@ pub fn init(cx: &mut App) { move |is_enabled, _workspace, _, cx| { if is_enabled { CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.show_action_types(settings_ui_actions.iter()); + filter.show_action_types(&settings_ui_actions); }); } else { CommandPaletteFilter::update_global(cx, |filter, _cx| { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index af86517bb452c1cea77a72f2cf2350ef1e2eb030..0bfcaaf593eca73baa2a6a57def5af17b6ee93b3 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1031,6 +1031,9 @@ pub enum Event { item: Box, }, ActiveItemChanged, + ItemRemoved { + item_id: EntityId, + }, UserSavedItem { pane: WeakEntity, item: Box, @@ -3945,6 +3948,9 @@ impl Workspace { { entry.remove(); } + cx.emit(Event::ItemRemoved { + item_id: item.item_id(), + }); } pane::Event::Focus => { window.invalidate_character_coordinates(); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 96f0f261dcce9268976f92ec028f0581fb648913..864f6badeb6941aa2d6bd17a43977f84a77461b1 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4502,6 +4502,7 @@ mod tests { "snippets", "supermaven", "svg", + "syntax_tree_view", "tab_switcher", "task", "terminal", @@ -4511,11 +4512,11 @@ mod tests { "toolchain", "variable_list", "vim", + "window", "workspace", "zed", "zed_predict_onboarding", "zeta", - "window", ]; assert_eq!( all_namespaces, diff --git a/crates/zeta/src/init.rs b/crates/zeta/src/init.rs index 6e5b31f99a76cb0e066348150e962396cf1ad9c6..f27667de6332bf4c3b8d2d705f281c9e3ba96a83 100644 --- a/crates/zeta/src/init.rs +++ b/crates/zeta/src/init.rs @@ -86,7 +86,7 @@ fn feature_gate_predict_edits_actions(cx: &mut App) { if is_ai_disabled { 
filter.hide_action_types(&zeta_all_action_types); } else if has_feature_flag { - filter.show_action_types(rate_completion_action_types.iter()); + filter.show_action_types(&rate_completion_action_types); } else { filter.hide_action_types(&rate_completion_action_types); } @@ -98,7 +98,7 @@ fn feature_gate_predict_edits_actions(cx: &mut App) { if !DisableAiSettings::get_global(cx).disable_ai { if is_enabled { CommandPaletteFilter::update_global(cx, |filter, _cx| { - filter.show_action_types(rate_completion_action_types.iter()); + filter.show_action_types(&rate_completion_action_types); }); } else { CommandPaletteFilter::update_global(cx, |filter, _cx| { From 74e8afe9a8f72b1ff0a1f5fd62d78f2eb15f7e15 Mon Sep 17 00:00:00 2001 From: Isaac Hales Date: Fri, 5 Sep 2025 08:57:58 -0600 Subject: [PATCH 028/109] Fix logic for default values for task variables (#37588) This is a small fix for default values in task variables. The [documentation](https://zed.dev/docs/tasks) states > You can also use verbose syntax that allows specifying a default if a given variable is not available: ${ZED_FILE:default_value} I found, however, that this doesn't actually work. Instead, the Zed variable and the default value are just appended in the output. For example, if I run a task `echo ${ZED_ROW:100}` the result I get is `447:100` (in this case it should just be `447`). This PR fixes that. I also added a new test case for handling default values. I also tested the fix in a dev build and it seems to work. There are no UI adjustments. AI disclosure: I used Claude Code to write the code, including the fix and the tests. This is actually my first open-source PR ever, so if I did something wrong, I'd appreciate any tips and I'll make it right! 
Release Notes: - Fixed task variable substitution always appending the default --- crates/task/src/task_template.rs | 92 +++++++++++++++++++++++++++++--- 1 file changed, 85 insertions(+), 7 deletions(-) diff --git a/crates/task/src/task_template.rs b/crates/task/src/task_template.rs index 3d1d180557fc457e4200a5b246f2a08e2f5dfcf0..a57f5a175af3fd79ce6b8ef818e3fb97acdc32c2 100644 --- a/crates/task/src/task_template.rs +++ b/crates/task/src/task_template.rs @@ -333,15 +333,16 @@ fn substitute_all_template_variables_in_str>( if let Some(substituted_variable) = variable_names.get(variable_name) { substituted_variables.insert(substituted_variable.clone()); } - - let mut name = name.as_ref().to_owned(); - // Got a task variable hit + // Got a task variable hit - use the variable value, ignore default + return Ok(Some(name.as_ref().to_owned())); + } else if variable_name.starts_with(ZED_VARIABLE_NAME_PREFIX) { + // Unknown ZED variable - use default if available if !default.is_empty() { - name.push_str(default); + // Strip the colon and return the default value + return Ok(Some(default[1..].to_owned())); + } else { + bail!("Unknown variable name: {variable_name}"); } - return Ok(Some(name)); - } else if variable_name.starts_with(ZED_VARIABLE_NAME_PREFIX) { - bail!("Unknown variable name: {variable_name}"); } // This is an unknown variable. // We should not error out, as they may come from user environment (e.g. $PATH). That means that the variable substitution might not be perfect. 
@@ -892,4 +893,81 @@ mod tests { "overwritten" ); } + + #[test] + fn test_variable_default_values() { + let task_with_defaults = TaskTemplate { + label: "test with defaults".to_string(), + command: format!( + "echo ${{{}}}", + VariableName::File.to_string() + ":fallback.txt" + ), + args: vec![ + "${ZED_MISSING_VAR:default_value}".to_string(), + format!("${{{}}}", VariableName::Row.to_string() + ":42"), + ], + ..TaskTemplate::default() + }; + + // Test 1: When ZED_FILE exists, should use actual value and ignore default + let context_with_file = TaskContext { + cwd: None, + task_variables: TaskVariables::from_iter(vec![ + (VariableName::File, "actual_file.rs".to_string()), + (VariableName::Row, "123".to_string()), + ]), + project_env: HashMap::default(), + }; + + let resolved = task_with_defaults + .resolve_task(TEST_ID_BASE, &context_with_file) + .expect("Should resolve task with existing variables"); + + assert_eq!( + resolved.resolved.command.unwrap(), + "echo actual_file.rs", + "Should use actual ZED_FILE value, not default" + ); + assert_eq!( + resolved.resolved.args, + vec!["default_value", "123"], + "Should use default for missing var, actual value for existing var" + ); + + // Test 2: When ZED_FILE doesn't exist, should use default value + let context_without_file = TaskContext { + cwd: None, + task_variables: TaskVariables::from_iter(vec![(VariableName::Row, "456".to_string())]), + project_env: HashMap::default(), + }; + + let resolved = task_with_defaults + .resolve_task(TEST_ID_BASE, &context_without_file) + .expect("Should resolve task using default values"); + + assert_eq!( + resolved.resolved.command.unwrap(), + "echo fallback.txt", + "Should use default value when ZED_FILE is missing" + ); + assert_eq!( + resolved.resolved.args, + vec!["default_value", "456"], + "Should use defaults for missing vars" + ); + + // Test 3: Missing ZED variable without default should fail + let task_no_default = TaskTemplate { + label: "test no default".to_string(), + 
command: "${ZED_MISSING_NO_DEFAULT}".to_string(), + ..TaskTemplate::default() + }; + + assert!( + task_no_default + .resolve_task(TEST_ID_BASE, &TaskContext::default()) + .is_none(), + "Should fail when ZED variable has no default and doesn't exist" + ); + } } From 360e372b57d2913ed670bd0edbca73adfe5956f4 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 5 Sep 2025 11:09:32 -0400 Subject: [PATCH 029/109] linux: Restore ctrl-escape to keymap (#37636) Closes: https://github.com/zed-industries/zed/issues/37628 Follow-up to: https://github.com/zed-industries/zed/pull/36712 Release Notes: - linux: Fix for ctrl-escape not escaping the tab switcher. --- assets/keymaps/default-linux.json | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 28518490ccbe9d3a4e8161ffbc32ed5c27ae0d84..44234b819abdf10231e4cb4e4fb7dfe335d19778 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -16,6 +16,7 @@ "up": "menu::SelectPrevious", "enter": "menu::Confirm", "ctrl-enter": "menu::SecondaryConfirm", + "ctrl-escape": "menu::Cancel", "ctrl-c": "menu::Cancel", "escape": "menu::Cancel", "alt-shift-enter": "menu::Restart", From 3d37611b6f20b158454014b5d886805a06902e71 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 5 Sep 2025 17:43:39 +0200 Subject: [PATCH 030/109] cli: Rename script zed-wsl to zed, and enable on non-WSL (#37631) Closes #23026 With this hotfix, git committing from the built-in Zed terminal (well, PowerShell), now works. 
Release Notes: - N/A --- .gitattributes | 2 +- crates/zed/resources/windows/{zed-wsl => zed.sh} | 4 ++-- script/bundle-windows.ps1 | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) rename crates/zed/resources/windows/{zed-wsl => zed.sh} (88%) diff --git a/.gitattributes b/.gitattributes index 0dedc2d567dac982b217453c266a046b09ea4830..37d28993301fef9c7eb4da0847cc9f4b7a5f1fbb 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,4 +2,4 @@ *.json linguist-language=JSON-with-Comments # Ensure the WSL script always has LF line endings, even on Windows -crates/zed/resources/windows/zed-wsl text eol=lf +crates/zed/resources/windows/zed text eol=lf diff --git a/crates/zed/resources/windows/zed-wsl b/crates/zed/resources/windows/zed.sh similarity index 88% rename from crates/zed/resources/windows/zed-wsl rename to crates/zed/resources/windows/zed.sh index d3cbb93af6f5979508229656deadeab0dbf21661..734b1a7eb00dc304786a58674171fdb5872b90c8 100644 --- a/crates/zed/resources/windows/zed-wsl +++ b/crates/zed/resources/windows/zed.sh @@ -20,6 +20,6 @@ if [ $IN_WSL = true ]; then "$ZED_PATH/zed.exe" --wsl "$WSL_USER@$WSL_DISTRO_NAME" "$@" exit $? else - echo "Only WSL is supported for now" >&2 - exit 1 + "$ZED_PATH/zed.exe" "$@" + exit $? 
fi diff --git a/script/bundle-windows.ps1 b/script/bundle-windows.ps1 index 84ad39fb706f9d3e0e4af73a68b468e0bea33ee1..a26abf8413f375b611d01d57b61ac3f91a960dd7 100644 --- a/script/bundle-windows.ps1 +++ b/script/bundle-windows.ps1 @@ -150,7 +150,7 @@ function CollectFiles { Move-Item -Path "$innoDir\zed_explorer_command_injector.appx" -Destination "$innoDir\appx\zed_explorer_command_injector.appx" -Force Move-Item -Path "$innoDir\zed_explorer_command_injector.dll" -Destination "$innoDir\appx\zed_explorer_command_injector.dll" -Force Move-Item -Path "$innoDir\cli.exe" -Destination "$innoDir\bin\zed.exe" -Force - Move-Item -Path "$innoDir\zed-wsl" -Destination "$innoDir\bin\zed" -Force + Move-Item -Path "$innoDir\zed.sh" -Destination "$innoDir\bin\zed" -Force Move-Item -Path "$innoDir\auto_update_helper.exe" -Destination "$innoDir\tools\auto_update_helper.exe" -Force Move-Item -Path ".\AGS_SDK-6.3.0\ags_lib\lib\amd_ags_x64.dll" -Destination "$innoDir\amd_ags_x64.dll" -Force } From fb6cc8794f360acf0c0671502bb456ae7233fc88 Mon Sep 17 00:00:00 2001 From: Yacine Hmito <6893840+yacinehmito@users.noreply.github.com> Date: Fri, 5 Sep 2025 17:56:40 +0200 Subject: [PATCH 031/109] Fix typo in development docs for macOS (#37607) Release Notes: - N/A --- docs/src/development/macos.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index c7e92623d4e226cb575da524fd8241fba3730fd6..851e2efdd7cdf15b9617445fe065149da8a5721f 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -33,7 +33,7 @@ Clone down the [Zed repository](https://github.com/zed-industries/zed). ### Backend Dependencies (optional) {#backend-dependencies} -If you are looking to develop Zed collaboration features using a local collabortation server, please see: [Local Collaboration](./local-collaboration.md) docs. 
+If you are looking to develop Zed collaboration features using a local collaboration server, please see: [Local Collaboration](./local-collaboration.md) docs. ## Building Zed from Source From 91ab0636ec6ed5a3df39a61bb56d24f715865c39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E5=B0=8F=E7=99=BD?= <364772080@qq.com> Date: Sat, 6 Sep 2025 00:25:55 +0800 Subject: [PATCH 032/109] windows: Make sure `zed.sh` using the correct line ending (#37650) This got missed in the changes from #37631 Release Notes: - N/A --- .gitattributes | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitattributes b/.gitattributes index 37d28993301fef9c7eb4da0847cc9f4b7a5f1fbb..57afd4ea6942bd3985fb7395101800706d7b4ae6 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,4 +2,4 @@ *.json linguist-language=JSON-with-Comments # Ensure the WSL script always has LF line endings, even on Windows -crates/zed/resources/windows/zed text eol=lf +crates/zed/resources/windows/zed.sh text eol=lf From 638320b21e8a893a8da0ee23c438127cf82f5f85 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 5 Sep 2025 12:40:47 -0400 Subject: [PATCH 033/109] Improve macOS version information in telemetry (#37185) macOS versions are currently reported as `macOS 26.0.0`. But this makes it impossible to differentiate amongst macOS Beta releases which have the same version number (`X.0.0`) but are different builds. This PR adds build number info to `os_version` for macOS Betas and [Rapid Security Response](https://support.apple.com/en-us/102657) release that have identical version numbers to stable release, but have different builds numbers. We can differentiate them because the build numbers end with a letter. 
| Version | Before | After | | - | - | - | | macOS Sonoma 14.7.8 | 14.7.8 | 14.7.8 | | macOS Sequoia 15.6.1 | 15.6.1 | 15.6.1 | | macOS Ventura 13.3.1 | 13.3.1 | 13.3.1 | | macOS Ventura 13.3.1 (a) | 13.3.1 | 13.3.1 (Build 22E772610a) | | macOS Tahoe 26.0.0 (Beta1) | 26.0.0 | 26.0.0 (Build 25A5316a) | | macOS Tahoe 26.0.0 (Beta5) | 26.0.0 | 26.0.0 (Build 25A5349a) | This should cause minimal telemetry changes, only impacting a few macOS betas and a couple of specific older macOS versions, but will allow differentiation between macOS beta releases in GitHub issues. Alternatives: 1. Leave as-is (can't differentiate between macOS beta builds) 2. Always include build number info (impacts telemetry; more consistent going forward; differentiates non-final Release Candidates which don't include a trailing letter) I couldn't find a cocoa method to retrieve macOS build number, so I switched dependencies from `cocoa` to `objc2-foundation` in the client crate. We already depend upon this crate as a dependency of `blade-graphics` so I matched the features of that and so workspace-hack doesn't change.
https://github.com/zed-industries/zed/blob/1ebc69a44708f344449c0c9d47e33b414277adec/tooling/workspace-hack/Cargo.toml#L355 Release Notes: - N/A --- Cargo.lock | 2 +- Cargo.toml | 25 +++++++++++++++++++++++++ crates/client/Cargo.toml | 2 +- crates/client/src/telemetry.rs | 25 +++++++++++++------------ 4 files changed, 40 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b0fb3b6f49a90bc92d4dff35a6e76574625cc531..975e762dddefa6d2c67f8957e4356a69c903f187 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3070,7 +3070,6 @@ dependencies = [ "clock", "cloud_api_client", "cloud_llm_client", - "cocoa 0.26.0", "collections", "credentials_provider", "derive_more", @@ -3083,6 +3082,7 @@ dependencies = [ "http_client_tls", "httparse", "log", + "objc2-foundation", "parking_lot", "paths", "postage", diff --git a/Cargo.toml b/Cargo.toml index f389153efe9d0719187d14bb554042fcf2888376..1de877334fe6cf7c5d4c84649e27b0633579723e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -537,6 +537,31 @@ nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c80421 nix = "0.29" num-format = "0.4.4" objc = "0.2" +objc2-foundation = { version = "0.3", default-features = false, features = [ + "NSArray", + "NSAttributedString", + "NSBundle", + "NSCoder", + "NSData", + "NSDate", + "NSDictionary", + "NSEnumerator", + "NSError", + "NSGeometry", + "NSNotification", + "NSNull", + "NSObjCRuntime", + "NSObject", + "NSProcessInfo", + "NSRange", + "NSRunLoop", + "NSString", + "NSURL", + "NSUndoManager", + "NSValue", + "objc2-core-foundation", + "std" +] } open = "5.0.0" ordered-float = "2.1.1" palette = { version = "0.7.5", default-features = false, features = ["std"] } diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 5c6d1157fd710de0e1dd160b611c0bd7c6667c4d..01007cdc6618996735c859284e3860b936f540e8 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -75,7 +75,7 @@ util = { workspace = true, features = ["test-support"] } 
windows.workspace = true [target.'cfg(target_os = "macos")'.dependencies] -cocoa.workspace = true +objc2-foundation.workspace = true [target.'cfg(any(target_os = "windows", target_os = "macos"))'.dependencies] tokio-native-tls = "0.3" diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index a5c1532c7563ab4bcb5f8826dcc18f3d52daf222..e3123400866516bda26b071e288bdad9dd5964e0 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -84,6 +84,10 @@ static DOTNET_PROJECT_FILES_REGEX: LazyLock = LazyLock::new(|| { Regex::new(r"^(global\.json|Directory\.Build\.props|.*\.(csproj|fsproj|vbproj|sln))$").unwrap() }); +#[cfg(target_os = "macos")] +static MACOS_VERSION_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"(\s*\(Build [^)]*[0-9]\))").unwrap()); + pub fn os_name() -> String { #[cfg(target_os = "macos")] { @@ -108,19 +112,16 @@ pub fn os_name() -> String { pub fn os_version() -> String { #[cfg(target_os = "macos")] { - use cocoa::base::nil; - use cocoa::foundation::NSProcessInfo; - - unsafe { - let process_info = cocoa::foundation::NSProcessInfo::processInfo(nil); - let version = process_info.operatingSystemVersion(); - gpui::SemanticVersion::new( - version.majorVersion as usize, - version.minorVersion as usize, - version.patchVersion as usize, - ) + use objc2_foundation::NSProcessInfo; + let process_info = NSProcessInfo::processInfo(); + let version_nsstring = unsafe { process_info.operatingSystemVersionString() }; + // "Version 15.6.1 (Build 24G90)" -> "15.6.1 (Build 24G90)" + let version_string = version_nsstring.to_string().replace("Version ", ""); + // "15.6.1 (Build 24G90)" -> "15.6.1" + // "26.0.0 (Build 25A5349a)" -> unchanged (Beta or Rapid Security Response; ends with letter) + MACOS_VERSION_REGEX + .replace_all(&version_string, "") .to_string() - } } #[cfg(any(target_os = "linux", target_os = "freebsd"))] { From b3405c3bd18749f3f7acda52670ee03528d655b8 Mon Sep 17 00:00:00 2001 From: Matin Aniss 
<76515905+MatinAniss@users.noreply.github.com> Date: Sat, 6 Sep 2025 02:52:57 +1000 Subject: [PATCH 034/109] Add line ending selector (#35392) Partially addresses this issue #5294 Adds a selector between `LF` and `CRLF` for the buffer's line endings, the checkmark denotes the currently selected line ending. Selector image Release Notes: - Added line ending selector. --------- Co-authored-by: Conrad Irwin --- Cargo.lock | 16 ++ Cargo.toml | 2 + crates/language/src/buffer.rs | 35 +++- crates/language/src/buffer_tests.rs | 72 +++++++ crates/language/src/proto.rs | 25 +++ crates/line_ending_selector/Cargo.toml | 24 +++ crates/line_ending_selector/LICENSE-GPL | 1 + .../src/line_ending_selector.rs | 192 ++++++++++++++++++ crates/proto/proto/buffer.proto | 7 + crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + crates/zed/src/zed.rs | 1 + 12 files changed, 376 insertions(+), 1 deletion(-) create mode 100644 crates/line_ending_selector/Cargo.toml create mode 120000 crates/line_ending_selector/LICENSE-GPL create mode 100644 crates/line_ending_selector/src/line_ending_selector.rs diff --git a/Cargo.lock b/Cargo.lock index 975e762dddefa6d2c67f8957e4356a69c903f187..fbdf0e848c356620f2a2cca800cf40ef850c3b13 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9518,6 +9518,21 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "line_ending_selector" +version = "0.1.0" +dependencies = [ + "editor", + "gpui", + "language", + "picker", + "project", + "ui", + "util", + "workspace", + "workspace-hack", +] + [[package]] name = "link-cplusplus" version = "1.0.10" @@ -20492,6 +20507,7 @@ dependencies = [ "language_tools", "languages", "libc", + "line_ending_selector", "livekit_client", "log", "markdown", diff --git a/Cargo.toml b/Cargo.toml index 1de877334fe6cf7c5d4c84649e27b0633579723e..d8e8040cd920e1f6b5a561c80a4a205d030cbb49 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -97,6 +97,7 @@ members = [ "crates/language_selector", "crates/language_tools", "crates/languages", + 
"crates/line_ending_selector", "crates/livekit_api", "crates/livekit_client", "crates/lmstudio", @@ -323,6 +324,7 @@ language_models = { path = "crates/language_models" } language_selector = { path = "crates/language_selector" } language_tools = { path = "crates/language_tools" } languages = { path = "crates/languages" } +line_ending_selector = { path = "crates/line_ending_selector" } livekit_api = { path = "crates/livekit_api" } livekit_client = { path = "crates/livekit_client" } lmstudio = { path = "crates/lmstudio" } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index c86787e1f9de8cf31037187dc667e2a7e428cea9..2a303bb9a0ff44981def92f593595e92629be1e5 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -284,6 +284,14 @@ pub enum Operation { /// The language server ID. server_id: LanguageServerId, }, + + /// An update to the line ending type of this buffer. + UpdateLineEnding { + /// The line ending type. + line_ending: LineEnding, + /// The buffer's lamport timestamp. + lamport_timestamp: clock::Lamport, + }, } /// An event that occurs in a buffer. @@ -1240,6 +1248,21 @@ impl Buffer { self.syntax_map.lock().language_registry() } + /// Assign the line ending type to the buffer. + pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context) { + self.text.set_line_ending(line_ending); + + let lamport_timestamp = self.text.lamport_clock.tick(); + self.send_operation( + Operation::UpdateLineEnding { + line_ending, + lamport_timestamp, + }, + true, + cx, + ); + } + /// Assign the buffer a new [`Capability`]. pub fn set_capability(&mut self, capability: Capability, cx: &mut Context) { if self.capability != capability { @@ -2557,7 +2580,7 @@ impl Buffer { Operation::UpdateSelections { selections, .. } => selections .iter() .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)), - Operation::UpdateCompletionTriggers { .. } => true, + Operation::UpdateCompletionTriggers { .. 
} | Operation::UpdateLineEnding { .. } => true, } } @@ -2623,6 +2646,13 @@ impl Buffer { } self.text.lamport_clock.observe(lamport_timestamp); } + Operation::UpdateLineEnding { + line_ending, + lamport_timestamp, + } => { + self.text.set_line_ending(line_ending); + self.text.lamport_clock.observe(lamport_timestamp); + } } } @@ -4814,6 +4844,9 @@ impl operation_queue::Operation for Operation { } | Operation::UpdateCompletionTriggers { lamport_timestamp, .. + } + | Operation::UpdateLineEnding { + lamport_timestamp, .. } => *lamport_timestamp, } } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 5b88112c956e5466748fc349825a78f6232e540e..050ec457dfe6e83d420206b381d5524b9c583441 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -67,6 +67,78 @@ fn test_line_endings(cx: &mut gpui::App) { }); } +#[gpui::test] +fn test_set_line_ending(cx: &mut TestAppContext) { + let base = cx.new(|cx| Buffer::local("one\ntwo\nthree\n", cx)); + let base_replica = cx.new(|cx| { + Buffer::from_proto(1, Capability::ReadWrite, base.read(cx).to_proto(cx), None).unwrap() + }); + base.update(cx, |_buffer, cx| { + cx.subscribe(&base_replica, |this, _, event, cx| { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + this.apply_ops([operation.clone()], cx); + } + }) + .detach(); + }); + base_replica.update(cx, |_buffer, cx| { + cx.subscribe(&base, |this, _, event, cx| { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + this.apply_ops([operation.clone()], cx); + } + }) + .detach(); + }); + + // Base + base_replica.read_with(cx, |buffer, _| { + assert_eq!(buffer.line_ending(), LineEnding::Unix); + }); + base.update(cx, |buffer, cx| { + assert_eq!(buffer.line_ending(), LineEnding::Unix); + buffer.set_line_ending(LineEnding::Windows, cx); + assert_eq!(buffer.line_ending(), LineEnding::Windows); + }); + base_replica.read_with(cx, |buffer, _| { + 
assert_eq!(buffer.line_ending(), LineEnding::Windows); + }); + base.update(cx, |buffer, cx| { + buffer.set_line_ending(LineEnding::Unix, cx); + assert_eq!(buffer.line_ending(), LineEnding::Unix); + }); + base_replica.read_with(cx, |buffer, _| { + assert_eq!(buffer.line_ending(), LineEnding::Unix); + }); + + // Replica + base.read_with(cx, |buffer, _| { + assert_eq!(buffer.line_ending(), LineEnding::Unix); + }); + base_replica.update(cx, |buffer, cx| { + assert_eq!(buffer.line_ending(), LineEnding::Unix); + buffer.set_line_ending(LineEnding::Windows, cx); + assert_eq!(buffer.line_ending(), LineEnding::Windows); + }); + base.read_with(cx, |buffer, _| { + assert_eq!(buffer.line_ending(), LineEnding::Windows); + }); + base_replica.update(cx, |buffer, cx| { + buffer.set_line_ending(LineEnding::Unix, cx); + assert_eq!(buffer.line_ending(), LineEnding::Unix); + }); + base.read_with(cx, |buffer, _| { + assert_eq!(buffer.line_ending(), LineEnding::Unix); + }); +} + #[gpui::test] fn test_select_language(cx: &mut App) { init_settings(cx, |_| {}); diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 0d5a8e916c8712733dcc7a26faa984453cdd30fd..bc85b10859632fc3e2cf61c663b7159a023f4f3a 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -90,6 +90,15 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { language_server_id: server_id.to_proto(), }, ), + + crate::Operation::UpdateLineEnding { + line_ending, + lamport_timestamp, + } => proto::operation::Variant::UpdateLineEnding(proto::operation::UpdateLineEnding { + replica_id: lamport_timestamp.replica_id as u32, + lamport_timestamp: lamport_timestamp.value, + line_ending: serialize_line_ending(*line_ending) as i32, + }), }), } } @@ -341,6 +350,18 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { + crate::Operation::UpdateLineEnding { + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: 
message.lamport_timestamp, + }, + line_ending: deserialize_line_ending( + proto::LineEnding::from_i32(message.line_ending) + .context("missing line_ending")?, + ), + } + } }, ) } @@ -496,6 +517,10 @@ pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option { + replica_id = op.replica_id; + value = op.lamport_timestamp; + } } Some(clock::Lamport { diff --git a/crates/line_ending_selector/Cargo.toml b/crates/line_ending_selector/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..7c5c8f6d8f3996771f832c28d5d71b857bb0b3b6 --- /dev/null +++ b/crates/line_ending_selector/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "line_ending_selector" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/line_ending_selector.rs" +doctest = false + +[dependencies] +editor.workspace = true +gpui.workspace = true +language.workspace = true +picker.workspace = true +project.workspace = true +ui.workspace = true +util.workspace = true +workspace.workspace = true +workspace-hack.workspace = true diff --git a/crates/line_ending_selector/LICENSE-GPL b/crates/line_ending_selector/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/line_ending_selector/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/line_ending_selector/src/line_ending_selector.rs b/crates/line_ending_selector/src/line_ending_selector.rs new file mode 100644 index 0000000000000000000000000000000000000000..532f0b051d79e25229d7cb72419ca557edd5b477 --- /dev/null +++ b/crates/line_ending_selector/src/line_ending_selector.rs @@ -0,0 +1,192 @@ +use editor::Editor; +use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity, actions}; +use language::{Buffer, LineEnding}; +use picker::{Picker, PickerDelegate}; +use 
project::Project; +use std::sync::Arc; +use ui::{ListItem, ListItemSpacing, prelude::*}; +use util::ResultExt; +use workspace::ModalView; + +actions!( + line_ending, + [ + /// Toggles the line ending selector modal. + Toggle + ] +); + +pub fn init(cx: &mut App) { + cx.observe_new(LineEndingSelector::register).detach(); +} + +pub struct LineEndingSelector { + picker: Entity>, +} + +impl LineEndingSelector { + fn register(editor: &mut Editor, _window: Option<&mut Window>, cx: &mut Context) { + let editor_handle = cx.weak_entity(); + editor + .register_action(move |_: &Toggle, window, cx| { + Self::toggle(&editor_handle, window, cx); + }) + .detach(); + } + + fn toggle(editor: &WeakEntity, window: &mut Window, cx: &mut App) { + let Some((workspace, buffer)) = editor + .update(cx, |editor, cx| { + Some((editor.workspace()?, editor.active_excerpt(cx)?.1)) + }) + .ok() + .flatten() + else { + return; + }; + + workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + workspace.toggle_modal(window, cx, move |window, cx| { + LineEndingSelector::new(buffer, project, window, cx) + }); + }) + } + + fn new( + buffer: Entity, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let line_ending = buffer.read(cx).line_ending(); + let delegate = + LineEndingSelectorDelegate::new(cx.entity().downgrade(), buffer, project, line_ending); + let picker = cx.new(|cx| Picker::nonsearchable_uniform_list(delegate, window, cx)); + Self { picker } + } +} + +impl Render for LineEndingSelector { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + v_flex().w(rems(34.)).child(self.picker.clone()) + } +} + +impl Focusable for LineEndingSelector { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl EventEmitter for LineEndingSelector {} +impl ModalView for LineEndingSelector {} + +struct LineEndingSelectorDelegate { + line_ending_selector: WeakEntity, + buffer: 
Entity, + project: Entity, + line_ending: LineEnding, + matches: Vec, + selected_index: usize, +} + +impl LineEndingSelectorDelegate { + fn new( + line_ending_selector: WeakEntity, + buffer: Entity, + project: Entity, + line_ending: LineEnding, + ) -> Self { + Self { + line_ending_selector, + buffer, + project, + line_ending, + matches: vec![LineEnding::Unix, LineEnding::Windows], + selected_index: 0, + } + } +} + +impl PickerDelegate for LineEndingSelectorDelegate { + type ListItem = ListItem; + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + "Select a line ending…".into() + } + + fn match_count(&self) -> usize { + self.matches.len() + } + + fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context>) { + if let Some(line_ending) = self.matches.get(self.selected_index) { + self.buffer.update(cx, |this, cx| { + this.set_line_ending(*line_ending, cx); + }); + let buffer = self.buffer.clone(); + let project = self.project.clone(); + cx.defer(move |cx| { + project.update(cx, |this, cx| { + this.save_buffer(buffer, cx).detach(); + }); + }); + } + self.dismissed(window, cx); + } + + fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { + self.line_ending_selector + .update(cx, |_, cx| cx.emit(DismissEvent)) + .log_err(); + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index( + &mut self, + ix: usize, + _window: &mut Window, + _: &mut Context>, + ) { + self.selected_index = ix; + } + + fn update_matches( + &mut self, + _query: String, + _window: &mut Window, + _cx: &mut Context>, + ) -> gpui::Task<()> { + return Task::ready(()); + } + + fn render_match( + &self, + ix: usize, + selected: bool, + _: &mut Window, + _: &mut Context>, + ) -> Option { + let line_ending = self.matches[ix]; + let label = match line_ending { + LineEnding::Unix => "LF", + LineEnding::Windows => "CRLF", + }; + + let mut list_item = ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + 
.toggle_state(selected) + .child(Label::new(label)); + + if self.line_ending == line_ending { + list_item = list_item.end_slot(Icon::new(IconName::Check).color(Color::Muted)); + } + + Some(list_item) + } +} diff --git a/crates/proto/proto/buffer.proto b/crates/proto/proto/buffer.proto index f4dacf2fdca97bf9766c8de348a67cd18f8fb973..4580fd8e9db80e7dc54b1c997f8df108e3bf9330 100644 --- a/crates/proto/proto/buffer.proto +++ b/crates/proto/proto/buffer.proto @@ -143,6 +143,7 @@ message Operation { UpdateSelections update_selections = 3; UpdateDiagnostics update_diagnostics = 4; UpdateCompletionTriggers update_completion_triggers = 5; + UpdateLineEnding update_line_ending = 6; } message Edit { @@ -174,6 +175,12 @@ message Operation { repeated string triggers = 3; uint64 language_server_id = 4; } + + message UpdateLineEnding { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + LineEnding line_ending = 3; + } } message ProjectTransaction { diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index f2295d5fa732d9e36e2b37cf346199f35cabc803..bee6c87670c87a08945918a3dd49b26463a3a3ef 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -93,6 +93,7 @@ language_models.workspace = true language_selector.workspace = true language_tools.workspace = true languages = { workspace = true, features = ["load-grammars"] } +line_ending_selector.workspace = true libc.workspace = true log.workspace = true markdown.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 9582e7a2ab541243a768370eb08ed1f4f1c465a3..3287e866e48058a763c7db6633c1db4252fc0bec 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -620,6 +620,7 @@ pub fn main() { terminal_view::init(cx); journal::init(app_state.clone(), cx); language_selector::init(cx); + line_ending_selector::init(cx); toolchain_selector::init(cx); theme_selector::init(cx); settings_profile_selector::init(cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 
864f6badeb6941aa2d6bd17a43977f84a77461b1..fda43a10bad9acc6ae2864519cac5def08fb2f84 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4480,6 +4480,7 @@ mod tests { "keymap_editor", "keystroke_input", "language_selector", + "line_ending", "lsp_tool", "markdown", "menu", From b65fb0626491be66a26718ea43f29e7d89f74c9f Mon Sep 17 00:00:00 2001 From: Dino Date: Fri, 5 Sep 2025 18:12:51 +0100 Subject: [PATCH 035/109] editor: Fix text manipulation on line mode selections (#37646) This commit updates the implementation for `editor::Editor.manipulate_text` to use `editor::selections_collection::SelectionsCollection.all_adjusted` instead of `editor::selections_collection::SelectionsCollection.all`, as the former takes into account the selection's `line_mode`, fixing the issue where, if an user was in vim's visual line mode, running the `editor: convert to upper case` command would not work as expected. Closes #36953 Release Notes: - Fixed bug where using the editor's convert case commands while in vim's Visual Line mode would not work as expected --- crates/editor/src/editor.rs | 10 +++++++--- crates/editor/src/editor_tests.rs | 14 ++++++++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 37951074d15bbb8f34bcbaba9d839eae5d34cf1e..fd2299f37dfc91c4a1d287c549269a7a77fc07e7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -11391,14 +11391,17 @@ impl Editor { let mut edits = Vec::new(); let mut selection_adjustment = 0i32; - for selection in self.selections.all::(cx) { + for selection in self.selections.all_adjusted(cx) { let selection_is_empty = selection.is_empty(); let (start, end) = if selection_is_empty { let (word_range, _) = buffer.surrounding_word(selection.start, false); (word_range.start, word_range.end) } else { - (selection.start, selection.end) + ( + buffer.point_to_offset(selection.start), + buffer.point_to_offset(selection.end), + ) }; let 
text = buffer.text_for_range(start..end).collect::(); @@ -11409,7 +11412,8 @@ impl Editor { start: (start as i32 - selection_adjustment) as usize, end: ((start + text.len()) as i32 - selection_adjustment) as usize, goal: SelectionGoal::None, - ..selection + id: selection.id, + reversed: selection.reversed, }); selection_adjustment += old_length - text.len() as i32; diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 90e488368f99ea50bdcbfc671a359fa5e899f59e..f4569b436488728f197183b27c63b2706881c8cb 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -5363,6 +5363,20 @@ async fn test_manipulate_text(cx: &mut TestAppContext) { cx.assert_editor_state(indoc! {" «HeLlO, wOrLD!ˇ» "}); + + // Test selections with `line_mode = true`. + cx.update_editor(|editor, _window, _cx| editor.selections.line_mode = true); + cx.set_state(indoc! {" + «The quick brown + fox jumps over + tˇ»he lazy dog + "}); + cx.update_editor(|e, window, cx| e.convert_to_upper_case(&ConvertToUpperCase, window, cx)); + cx.assert_editor_state(indoc! {" + «THE QUICK BROWN + FOX JUMPS OVER + THE LAZY DOGˇ» + "}); } #[gpui::test] From 5d374193bb7493b993b661ea1231f113946b784b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 5 Sep 2025 10:34:39 -0700 Subject: [PATCH 036/109] Add terminal::Toggle (#37585) Co-Authored-By: Brandan Release Notes: - Added a new action `terminal::Toggle` that is by default bound to 'ctrl-\`'. This copies the default behaviour from VSCode and Jetbrains where the terminal opens and closes correctly. 
If you'd like the old behaviour you can rebind 'ctrl-\`' to `terminal::ToggleFocus` Co-authored-by: Brandan --- assets/keymaps/default-linux.json | 2 +- assets/keymaps/default-macos.json | 2 +- assets/keymaps/default-windows.json | 2 +- assets/keymaps/linux/jetbrains.json | 2 +- assets/keymaps/macos/jetbrains.json | 2 +- crates/terminal_view/src/terminal_panel.rs | 9 +++++++++ crates/vim/src/command.rs | 4 ++-- crates/workspace/src/workspace.rs | 10 ++++++++++ 8 files changed, 26 insertions(+), 7 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 44234b819abdf10231e4cb4e4fb7dfe335d19778..70a002cf081deaf5df66a2173dc17e7f02ce3aeb 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -583,7 +583,7 @@ "ctrl-n": "workspace::NewFile", "shift-new": "workspace::NewWindow", "ctrl-shift-n": "workspace::NewWindow", - "ctrl-`": "terminal_panel::ToggleFocus", + "ctrl-`": "terminal_panel::Toggle", "f10": ["app_menu::OpenApplicationMenu", "Zed"], "alt-1": ["workspace::ActivatePane", 0], "alt-2": ["workspace::ActivatePane", 1], diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 954684c826b18828857c6411e2413aa514aeec45..21504c7e623583017459baaac7d25191d7a08b68 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -649,7 +649,7 @@ "alt-shift-enter": "toast::RunAction", "cmd-shift-s": "workspace::SaveAs", "cmd-shift-n": "workspace::NewWindow", - "ctrl-`": "terminal_panel::ToggleFocus", + "ctrl-`": "terminal_panel::Toggle", "cmd-1": ["workspace::ActivatePane", 0], "cmd-2": ["workspace::ActivatePane", 1], "cmd-3": ["workspace::ActivatePane", 2], diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 728907e60ca3361270f15b20f66aaf7571be6ac2..1c9f1281882dc136daa7a3912d3d92b3516a4441 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -599,7 +599,7 @@ 
"ctrl-n": "workspace::NewFile", "shift-new": "workspace::NewWindow", "ctrl-shift-n": "workspace::NewWindow", - "ctrl-`": "terminal_panel::ToggleFocus", + "ctrl-`": "terminal_panel::Toggle", "f10": ["app_menu::OpenApplicationMenu", "Zed"], "alt-1": ["workspace::ActivatePane", 0], "alt-2": ["workspace::ActivatePane", 1], diff --git a/assets/keymaps/linux/jetbrains.json b/assets/keymaps/linux/jetbrains.json index 3df1243feda88680a4ce03cd0b25ab9ea9a36edd..59a182a968a849edb3359927e7647f611bcd44da 100644 --- a/assets/keymaps/linux/jetbrains.json +++ b/assets/keymaps/linux/jetbrains.json @@ -125,7 +125,7 @@ { "context": "Workspace || Editor", "bindings": { - "alt-f12": "terminal_panel::ToggleFocus", + "alt-f12": "terminal_panel::Toggle", "ctrl-shift-k": "git::Push" } }, diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index 66962811f48a429f2f5d036241c64d6549f60334..2c757c3a30a08eb55e8344945ab66baf91ce0c6b 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -127,7 +127,7 @@ { "context": "Workspace || Editor", "bindings": { - "alt-f12": "terminal_panel::ToggleFocus", + "alt-f12": "terminal_panel::Toggle", "cmd-shift-k": "git::Push" } }, diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 2ba7f617bf407299b2b0e670f66432ce053718be..44d64c5fe3351d4c3e2a9342bfaf818445d78736 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -49,6 +49,8 @@ const TERMINAL_PANEL_KEY: &str = "TerminalPanel"; actions!( terminal_panel, [ + /// Toggles the terminal panel. + Toggle, /// Toggles focus on the terminal panel. 
ToggleFocus ] @@ -64,6 +66,13 @@ pub fn init(cx: &mut App) { workspace.toggle_panel_focus::(window, cx); } }); + workspace.register_action(|workspace, _: &Toggle, window, cx| { + if is_enabled_in_workspace(workspace, cx) { + if !workspace.toggle_panel_focus::(window, cx) { + workspace.close_panel::(window, cx); + } + } + }); }, ) .detach(); diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 29fe6aae0252bcc1ca5767f71b7c668ecae1b9a8..eda483988b4e8a01affa9c85d0cad7657def61eb 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1265,8 +1265,8 @@ fn generate_commands(_: &App) -> Vec { VimCommand::str(("L", "explore"), "project_panel::ToggleFocus"), VimCommand::str(("S", "explore"), "project_panel::ToggleFocus"), VimCommand::str(("Ve", "xplore"), "project_panel::ToggleFocus"), - VimCommand::str(("te", "rm"), "terminal_panel::ToggleFocus"), - VimCommand::str(("T", "erm"), "terminal_panel::ToggleFocus"), + VimCommand::str(("te", "rm"), "terminal_panel::Toggle"), + VimCommand::str(("T", "erm"), "terminal_panel::Toggle"), VimCommand::str(("C", "ollab"), "collab_panel::ToggleFocus"), VimCommand::str(("Ch", "at"), "chat_panel::ToggleFocus"), VimCommand::str(("No", "tifications"), "notification_panel::ToggleFocus"), diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 0bfcaaf593eca73baa2a6a57def5af17b6ee93b3..6b4e7c1731b23e2e35086431d4d83bda4958d33f 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -3093,6 +3093,16 @@ impl Workspace { } } + pub fn close_panel(&self, window: &mut Window, cx: &mut Context) { + for dock in self.all_docks().iter() { + dock.update(cx, |dock, cx| { + if dock.panel::().is_some() { + dock.set_open(false, window, cx) + } + }) + } + } + pub fn panel(&self, cx: &App) -> Option> { self.all_docks() .iter() From 1c5c8552f2d00d442a0975a76d3231ab94004ea4 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 5 Sep 2025 12:03:26 -0700 Subject: 
[PATCH 037/109] Show actual error in InvalidBufferView (#37657) Release Notes: - Update error view to show the error --- crates/workspace/src/invalid_buffer_view.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/crates/workspace/src/invalid_buffer_view.rs b/crates/workspace/src/invalid_buffer_view.rs index b8c0db29d3ab95497fc5e850b0738b762f42b28b..05f409653b69e76654fa11d70b57d61fd6c0b73b 100644 --- a/crates/workspace/src/invalid_buffer_view.rs +++ b/crates/workspace/src/invalid_buffer_view.rs @@ -3,7 +3,8 @@ use std::{path::Path, sync::Arc}; use gpui::{EventEmitter, FocusHandle, Focusable}; use ui::{ App, Button, ButtonCommon, ButtonStyle, Clickable, Context, FluentBuilder, InteractiveElement, - KeyBinding, ParentElement, Render, SharedString, Styled as _, Window, h_flex, v_flex, + KeyBinding, Label, LabelCommon, LabelSize, ParentElement, Render, SharedString, Styled as _, + Window, h_flex, v_flex, }; use zed_actions::workspace::OpenWithSystem; @@ -30,7 +31,7 @@ impl InvalidBufferView { Self { is_local, abs_path: Arc::from(abs_path), - error: format!("{e}").into(), + error: format!("{}", e.root_cause()).into(), focus_handle: cx.focus_handle(), } } @@ -88,7 +89,12 @@ impl Render for InvalidBufferView { v_flex() .justify_center() .gap_2() - .child(h_flex().justify_center().child("Unsupported file type")) + .child(h_flex().justify_center().child("Could not open file")) + .child( + h_flex() + .justify_center() + .child(Label::new(self.error.clone()).size(LabelSize::Small)), + ) .when(self.is_local, |contents| { contents.child( h_flex().justify_center().child( From 45fa03410796db8ac488b781b915e4c80a588f1a Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 5 Sep 2025 21:50:51 +0200 Subject: [PATCH 038/109] Restore notification panel settings (#37661) Follow-up to https://github.com/zed-industries/zed/pull/37489 Notification panel settings were always missing the content, hence this PR adds it. 
After #37489, the use of the same content twice broke things, which currently makes the notification panel non-configurable on Nightly. This PR fixes this. There once was an issue about the documentation for the panel being wrong as well. However, I was just unable to find that sadly. Release Notes: - N/A --- crates/collab_ui/src/panel_settings.rs | 30 ++++++++++++++++++++------ 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/crates/collab_ui/src/panel_settings.rs b/crates/collab_ui/src/panel_settings.rs index bae118d819c2e38e7b77e5aa841c084e4c45d6e8..81d441167c06ef75c7e251dffefc55ff099a48e8 100644 --- a/crates/collab_ui/src/panel_settings.rs +++ b/crates/collab_ui/src/panel_settings.rs @@ -44,8 +44,24 @@ pub struct ChatPanelSettingsContent { pub default_width: Option, } -#[derive(Deserialize, Debug, SettingsKey)] -#[settings_key(key = "notification_panel")] +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] +#[settings_key(key = "collaboration_panel")] +pub struct PanelSettingsContent { + /// Whether to show the panel button in the status bar. + /// + /// Default: true + pub button: Option, + /// Where to dock the panel. + /// + /// Default: left + pub dock: Option, + /// Default width of the panel in pixels. + /// + /// Default: 240 + pub default_width: Option, +} + +#[derive(Deserialize, Debug)] pub struct NotificationPanelSettings { pub button: bool, pub dock: DockPosition, @@ -53,19 +69,19 @@ pub struct NotificationPanelSettings { } #[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi, SettingsKey)] -#[settings_key(key = "collaboration_panel")] -pub struct PanelSettingsContent { +#[settings_key(key = "notification_panel")] +pub struct NotificationPanelSettingsContent { /// Whether to show the panel button in the status bar. /// /// Default: true pub button: Option, /// Where to dock the panel. 
/// - /// Default: left + /// Default: right pub dock: Option, /// Default width of the panel in pixels. /// - /// Default: 240 + /// Default: 300 pub default_width: Option, } @@ -106,7 +122,7 @@ impl Settings for ChatPanelSettings { } impl Settings for NotificationPanelSettings { - type FileContent = PanelSettingsContent; + type FileContent = NotificationPanelSettingsContent; fn load( sources: SettingsSources, From c45177e2963c285a19db2cc2424df53041d48640 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Sat, 6 Sep 2025 02:05:42 +0530 Subject: [PATCH 039/109] editor: Fix fold placeholder hover width smaller than marker (#37663) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bug: Screenshot 2025-09-06 at 1 21 39 AM The fold marker we use, `⋯`, isn’t rendered at the same size as the editor’s font. Notice how the fold marker appears larger than the same character typed directly in the editor buffer. image When we shape the line, we use the editor’s font size, and it ends up determining the element’s width. To fix this, we should treat the ellipsis as a UI element rather than a buffer character, since current visual size looks good to me. Screenshot 2025-09-06 at 1 29 28 AM Release Notes: - Fixed an issue where the fold placeholder’s hover area was smaller than the marker. 
--- crates/editor/src/editor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index fd2299f37dfc91c4a1d287c549269a7a77fc07e7..2374c8d6875f05608aa800de660fb3602ed35988 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1794,7 +1794,7 @@ impl Editor { let font_size = style.font_size.to_pixels(window.rem_size()); let editor = cx.entity().downgrade(); let fold_placeholder = FoldPlaceholder { - constrain_width: true, + constrain_width: false, render: Arc::new(move |fold_id, fold_range, cx| { let editor = editor.clone(); div() From ea363466aa8e5ca9553820c3bde2746c56dfc6ea Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Fri, 5 Sep 2025 17:03:42 -0400 Subject: [PATCH 040/109] Fix attach modal showing local processes in SSH sessions (#37608) Closes #37520 This change makes the attach modal load processes from the remote server when connecting via SSH, rather than showing local processes from the client machine. This works by using the new GetProcessesRequest RPC message to allow downstream clients to get the correct processes to display. It also only works with downstream ssh clients because the message handler is only registered on headless projects. 
Release Notes: - debugger: Fix bug where SSH attach modal showed local processes instead of processes from the server --- crates/debugger_ui/src/attach_modal.rs | 92 +++++++++++++++----- crates/debugger_ui/src/new_process_modal.rs | 9 +- crates/proto/proto/debugger.proto | 14 +++ crates/proto/proto/zed.proto | 5 +- crates/proto/src/proto.rs | 4 + crates/remote_server/src/headless_project.rs | 30 +++++++ 6 files changed, 130 insertions(+), 24 deletions(-) diff --git a/crates/debugger_ui/src/attach_modal.rs b/crates/debugger_ui/src/attach_modal.rs index 662a98c82075cd6e936988959c855eadb5138092..3e3bc3ec27c3d1dbf0bacd445b883a50370d5b6f 100644 --- a/crates/debugger_ui/src/attach_modal.rs +++ b/crates/debugger_ui/src/attach_modal.rs @@ -1,8 +1,10 @@ use dap::{DapRegistry, DebugRequest}; use fuzzy::{StringMatch, StringMatchCandidate}; -use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Render}; +use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Render, Task}; use gpui::{Subscription, WeakEntity}; use picker::{Picker, PickerDelegate}; +use project::Project; +use rpc::proto; use task::ZedDebugConfig; use util::debug_panic; @@ -56,29 +58,28 @@ impl AttachModal { pub fn new( definition: ZedDebugConfig, workspace: WeakEntity, + project: Entity, modal: bool, window: &mut Window, cx: &mut Context, ) -> Self { - let mut processes: Box<[_]> = System::new_all() - .processes() - .values() - .map(|process| { - let name = process.name().to_string_lossy().into_owned(); - Candidate { - name: name.into(), - pid: process.pid().as_u32(), - command: process - .cmd() - .iter() - .map(|s| s.to_string_lossy().to_string()) - .collect::>(), - } - }) - .collect(); - processes.sort_by_key(|k| k.name.clone()); - let processes = processes.into_iter().collect(); - Self::with_processes(workspace, definition, processes, modal, window, cx) + let processes_task = get_processes_for_project(&project, cx); + + let modal = Self::with_processes(workspace, definition, 
Arc::new([]), modal, window, cx); + + cx.spawn_in(window, async move |this, cx| { + let processes = processes_task.await; + this.update_in(cx, |modal, window, cx| { + modal.picker.update(cx, |picker, cx| { + picker.delegate.candidates = processes; + picker.refresh(window, cx); + }); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + + modal } pub(super) fn with_processes( @@ -332,6 +333,57 @@ impl PickerDelegate for AttachModalDelegate { } } +fn get_processes_for_project(project: &Entity, cx: &mut App) -> Task> { + let project = project.read(cx); + + if let Some(remote_client) = project.remote_client() { + let proto_client = remote_client.read(cx).proto_client(); + cx.spawn(async move |_cx| { + let response = proto_client + .request(proto::GetProcesses { + project_id: proto::REMOTE_SERVER_PROJECT_ID, + }) + .await + .unwrap_or_else(|_| proto::GetProcessesResponse { + processes: Vec::new(), + }); + + let mut processes: Vec = response + .processes + .into_iter() + .map(|p| Candidate { + pid: p.pid, + name: p.name.into(), + command: p.command, + }) + .collect(); + + processes.sort_by_key(|k| k.name.clone()); + Arc::from(processes.into_boxed_slice()) + }) + } else { + let mut processes: Box<[_]> = System::new_all() + .processes() + .values() + .map(|process| { + let name = process.name().to_string_lossy().into_owned(); + Candidate { + name: name.into(), + pid: process.pid().as_u32(), + command: process + .cmd() + .iter() + .map(|s| s.to_string_lossy().to_string()) + .collect::>(), + } + }) + .collect(); + processes.sort_by_key(|k| k.name.clone()); + let processes = processes.into_iter().collect(); + Task::ready(processes) + } +} + #[cfg(any(test, feature = "test-support"))] pub(crate) fn _process_names(modal: &AttachModal, cx: &mut Context) -> Vec { modal.picker.read_with(cx, |picker, _| { diff --git a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index 
68770bc8b15fbf95824de167dbc8d7fada2b5075..ee6289187ba990d5bbaa040631a1c32619857e53 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -20,7 +20,7 @@ use gpui::{ }; use itertools::Itertools as _; use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch}; -use project::{DebugScenarioContext, TaskContexts, TaskSourceKind, task_store::TaskStore}; +use project::{DebugScenarioContext, Project, TaskContexts, TaskSourceKind, task_store::TaskStore}; use settings::Settings; use task::{DebugScenario, RevealTarget, ZedDebugConfig}; use theme::ThemeSettings; @@ -88,8 +88,10 @@ impl NewProcessModal { })?; workspace.update_in(cx, |workspace, window, cx| { let workspace_handle = workspace.weak_handle(); + let project = workspace.project().clone(); workspace.toggle_modal(window, cx, |window, cx| { - let attach_mode = AttachMode::new(None, workspace_handle.clone(), window, cx); + let attach_mode = + AttachMode::new(None, workspace_handle.clone(), project, window, cx); let debug_picker = cx.new(|cx| { let delegate = @@ -940,6 +942,7 @@ impl AttachMode { pub(super) fn new( debugger: Option, workspace: WeakEntity, + project: Entity, window: &mut Window, cx: &mut Context, ) -> Entity { @@ -950,7 +953,7 @@ impl AttachMode { stop_on_entry: Some(false), }; let attach_picker = cx.new(|cx| { - let modal = AttachModal::new(definition.clone(), workspace, false, window, cx); + let modal = AttachModal::new(definition.clone(), workspace, project, false, window, cx); window.focus(&modal.focus_handle(cx)); modal diff --git a/crates/proto/proto/debugger.proto b/crates/proto/proto/debugger.proto index c6f9c9f1342336c36ab8dfd0ec70a24ff6564476..e3cb5ebbce0ceb87a7197f19a133bbb92a572085 100644 --- a/crates/proto/proto/debugger.proto +++ b/crates/proto/proto/debugger.proto @@ -546,3 +546,17 @@ message LogToDebugConsole { uint64 session_id = 2; string message = 3; } + +message GetProcesses { + uint64 project_id = 1; +} + 
+message GetProcessesResponse { + repeated ProcessInfo processes = 1; +} + +message ProcessInfo { + uint32 pid = 1; + string name = 2; + repeated string command = 3; +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 4133b4b5eea6f14e2c9359f7318f192a8566d809..3763671a7a1f29949194d61c70866f96ca6ad972 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -399,7 +399,10 @@ message Envelope { LspQueryResponse lsp_query_response = 366; ToggleLspLogs toggle_lsp_logs = 367; - UpdateUserSettings update_user_settings = 368; // current max + UpdateUserSettings update_user_settings = 368; + + GetProcesses get_processes = 369; + GetProcessesResponse get_processes_response = 370; // current max } reserved 87 to 88; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 8f4e836b20ae5bae43617e10391f75c3a069a82f..3c98ae62e7a4b1489c071a0ac673d23b394c28d5 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -102,6 +102,8 @@ messages!( (GetPathMetadata, Background), (GetPathMetadataResponse, Background), (GetPermalinkToLine, Foreground), + (GetProcesses, Background), + (GetProcessesResponse, Background), (GetPermalinkToLineResponse, Foreground), (GetProjectSymbols, Background), (GetProjectSymbolsResponse, Background), @@ -485,6 +487,7 @@ request_messages!( (GetDefaultBranch, GetDefaultBranchResponse), (GitClone, GitCloneResponse), (ToggleLspLogs, Ack), + (GetProcesses, GetProcessesResponse), ); lsp_messages!( @@ -610,6 +613,7 @@ entity_messages!( ActivateToolchain, ActiveToolchain, GetPathMetadata, + GetProcesses, CancelLanguageServerWork, RegisterBufferWithLanguageServers, GitShow, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index f55826631b46b4f9eaaa17d8a9f4b0603a07fcc3..7fb5ac8498c67863783b1944c912f0ad9767fed5 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -32,6 
+32,7 @@ use std::{ path::{Path, PathBuf}, sync::{Arc, atomic::AtomicUsize}, }; +use sysinfo::System; use util::ResultExt; use worktree::Worktree; @@ -230,6 +231,7 @@ impl HeadlessProject { session.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata); session.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server); session.add_request_handler(cx.weak_entity(), Self::handle_ping); + session.add_request_handler(cx.weak_entity(), Self::handle_get_processes); session.add_entity_request_handler(Self::handle_add_worktree); session.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree); @@ -719,6 +721,34 @@ impl HeadlessProject { log::debug!("Received ping from client"); Ok(proto::Ack {}) } + + async fn handle_get_processes( + _this: Entity, + _envelope: TypedEnvelope, + _cx: AsyncApp, + ) -> Result { + let mut processes = Vec::new(); + let system = System::new_all(); + + for (_pid, process) in system.processes() { + let name = process.name().to_string_lossy().into_owned(); + let command = process + .cmd() + .iter() + .map(|s| s.to_string_lossy().to_string()) + .collect::>(); + + processes.push(proto::ProcessInfo { + pid: process.pid().as_u32(), + name, + command, + }); + } + + processes.sort_by_key(|p| p.name.clone()); + + Ok(proto::GetProcessesResponse { processes }) + } } fn prompt_to_proto( From 236b3e546e484647c491323d733c299af518c7a0 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Fri, 5 Sep 2025 14:34:13 -0700 Subject: [PATCH 041/109] Update Link (#37671) Documentation fix Release Notes: - N/A --- docs/src/configuring-languages.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index 52b7a3f7b82aeb3f2f19dcd63ef64c34251f1cd8..9da44fb53dba0ea044ce01ddb2d9ef3d90133adb 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -251,7 +251,7 @@ You can toggle language server support globally or 
per-language: } ``` -This disables the language server for Markdown files, which can be useful for performance in large documentation projects. You can configure this globally in your `~/.zed/settings.json` or inside a `.zed/settings.json` in your project directory. +This disables the language server for Markdown files, which can be useful for performance in large documentation projects. You can configure this globally in your `~/.config/zed/settings.json` or inside a `.zed/settings.json` in your project directory. ## Formatting and Linting From 64b6e8ba0fd47828b6cf2917f520ca9962c15df5 Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Fri, 5 Sep 2025 23:35:28 +0200 Subject: [PATCH 042/109] debugger: Fix allow showing more than 1 compact session item (#37036) Closes #36978 This PR fixes an issue that we would only show the first `root -> child` session in compact mode, but the session that came after it, we would only show the child session label instead of also adding the parent label due to compact mode. ## Before Screenshot 2025-08-27 at 22 18 39 ## After Screenshot 2025-08-27 at 21 57 16 With 3 parent + child sessions and one parent session only. Screenshot 2025-08-27 at 22 22 13 cc @cole-miller I know we hacked on this some while ago, so figured you might be the best guy to ask for a review. 
Release Notes: - Debugger: Fix to allow showing more than 1 compact session item --------- Co-authored-by: Anthony --- crates/debugger_ui/src/dropdown_menus.rs | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/crates/debugger_ui/src/dropdown_menus.rs b/crates/debugger_ui/src/dropdown_menus.rs index c611d5d44f36b4eafb578a400da615bbd96b4cd2..376a4a41ce7b03cd07f578d85f641a6ddfc4ebe8 100644 --- a/crates/debugger_ui/src/dropdown_menus.rs +++ b/crates/debugger_ui/src/dropdown_menus.rs @@ -113,23 +113,6 @@ impl DebugPanel { } }; session_entries.push(root_entry); - - session_entries.extend( - sessions_with_children - .by_ref() - .take_while(|(session, _)| { - session - .read(cx) - .session(cx) - .read(cx) - .parent_id(cx) - .is_some() - }) - .map(|(session, _)| SessionListEntry { - leaf: session.clone(), - ancestors: vec![], - }), - ); } let weak = cx.weak_entity(); From 59bdbf5a5dfc465f6327da4048d72f6536fdd841 Mon Sep 17 00:00:00 2001 From: Nia Date: Sat, 6 Sep 2025 00:27:14 +0200 Subject: [PATCH 043/109] Various fixups to unsafe code (#37651) A collection of fixups of possibly-unsound code and removing some small useless writes. 
Release Notes: - N/A --- crates/fs/src/fs.rs | 15 ++- crates/gpui/src/arena.rs | 27 +++--- crates/gpui/src/util.rs | 6 +- crates/sqlez/src/connection.rs | 170 +++++++++++++++++---------------- crates/sqlez/src/statement.rs | 78 ++++++++------- crates/util/src/util.rs | 4 +- crates/zlog/src/filter.rs | 14 +-- crates/zlog/src/sink.rs | 38 ++++---- 8 files changed, 181 insertions(+), 171 deletions(-) diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index a5cf9b88254deff5b9a07402207f19875827d7f0..98c8dc9054984c49732bec57a9604a14ceb5ee72 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -20,6 +20,9 @@ use std::os::fd::{AsFd, AsRawFd}; #[cfg(unix)] use std::os::unix::fs::{FileTypeExt, MetadataExt}; +#[cfg(any(target_os = "macos", target_os = "freebsd"))] +use std::mem::MaybeUninit; + use async_tar::Archive; use futures::{AsyncRead, Stream, StreamExt, future::BoxFuture}; use git::repository::{GitRepository, RealGitRepository}; @@ -261,14 +264,15 @@ impl FileHandle for std::fs::File { }; let fd = self.as_fd(); - let mut path_buf: [libc::c_char; libc::PATH_MAX as usize] = [0; libc::PATH_MAX as usize]; + let mut path_buf = MaybeUninit::<[u8; libc::PATH_MAX as usize]>::uninit(); let result = unsafe { libc::fcntl(fd.as_raw_fd(), libc::F_GETPATH, path_buf.as_mut_ptr()) }; if result == -1 { anyhow::bail!("fcntl returned -1".to_string()); } - let c_str = unsafe { CStr::from_ptr(path_buf.as_ptr()) }; + // SAFETY: `fcntl` will initialize the path buffer. 
+ let c_str = unsafe { CStr::from_ptr(path_buf.as_ptr().cast()) }; let path = PathBuf::from(OsStr::from_bytes(c_str.to_bytes())); Ok(path) } @@ -296,15 +300,16 @@ impl FileHandle for std::fs::File { }; let fd = self.as_fd(); - let mut kif: libc::kinfo_file = unsafe { std::mem::zeroed() }; + let mut kif = MaybeUninit::::uninit(); kif.kf_structsize = libc::KINFO_FILE_SIZE; - let result = unsafe { libc::fcntl(fd.as_raw_fd(), libc::F_KINFO, &mut kif) }; + let result = unsafe { libc::fcntl(fd.as_raw_fd(), libc::F_KINFO, kif.as_mut_ptr()) }; if result == -1 { anyhow::bail!("fcntl returned -1".to_string()); } - let c_str = unsafe { CStr::from_ptr(kif.kf_path.as_ptr()) }; + // SAFETY: `fcntl` will initialize the kif. + let c_str = unsafe { CStr::from_ptr(kif.assume_init().kf_path.as_ptr()) }; let path = PathBuf::from(OsStr::from_bytes(c_str.to_bytes())); Ok(path) } diff --git a/crates/gpui/src/arena.rs b/crates/gpui/src/arena.rs index 0983bd23454c9a3a921ed721ecd32561387f9049..a0d0c23987472de46d5b23129adb5a4ec8ee00cb 100644 --- a/crates/gpui/src/arena.rs +++ b/crates/gpui/src/arena.rs @@ -1,8 +1,9 @@ use std::{ alloc::{self, handle_alloc_error}, cell::Cell, + num::NonZeroUsize, ops::{Deref, DerefMut}, - ptr, + ptr::{self, NonNull}, rc::Rc, }; @@ -30,23 +31,23 @@ impl Drop for Chunk { fn drop(&mut self) { unsafe { let chunk_size = self.end.offset_from_unsigned(self.start); - // this never fails as it succeeded during allocation - let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap(); + // SAFETY: This succeeded during allocation. 
+ let layout = alloc::Layout::from_size_align_unchecked(chunk_size, 1); alloc::dealloc(self.start, layout); } } } impl Chunk { - fn new(chunk_size: usize) -> Self { + fn new(chunk_size: NonZeroUsize) -> Self { unsafe { // this only fails if chunk_size is unreasonably huge - let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap(); + let layout = alloc::Layout::from_size_align(chunk_size.get(), 1).unwrap(); let start = alloc::alloc(layout); if start.is_null() { handle_alloc_error(layout); } - let end = start.add(chunk_size); + let end = start.add(chunk_size.get()); Self { start, end, @@ -55,14 +56,14 @@ impl Chunk { } } - fn allocate(&mut self, layout: alloc::Layout) -> Option<*mut u8> { + fn allocate(&mut self, layout: alloc::Layout) -> Option> { unsafe { let aligned = self.offset.add(self.offset.align_offset(layout.align())); let next = aligned.add(layout.size()); if next <= self.end { self.offset = next; - Some(aligned) + NonNull::new(aligned) } else { None } @@ -79,7 +80,7 @@ pub struct Arena { elements: Vec, valid: Rc>, current_chunk_index: usize, - chunk_size: usize, + chunk_size: NonZeroUsize, } impl Drop for Arena { @@ -90,7 +91,7 @@ impl Drop for Arena { impl Arena { pub fn new(chunk_size: usize) -> Self { - assert!(chunk_size > 0); + let chunk_size = NonZeroUsize::try_from(chunk_size).unwrap(); Self { chunks: vec![Chunk::new(chunk_size)], elements: Vec::new(), @@ -101,7 +102,7 @@ impl Arena { } pub fn capacity(&self) -> usize { - self.chunks.len() * self.chunk_size + self.chunks.len() * self.chunk_size.get() } pub fn clear(&mut self) { @@ -136,7 +137,7 @@ impl Arena { let layout = alloc::Layout::new::(); let mut current_chunk = &mut self.chunks[self.current_chunk_index]; let ptr = if let Some(ptr) = current_chunk.allocate(layout) { - ptr + ptr.as_ptr() } else { self.current_chunk_index += 1; if self.current_chunk_index >= self.chunks.len() { @@ -149,7 +150,7 @@ impl Arena { } current_chunk = &mut self.chunks[self.current_chunk_index]; if let 
Some(ptr) = current_chunk.allocate(layout) { - ptr + ptr.as_ptr() } else { panic!( "Arena chunk_size of {} is too small to allocate {} bytes", diff --git a/crates/gpui/src/util.rs b/crates/gpui/src/util.rs index 3d7fa06e6ca013ae38b1c63d1bfd624d46cdf4f1..3704784a954f14b8317202e227ffb1b17092d70d 100644 --- a/crates/gpui/src/util.rs +++ b/crates/gpui/src/util.rs @@ -99,9 +99,9 @@ impl Future for WithTimeout { fn poll(self: Pin<&mut Self>, cx: &mut task::Context) -> task::Poll { // SAFETY: the fields of Timeout are private and we never move the future ourselves // And its already pinned since we are being polled (all futures need to be pinned to be polled) - let this = unsafe { self.get_unchecked_mut() }; - let future = unsafe { Pin::new_unchecked(&mut this.future) }; - let timer = unsafe { Pin::new_unchecked(&mut this.timer) }; + let this = unsafe { &raw mut *self.get_unchecked_mut() }; + let future = unsafe { Pin::new_unchecked(&mut (*this).future) }; + let timer = unsafe { Pin::new_unchecked(&mut (*this).timer) }; if let task::Poll::Ready(output) = future.poll(cx) { task::Poll::Ready(Ok(output)) diff --git a/crates/sqlez/src/connection.rs b/crates/sqlez/src/connection.rs index 228bd4c6a2df31f41dc1988596fc87323063d78c..53f0d4e2614f340cc0563d5cd9374bdc3626d9bb 100644 --- a/crates/sqlez/src/connection.rs +++ b/crates/sqlez/src/connection.rs @@ -92,91 +92,97 @@ impl Connection { let mut remaining_sql = sql.as_c_str(); let sql_start = remaining_sql.as_ptr(); - unsafe { - let mut alter_table = None; - while { - let remaining_sql_str = remaining_sql.to_str().unwrap().trim(); - let any_remaining_sql = remaining_sql_str != ";" && !remaining_sql_str.is_empty(); - if any_remaining_sql { - alter_table = parse_alter_table(remaining_sql_str); + let mut alter_table = None; + while { + let remaining_sql_str = remaining_sql.to_str().unwrap().trim(); + let any_remaining_sql = remaining_sql_str != ";" && !remaining_sql_str.is_empty(); + if any_remaining_sql { + alter_table = 
parse_alter_table(remaining_sql_str); + } + any_remaining_sql + } { + let mut raw_statement = ptr::null_mut::(); + let mut remaining_sql_ptr = ptr::null(); + + let (res, offset, message, _conn) = if let Some((table_to_alter, column)) = alter_table + { + // ALTER TABLE is a weird statement. When preparing the statement the table's + // existence is checked *before* syntax checking any other part of the statement. + // Therefore, we need to make sure that the table has been created before calling + // prepare. As we don't want to trash whatever database this is connected to, we + // create a new in-memory DB to test. + + let temp_connection = Connection::open_memory(None); + //This should always succeed, if it doesn't then you really should know about it + temp_connection + .exec(&format!("CREATE TABLE {table_to_alter}({column})")) + .unwrap()() + .unwrap(); + + unsafe { + sqlite3_prepare_v2( + temp_connection.sqlite3, + remaining_sql.as_ptr(), + -1, + &mut raw_statement, + &mut remaining_sql_ptr, + ) + }; + + #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] + let offset = unsafe { sqlite3_error_offset(temp_connection.sqlite3) }; + + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + let offset = 0; + + unsafe { + ( + sqlite3_errcode(temp_connection.sqlite3), + offset, + sqlite3_errmsg(temp_connection.sqlite3), + Some(temp_connection), + ) } - any_remaining_sql - } { - let mut raw_statement = ptr::null_mut::(); - let mut remaining_sql_ptr = ptr::null(); - - let (res, offset, message, _conn) = - if let Some((table_to_alter, column)) = alter_table { - // ALTER TABLE is a weird statement. When preparing the statement the table's - // existence is checked *before* syntax checking any other part of the statement. - // Therefore, we need to make sure that the table has been created before calling - // prepare. As we don't want to trash whatever database this is connected to, we - // create a new in-memory DB to test. 
- - let temp_connection = Connection::open_memory(None); - //This should always succeed, if it doesn't then you really should know about it - temp_connection - .exec(&format!("CREATE TABLE {table_to_alter}({column})")) - .unwrap()() - .unwrap(); - - sqlite3_prepare_v2( - temp_connection.sqlite3, - remaining_sql.as_ptr(), - -1, - &mut raw_statement, - &mut remaining_sql_ptr, - ); - - #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] - let offset = sqlite3_error_offset(temp_connection.sqlite3); - - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - let offset = 0; - - ( - sqlite3_errcode(temp_connection.sqlite3), - offset, - sqlite3_errmsg(temp_connection.sqlite3), - Some(temp_connection), - ) - } else { - sqlite3_prepare_v2( - self.sqlite3, - remaining_sql.as_ptr(), - -1, - &mut raw_statement, - &mut remaining_sql_ptr, - ); - - #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] - let offset = sqlite3_error_offset(self.sqlite3); - - #[cfg(any(target_os = "linux", target_os = "freebsd"))] - let offset = 0; - - ( - sqlite3_errcode(self.sqlite3), - offset, - sqlite3_errmsg(self.sqlite3), - None, - ) - }; - - sqlite3_finalize(raw_statement); - - if res == 1 && offset >= 0 { - let sub_statement_correction = - remaining_sql.as_ptr() as usize - sql_start as usize; - let err_msg = - String::from_utf8_lossy(CStr::from_ptr(message as *const _).to_bytes()) - .into_owned(); - - return Some((err_msg, offset as usize + sub_statement_correction)); + } else { + unsafe { + sqlite3_prepare_v2( + self.sqlite3, + remaining_sql.as_ptr(), + -1, + &mut raw_statement, + &mut remaining_sql_ptr, + ) + }; + + #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] + let offset = unsafe { sqlite3_error_offset(self.sqlite3) }; + + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + let offset = 0; + + unsafe { + ( + sqlite3_errcode(self.sqlite3), + offset, + sqlite3_errmsg(self.sqlite3), + None, + ) } - remaining_sql = CStr::from_ptr(remaining_sql_ptr); - 
alter_table = None; + }; + + unsafe { sqlite3_finalize(raw_statement) }; + + if res == 1 && offset >= 0 { + let sub_statement_correction = remaining_sql.as_ptr() as usize - sql_start as usize; + let err_msg = String::from_utf8_lossy(unsafe { + CStr::from_ptr(message as *const _).to_bytes() + }) + .into_owned(); + + return Some((err_msg, offset as usize + sub_statement_correction)); } + remaining_sql = unsafe { CStr::from_ptr(remaining_sql_ptr) }; + alter_table = None; } None } diff --git a/crates/sqlez/src/statement.rs b/crates/sqlez/src/statement.rs index eb7553f862b0a291bf08345606ff22317d3eec60..d08e58a6f93344d4bb52c35c8c76406724a230b4 100644 --- a/crates/sqlez/src/statement.rs +++ b/crates/sqlez/src/statement.rs @@ -44,41 +44,41 @@ impl<'a> Statement<'a> { connection, phantom: PhantomData, }; - unsafe { - let sql = CString::new(query.as_ref()).context("Error creating cstr")?; - let mut remaining_sql = sql.as_c_str(); - while { - let remaining_sql_str = remaining_sql - .to_str() - .context("Parsing remaining sql")? - .trim(); - remaining_sql_str != ";" && !remaining_sql_str.is_empty() - } { - let mut raw_statement = ptr::null_mut::(); - let mut remaining_sql_ptr = ptr::null(); + let sql = CString::new(query.as_ref()).context("Error creating cstr")?; + let mut remaining_sql = sql.as_c_str(); + while { + let remaining_sql_str = remaining_sql + .to_str() + .context("Parsing remaining sql")? 
+ .trim(); + remaining_sql_str != ";" && !remaining_sql_str.is_empty() + } { + let mut raw_statement = ptr::null_mut::(); + let mut remaining_sql_ptr = ptr::null(); + unsafe { sqlite3_prepare_v2( connection.sqlite3, remaining_sql.as_ptr(), -1, &mut raw_statement, &mut remaining_sql_ptr, - ); + ) + }; - connection.last_error().with_context(|| { - format!("Prepare call failed for query:\n{}", query.as_ref()) - })?; + connection + .last_error() + .with_context(|| format!("Prepare call failed for query:\n{}", query.as_ref()))?; - remaining_sql = CStr::from_ptr(remaining_sql_ptr); - statement.raw_statements.push(raw_statement); + remaining_sql = unsafe { CStr::from_ptr(remaining_sql_ptr) }; + statement.raw_statements.push(raw_statement); - if !connection.can_write() && sqlite3_stmt_readonly(raw_statement) == 0 { - let sql = CStr::from_ptr(sqlite3_sql(raw_statement)); + if !connection.can_write() && unsafe { sqlite3_stmt_readonly(raw_statement) == 0 } { + let sql = unsafe { CStr::from_ptr(sqlite3_sql(raw_statement)) }; - bail!( - "Write statement prepared with connection that is not write capable. SQL:\n{} ", - sql.to_str()? - ) - } + bail!( + "Write statement prepared with connection that is not write capable. SQL:\n{} ", + sql.to_str()? 
+ ) } } @@ -271,23 +271,21 @@ impl<'a> Statement<'a> { } fn step(&mut self) -> Result { - unsafe { - match sqlite3_step(self.current_statement()) { - SQLITE_ROW => Ok(StepResult::Row), - SQLITE_DONE => { - if self.current_statement >= self.raw_statements.len() - 1 { - Ok(StepResult::Done) - } else { - self.current_statement += 1; - self.step() - } - } - SQLITE_MISUSE => anyhow::bail!("Statement step returned SQLITE_MISUSE"), - _other_error => { - self.connection.last_error()?; - unreachable!("Step returned error code and last error failed to catch it"); + match unsafe { sqlite3_step(self.current_statement()) } { + SQLITE_ROW => Ok(StepResult::Row), + SQLITE_DONE => { + if self.current_statement >= self.raw_statements.len() - 1 { + Ok(StepResult::Done) + } else { + self.current_statement += 1; + self.step() } } + SQLITE_MISUSE => anyhow::bail!("Statement step returned SQLITE_MISUSE"), + _other_error => { + self.connection.last_error()?; + unreachable!("Step returned error code and last error failed to catch it"); + } } } diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index db44e3945186842990f7ef8d7b2794b023324d56..90f5be1c92875ac0b9b2d3e7352ae858371b3686 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -256,6 +256,9 @@ fn load_shell_from_passwd() -> Result<()> { &mut result, ) }; + anyhow::ensure!(!result.is_null(), "passwd entry for uid {} not found", uid); + + // SAFETY: If `getpwuid_r` doesn't error, we have the entry here. 
let entry = unsafe { pwd.assume_init() }; anyhow::ensure!( @@ -264,7 +267,6 @@ fn load_shell_from_passwd() -> Result<()> { uid, status ); - anyhow::ensure!(!result.is_null(), "passwd entry for uid {} not found", uid); anyhow::ensure!( entry.pw_uid == uid, "passwd entry has different uid ({}) than getuid ({}) returned", diff --git a/crates/zlog/src/filter.rs b/crates/zlog/src/filter.rs index ee3c2410798b286795b9fd78b89502a2a7894987..31a58894774e6c0d08ea22b585350eb26ff09907 100644 --- a/crates/zlog/src/filter.rs +++ b/crates/zlog/src/filter.rs @@ -22,7 +22,7 @@ pub const LEVEL_ENABLED_MAX_DEFAULT: log::LevelFilter = log::LevelFilter::Info; /// crate that the max level is everything, so that we can dynamically enable /// logs that are more verbose than this level without the `log` crate throwing /// them away before we see them -static mut LEVEL_ENABLED_MAX_STATIC: log::LevelFilter = LEVEL_ENABLED_MAX_DEFAULT; +static LEVEL_ENABLED_MAX_STATIC: AtomicU8 = AtomicU8::new(LEVEL_ENABLED_MAX_DEFAULT as u8); /// A cache of the true maximum log level that _could_ be printed. 
This is based /// on the maximally verbose level that is configured by the user, and is used @@ -46,7 +46,7 @@ const DEFAULT_FILTERS: &[(&str, log::LevelFilter)] = &[ pub fn init_env_filter(filter: env_config::EnvFilter) { if let Some(level_max) = filter.level_global { - unsafe { LEVEL_ENABLED_MAX_STATIC = level_max } + LEVEL_ENABLED_MAX_STATIC.store(level_max as u8, Ordering::Release) } if ENV_FILTER.set(filter).is_err() { panic!("Environment filter cannot be initialized twice"); @@ -54,7 +54,7 @@ pub fn init_env_filter(filter: env_config::EnvFilter) { } pub fn is_possibly_enabled_level(level: log::Level) -> bool { - level as u8 <= LEVEL_ENABLED_MAX_CONFIG.load(Ordering::Relaxed) + level as u8 <= LEVEL_ENABLED_MAX_CONFIG.load(Ordering::Acquire) } pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Level) -> bool { @@ -66,7 +66,7 @@ pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Le // scope map return false; } - let is_enabled_by_default = level <= unsafe { LEVEL_ENABLED_MAX_STATIC }; + let is_enabled_by_default = level as u8 <= LEVEL_ENABLED_MAX_STATIC.load(Ordering::Acquire); let global_scope_map = SCOPE_MAP.read().unwrap_or_else(|err| { SCOPE_MAP.clear_poison(); err.into_inner() @@ -92,13 +92,13 @@ pub fn is_scope_enabled(scope: &Scope, module_path: Option<&str>, level: log::Le pub fn refresh_from_settings(settings: &HashMap) { let env_config = ENV_FILTER.get(); let map_new = ScopeMap::new_from_settings_and_env(settings, env_config, DEFAULT_FILTERS); - let mut level_enabled_max = unsafe { LEVEL_ENABLED_MAX_STATIC }; + let mut level_enabled_max = LEVEL_ENABLED_MAX_STATIC.load(Ordering::Acquire); for entry in &map_new.entries { if let Some(level) = entry.enabled { - level_enabled_max = level_enabled_max.max(level); + level_enabled_max = level_enabled_max.max(level as u8); } } - LEVEL_ENABLED_MAX_CONFIG.store(level_enabled_max as u8, Ordering::Release); + LEVEL_ENABLED_MAX_CONFIG.store(level_enabled_max, 
Ordering::Release); { let mut global_map = SCOPE_MAP.write().unwrap_or_else(|err| { diff --git a/crates/zlog/src/sink.rs b/crates/zlog/src/sink.rs index 3ac85d4bbfc8aaa5d8568cb14b50e04a94708f1c..afbdf37bf9c74860a3b56b706ffc6d64338fd275 100644 --- a/crates/zlog/src/sink.rs +++ b/crates/zlog/src/sink.rs @@ -4,7 +4,7 @@ use std::{ path::PathBuf, sync::{ Mutex, OnceLock, - atomic::{AtomicU64, Ordering}, + atomic::{AtomicBool, AtomicU64, Ordering}, }, }; @@ -19,17 +19,17 @@ const ANSI_GREEN: &str = "\x1b[32m"; const ANSI_BLUE: &str = "\x1b[34m"; const ANSI_MAGENTA: &str = "\x1b[35m"; -/// Whether stdout output is enabled. -static mut ENABLED_SINKS_STDOUT: bool = false; -/// Whether stderr output is enabled. -static mut ENABLED_SINKS_STDERR: bool = false; - /// Is Some(file) if file output is enabled. static ENABLED_SINKS_FILE: Mutex> = Mutex::new(None); static SINK_FILE_PATH: OnceLock<&'static PathBuf> = OnceLock::new(); static SINK_FILE_PATH_ROTATE: OnceLock<&'static PathBuf> = OnceLock::new(); + +// NB: Since this can be accessed in tests, we probably should stick to atomics here. +/// Whether stdout output is enabled. +static ENABLED_SINKS_STDOUT: AtomicBool = AtomicBool::new(false); +/// Whether stderr output is enabled. +static ENABLED_SINKS_STDERR: AtomicBool = AtomicBool::new(false); /// Atomic counter for the size of the log file in bytes. -// TODO: make non-atomic if writing single threaded static SINK_FILE_SIZE_BYTES: AtomicU64 = AtomicU64::new(0); /// Maximum size of the log file before it will be rotated, in bytes. const SINK_FILE_SIZE_BYTES_MAX: u64 = 1024 * 1024; // 1 MB @@ -42,15 +42,13 @@ pub struct Record<'a> { } pub fn init_output_stdout() { - unsafe { - ENABLED_SINKS_STDOUT = true; - } + // Use atomics here instead of just a `static mut`, since in the context + // of tests these accesses can be multi-threaded. 
+ ENABLED_SINKS_STDOUT.store(true, Ordering::Release); } pub fn init_output_stderr() { - unsafe { - ENABLED_SINKS_STDERR = true; - } + ENABLED_SINKS_STDERR.store(true, Ordering::Release); } pub fn init_output_file( @@ -79,7 +77,7 @@ pub fn init_output_file( if size_bytes >= SINK_FILE_SIZE_BYTES_MAX { rotate_log_file(&mut file, Some(path), path_rotate, &SINK_FILE_SIZE_BYTES); } else { - SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Relaxed); + SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Release); } *enabled_sinks_file = Some(file); @@ -108,7 +106,7 @@ static LEVEL_ANSI_COLORS: [&str; 6] = [ // PERF: batching pub fn submit(record: Record) { - if unsafe { ENABLED_SINKS_STDOUT } { + if ENABLED_SINKS_STDOUT.load(Ordering::Acquire) { let mut stdout = std::io::stdout().lock(); _ = writeln!( &mut stdout, @@ -123,7 +121,7 @@ pub fn submit(record: Record) { }, record.message ); - } else if unsafe { ENABLED_SINKS_STDERR } { + } else if ENABLED_SINKS_STDERR.load(Ordering::Acquire) { let mut stdout = std::io::stderr().lock(); _ = writeln!( &mut stdout, @@ -173,7 +171,7 @@ pub fn submit(record: Record) { }, record.message ); - SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::Relaxed) + writer.written + SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::AcqRel) + writer.written }; if file_size_bytes > SINK_FILE_SIZE_BYTES_MAX { rotate_log_file( @@ -187,7 +185,7 @@ pub fn submit(record: Record) { } pub fn flush() { - if unsafe { ENABLED_SINKS_STDOUT } { + if ENABLED_SINKS_STDOUT.load(Ordering::Acquire) { _ = std::io::stdout().lock().flush(); } let mut file = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| { @@ -265,7 +263,7 @@ fn rotate_log_file( // according to the documentation, it only fails if: // - the file is not writeable: should never happen, // - the size would cause an overflow (implementation specific): 0 should never cause an overflow - atomic_size.store(0, Ordering::Relaxed); + atomic_size.store(0, Ordering::Release); } #[cfg(test)] @@ -298,7 
+296,7 @@ mod tests { std::fs::read_to_string(&rotation_log_file_path).unwrap(), contents, ); - assert_eq!(size.load(Ordering::Relaxed), 0); + assert_eq!(size.load(Ordering::Acquire), 0); } /// Regression test, ensuring that if log level values change we are made aware From 6a7b84eb87ab6764b2ee1152a714c4f00aced8f2 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Sat, 6 Sep 2025 00:47:39 +0200 Subject: [PATCH 044/109] toolchains: Allow users to provide custom paths to toolchains (#37009) - **toolchains: Add new state to toolchain selector** - **Use toolchain term for Add Toolchain button** - **Hoist out a meta function for toolchain listers** Closes #27332 Release Notes: - python: Users can now specify a custom path to their virtual environment from within the picker. --------- Co-authored-by: Danilo Leal --- Cargo.lock | 5 + assets/keymaps/default-linux.json | 8 + assets/keymaps/default-macos.json | 8 + crates/file_finder/src/open_path_prompt.rs | 32 +- crates/language/src/language.rs | 1 + crates/language/src/toolchain.rs | 62 +- crates/languages/src/lib.rs | 2 +- crates/languages/src/python.rs | 100 ++- crates/project/src/lsp_store.rs | 4 +- crates/project/src/project.rs | 74 +- crates/project/src/project_tests.rs | 29 +- crates/project/src/toolchain_store.rs | 284 ++++++- crates/proto/proto/toolchain.proto | 13 + crates/proto/proto/zed.proto | 5 +- crates/proto/src/proto.rs | 4 + crates/repl/src/kernels/mod.rs | 89 +- crates/toolchain_selector/Cargo.toml | 5 + .../src/active_toolchain.rs | 34 +- .../src/toolchain_selector.rs | 802 +++++++++++++++++- crates/workspace/src/persistence.rs | 148 +++- crates/workspace/src/persistence/model.rs | 3 + crates/workspace/src/workspace.rs | 31 + 22 files changed, 1508 insertions(+), 235 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fbdf0e848c356620f2a2cca800cf40ef850c3b13..295c3a83c52e3b355a8e43e9d36c09149fdc694f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17000,10 
+17000,15 @@ checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" name = "toolchain_selector" version = "0.1.0" dependencies = [ + "anyhow", + "convert_case 0.8.0", "editor", + "file_finder", + "futures 0.3.31", "fuzzy", "gpui", "language", + "menu", "picker", "project", "ui", diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 70a002cf081deaf5df66a2173dc17e7f02ce3aeb..ac44b3f1ae55feb11b0027efea14c6afed8cb62a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -628,6 +628,7 @@ "alt-save": "workspace::SaveAll", "ctrl-alt-s": "workspace::SaveAll", "ctrl-k m": "language_selector::Toggle", + "ctrl-k ctrl-m": "toolchain::AddToolchain", "escape": "workspace::Unfollow", "ctrl-k ctrl-left": "workspace::ActivatePaneLeft", "ctrl-k ctrl-right": "workspace::ActivatePaneRight", @@ -1028,6 +1029,13 @@ "tab": "channel_modal::ToggleMode" } }, + { + "context": "ToolchainSelector", + "use_key_equivalents": true, + "bindings": { + "ctrl-shift-a": "toolchain::AddToolchain" + } + }, { "context": "FileFinder || (FileFinder > Picker > Editor)", "bindings": { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 21504c7e623583017459baaac7d25191d7a08b68..337915527ca22f04afc8450cf6a366d1f2995551 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -690,6 +690,7 @@ "cmd-?": "agent::ToggleFocus", "cmd-alt-s": "workspace::SaveAll", "cmd-k m": "language_selector::Toggle", + "cmd-k cmd-m": "toolchain::AddToolchain", "escape": "workspace::Unfollow", "cmd-k cmd-left": "workspace::ActivatePaneLeft", "cmd-k cmd-right": "workspace::ActivatePaneRight", @@ -1094,6 +1095,13 @@ "tab": "channel_modal::ToggleMode" } }, + { + "context": "ToolchainSelector", + "use_key_equivalents": true, + "bindings": { + "cmd-shift-a": "toolchain::AddToolchain" + } + }, { "context": "FileFinder || (FileFinder > Picker > Editor)", "use_key_equivalents": 
true, diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 51e8f5c437ab1aa86433f91022a01e8a2e09f664..c0abb372b28ff817853e9dc7b6523f676359e157 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -23,7 +23,6 @@ use workspace::Workspace; pub(crate) struct OpenPathPrompt; -#[derive(Debug)] pub struct OpenPathDelegate { tx: Option>>>, lister: DirectoryLister, @@ -35,6 +34,9 @@ pub struct OpenPathDelegate { prompt_root: String, path_style: PathStyle, replace_prompt: Task<()>, + render_footer: + Arc>) -> Option + 'static>, + hidden_entries: bool, } impl OpenPathDelegate { @@ -60,9 +62,25 @@ impl OpenPathDelegate { }, path_style, replace_prompt: Task::ready(()), + render_footer: Arc::new(|_, _| None), + hidden_entries: false, } } + pub fn with_footer( + mut self, + footer: Arc< + dyn Fn(&mut Window, &mut Context>) -> Option + 'static, + >, + ) -> Self { + self.render_footer = footer; + self + } + + pub fn show_hidden(mut self) -> Self { + self.hidden_entries = true; + self + } fn get_entry(&self, selected_match_index: usize) -> Option { match &self.directory_state { DirectoryState::List { entries, .. 
} => { @@ -269,7 +287,7 @@ impl PickerDelegate for OpenPathDelegate { self.cancel_flag.store(true, atomic::Ordering::Release); self.cancel_flag = Arc::new(AtomicBool::new(false)); let cancel_flag = self.cancel_flag.clone(); - + let hidden_entries = self.hidden_entries; let parent_path_is_root = self.prompt_root == dir; let current_dir = self.current_dir(); cx.spawn_in(window, async move |this, cx| { @@ -363,7 +381,7 @@ impl PickerDelegate for OpenPathDelegate { }; let mut max_id = 0; - if !suffix.starts_with('.') { + if !suffix.starts_with('.') && !hidden_entries { new_entries.retain(|entry| { max_id = max_id.max(entry.path.id); !entry.path.string.starts_with('.') @@ -781,6 +799,14 @@ impl PickerDelegate for OpenPathDelegate { } } + fn render_footer( + &self, + window: &mut Window, + cx: &mut Context>, + ) -> Option { + (self.render_footer)(window, cx) + } + fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { Some(match &self.directory_state { DirectoryState::Create { .. 
} => SharedString::from("Type a path…"), diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index e4a1510d7df128158691842206a27844304b3237..86faf2b9d316dd068c400c48c5b0b99196cfc191 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -69,6 +69,7 @@ pub use text_diff::{ use theme::SyntaxTheme; pub use toolchain::{ LanguageToolchainStore, LocalLanguageToolchainStore, Toolchain, ToolchainList, ToolchainLister, + ToolchainMetadata, ToolchainScope, }; use tree_sitter::{self, Query, QueryCursor, WasmStore, wasmtime}; use util::serde::default_true; diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index 84b10c7961eddb130f88b24c9e3438ff2882f8d3..2cc86881fbd515317d4d6f5949e82eb3da63a1bb 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -29,6 +29,40 @@ pub struct Toolchain { pub as_json: serde_json::Value, } +/// Declares a scope of a toolchain added by user. +/// +/// When the user adds a toolchain, we give them an option to see that toolchain in: +/// - All of their projects +/// - A project they're currently in. +/// - Only in the subproject they're currently in. +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] +pub enum ToolchainScope { + Subproject(WorktreeId, Arc), + Project, + /// Available in all projects on this box. It wouldn't make sense to show suggestions across machines. + Global, +} + +impl ToolchainScope { + pub fn label(&self) -> &'static str { + match self { + ToolchainScope::Subproject(_, _) => "Subproject", + ToolchainScope::Project => "Project", + ToolchainScope::Global => "Global", + } + } + + pub fn description(&self) -> &'static str { + match self { + ToolchainScope::Subproject(_, _) => { + "Available only in the subproject you're currently in." 
+ } + ToolchainScope::Project => "Available in all locations in your current project.", + ToolchainScope::Global => "Available in all of your projects on this machine.", + } + } +} + impl std::hash::Hash for Toolchain { fn hash(&self, state: &mut H) { let Self { @@ -58,23 +92,41 @@ impl PartialEq for Toolchain { } #[async_trait] -pub trait ToolchainLister: Send + Sync { +pub trait ToolchainLister: Send + Sync + 'static { + /// List all available toolchains for a given path. async fn list( &self, worktree_root: PathBuf, subroot_relative_path: Arc, project_env: Option>, ) -> ToolchainList; - // Returns a term which we should use in UI to refer to a toolchain. - fn term(&self) -> SharedString; - /// Returns the name of the manifest file for this toolchain. - fn manifest_name(&self) -> ManifestName; + + /// Given a user-created toolchain, resolve lister-specific details. + /// Put another way: fill in the details of the toolchain so the user does not have to. + async fn resolve( + &self, + path: PathBuf, + project_env: Option>, + ) -> anyhow::Result; + async fn activation_script( &self, toolchain: &Toolchain, shell: ShellKind, fs: &dyn Fs, ) -> Vec; + /// Returns various "static" bits of information about this toolchain lister. This function should be pure. + fn meta(&self) -> ToolchainMetadata; +} + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct ToolchainMetadata { + /// Returns a term which we should use in UI to refer to toolchains produced by a given `[ToolchainLister]`. + pub term: SharedString, + /// A user-facing placeholder describing the semantic meaning of a path to a new toolchain. + pub new_toolchain_placeholder: SharedString, + /// The name of the manifest file for this toolchain. 
+ pub manifest_name: ManifestName, } #[async_trait(?Send)] diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 168cf8f57ca25444e54c11bb8e594faa94726b5d..33fb2af0612a203b45276bb8e7f580c5a86a90b6 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -97,7 +97,7 @@ pub fn init(languages: Arc, node: NodeRuntime, cx: &mut App) { let python_context_provider = Arc::new(python::PythonContextProvider); let python_lsp_adapter = Arc::new(python::PythonLspAdapter::new(node.clone())); let basedpyright_lsp_adapter = Arc::new(BasedPyrightLspAdapter::new()); - let python_toolchain_provider = Arc::new(python::PythonToolchainProvider::default()); + let python_toolchain_provider = Arc::new(python::PythonToolchainProvider); let rust_context_provider = Arc::new(rust::RustContextProvider); let rust_lsp_adapter = Arc::new(rust::RustLspAdapter); let tailwind_adapter = Arc::new(tailwind::TailwindLspAdapter::new(node.clone())); diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 06fb49293f838fca2d54de076139ac8c4ebacfc2..d1f40a8233a3590b382bc1e0edbe5dd69b3317d8 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -5,19 +5,19 @@ use collections::HashMap; use futures::AsyncBufReadExt; use gpui::{App, Task}; use gpui::{AsyncApp, SharedString}; -use language::Toolchain; use language::ToolchainList; use language::ToolchainLister; use language::language_settings::language_settings; use language::{ContextLocation, LanguageToolchainStore}; use language::{ContextProvider, LspAdapter, LspAdapterDelegate}; use language::{LanguageName, ManifestName, ManifestProvider, ManifestQuery}; +use language::{Toolchain, ToolchainMetadata}; use lsp::LanguageServerBinary; use lsp::LanguageServerName; use node_runtime::{NodeRuntime, VersionStrategy}; use pet_core::Configuration; use pet_core::os_environment::Environment; -use pet_core::python_environment::PythonEnvironmentKind; +use 
pet_core::python_environment::{PythonEnvironment, PythonEnvironmentKind}; use project::Fs; use project::lsp_store::language_server_settings; use serde_json::{Value, json}; @@ -688,17 +688,7 @@ fn python_env_kind_display(k: &PythonEnvironmentKind) -> &'static str { } } -pub(crate) struct PythonToolchainProvider { - term: SharedString, -} - -impl Default for PythonToolchainProvider { - fn default() -> Self { - Self { - term: SharedString::new_static("Virtual Environment"), - } - } -} +pub(crate) struct PythonToolchainProvider; static ENV_PRIORITY_LIST: &[PythonEnvironmentKind] = &[ // Prioritize non-Conda environments. @@ -744,9 +734,6 @@ async fn get_worktree_venv_declaration(worktree_root: &Path) -> Option { #[async_trait] impl ToolchainLister for PythonToolchainProvider { - fn manifest_name(&self) -> language::ManifestName { - ManifestName::from(SharedString::new_static("pyproject.toml")) - } async fn list( &self, worktree_root: PathBuf, @@ -847,32 +834,7 @@ impl ToolchainLister for PythonToolchainProvider { let mut toolchains: Vec<_> = toolchains .into_iter() - .filter_map(|toolchain| { - let mut name = String::from("Python"); - if let Some(version) = &toolchain.version { - _ = write!(name, " {version}"); - } - - let name_and_kind = match (&toolchain.name, &toolchain.kind) { - (Some(name), Some(kind)) => { - Some(format!("({name}; {})", python_env_kind_display(kind))) - } - (Some(name), None) => Some(format!("({name})")), - (None, Some(kind)) => Some(format!("({})", python_env_kind_display(kind))), - (None, None) => None, - }; - - if let Some(nk) = name_and_kind { - _ = write!(name, " {nk}"); - } - - Some(Toolchain { - name: name.into(), - path: toolchain.executable.as_ref()?.to_str()?.to_owned().into(), - language_name: LanguageName::new("Python"), - as_json: serde_json::to_value(toolchain.clone()).ok()?, - }) - }) + .filter_map(venv_to_toolchain) .collect(); toolchains.dedup(); ToolchainList { @@ -881,9 +843,34 @@ impl ToolchainLister for 
PythonToolchainProvider { groups: Default::default(), } } - fn term(&self) -> SharedString { - self.term.clone() + fn meta(&self) -> ToolchainMetadata { + ToolchainMetadata { + term: SharedString::new_static("Virtual Environment"), + new_toolchain_placeholder: SharedString::new_static( + "A path to the python3 executable within a virtual environment, or path to virtual environment itself", + ), + manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")), + } + } + + async fn resolve( + &self, + path: PathBuf, + env: Option>, + ) -> anyhow::Result { + let env = env.unwrap_or_default(); + let environment = EnvironmentApi::from_env(&env); + let locators = pet::locators::create_locators( + Arc::new(pet_conda::Conda::from(&environment)), + Arc::new(pet_poetry::Poetry::from(&environment)), + &environment, + ); + let toolchain = pet::resolve::resolve_environment(&path, &locators, &environment) + .context("Could not find a virtual environment in provided path")?; + let venv = toolchain.resolved.unwrap_or(toolchain.discovered); + venv_to_toolchain(venv).context("Could not convert a venv into a toolchain") } + async fn activation_script( &self, toolchain: &Toolchain, @@ -956,6 +943,31 @@ impl ToolchainLister for PythonToolchainProvider { } } +fn venv_to_toolchain(venv: PythonEnvironment) -> Option { + let mut name = String::from("Python"); + if let Some(ref version) = venv.version { + _ = write!(name, " {version}"); + } + + let name_and_kind = match (&venv.name, &venv.kind) { + (Some(name), Some(kind)) => Some(format!("({name}; {})", python_env_kind_display(kind))), + (Some(name), None) => Some(format!("({name})")), + (None, Some(kind)) => Some(format!("({})", python_env_kind_display(kind))), + (None, None) => None, + }; + + if let Some(nk) = name_and_kind { + _ = write!(name, " {nk}"); + } + + Some(Toolchain { + name: name.into(), + path: venv.executable.as_ref()?.to_str()?.to_owned().into(), + language_name: LanguageName::new("Python"), + as_json: 
serde_json::to_value(venv).ok()?, + }) +} + pub struct EnvironmentApi<'a> { global_search_locations: Arc>>, project_env: &'a HashMap, diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 7f7e759b275baadfe3b2d3931955ad39b03fdb05..a247c07c910c135b46714123c9dec8b452cbc60b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3933,8 +3933,8 @@ impl LspStore { event: &ToolchainStoreEvent, _: &mut Context, ) { - match event { - ToolchainStoreEvent::ToolchainActivated => self.request_workspace_config_refresh(), + if let ToolchainStoreEvent::ToolchainActivated = event { + self.request_workspace_config_refresh() } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 66924f159a0a97dce558d742ca3ee80456542305..0ebfd83f4e414763e0f99d473e3b60dab159f743 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -48,7 +48,7 @@ use clock::ReplicaId; use dap::client::DebugAdapterClient; -use collections::{BTreeSet, HashMap, HashSet}; +use collections::{BTreeSet, HashMap, HashSet, IndexSet}; use debounced_delay::DebouncedDelay; pub use debugger::breakpoint_store::BreakpointWithPosition; use debugger::{ @@ -74,8 +74,9 @@ use gpui::{ }; use language::{ Buffer, BufferEvent, Capability, CodeLabel, CursorShape, Language, LanguageName, - LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, Transaction, - Unclipped, language_settings::InlayHintKind, proto::split_operations, + LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainMetadata, + ToolchainScope, Transaction, Unclipped, language_settings::InlayHintKind, + proto::split_operations, }; use lsp::{ CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, InsertTextMode, @@ -104,6 +105,7 @@ use snippet::Snippet; use snippet_provider::SnippetProvider; use std::{ borrow::Cow, + collections::BTreeMap, ops::Range, path::{Component, Path, PathBuf}, pin::pin, 
@@ -117,7 +119,7 @@ use terminals::Terminals; use text::{Anchor, BufferId, OffsetRangeExt, Point, Rope}; use toolchain_store::EmptyToolchainStore; use util::{ - ResultExt as _, + ResultExt as _, maybe, paths::{PathStyle, RemotePathBuf, SanitizedPath, compare_paths}, }; use worktree::{CreatedEntry, Snapshot, Traversal}; @@ -142,7 +144,7 @@ pub use lsp_store::{ LanguageServerStatus, LanguageServerToQuery, LspStore, LspStoreEvent, SERVER_PROGRESS_THROTTLE_TIMEOUT, }; -pub use toolchain_store::ToolchainStore; +pub use toolchain_store::{ToolchainStore, Toolchains}; const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500; const MAX_SEARCH_RESULT_FILES: usize = 5_000; const MAX_SEARCH_RESULT_RANGES: usize = 10_000; @@ -3370,7 +3372,7 @@ impl Project { path: ProjectPath, language_name: LanguageName, cx: &App, - ) -> Task)>> { + ) -> Task> { if let Some(toolchain_store) = self.toolchain_store.as_ref().map(Entity::downgrade) { cx.spawn(async move |cx| { toolchain_store @@ -3383,16 +3385,70 @@ impl Project { } } - pub async fn toolchain_term( + pub async fn toolchain_metadata( languages: Arc, language_name: LanguageName, - ) -> Option { + ) -> Option { languages .language_for_name(language_name.as_ref()) .await .ok()? 
.toolchain_lister() - .map(|lister| lister.term()) + .map(|lister| lister.meta()) + } + + pub fn add_toolchain( + &self, + toolchain: Toolchain, + scope: ToolchainScope, + cx: &mut Context, + ) { + maybe!({ + self.toolchain_store.as_ref()?.update(cx, |this, cx| { + this.add_toolchain(toolchain, scope, cx); + }); + Some(()) + }); + } + + pub fn remove_toolchain( + &self, + toolchain: Toolchain, + scope: ToolchainScope, + cx: &mut Context, + ) { + maybe!({ + self.toolchain_store.as_ref()?.update(cx, |this, cx| { + this.remove_toolchain(toolchain, scope, cx); + }); + Some(()) + }); + } + + pub fn user_toolchains( + &self, + cx: &App, + ) -> Option>> { + Some(self.toolchain_store.as_ref()?.read(cx).user_toolchains()) + } + + pub fn resolve_toolchain( + &self, + path: PathBuf, + language_name: LanguageName, + cx: &App, + ) -> Task> { + if let Some(toolchain_store) = self.toolchain_store.as_ref().map(Entity::downgrade) { + cx.spawn(async move |cx| { + toolchain_store + .update(cx, |this, cx| { + this.resolve_toolchain(path, language_name, cx) + })? 
+ .await + }) + } else { + Task::ready(Err(anyhow!("This project does not support toolchains"))) + } } pub fn toolchain_store(&self) -> Option> { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 969e18f6d40346aa86d83bd0beb77d6652ff0763..e65da3acd41e7ce4db06821da58fe0969a74217f 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -22,7 +22,7 @@ use itertools::Itertools; use language::{ Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider, - ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainLister, + ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList, ToolchainLister, language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings}, tree_sitter_rust, tree_sitter_typescript, }; @@ -727,7 +727,12 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree( // We're not using venvs at all here, so both folders should fall under the same root. assert_eq!(server.server_id(), LanguageServerId(0)); // Now, let's select a different toolchain for one of subprojects. - let (available_toolchains_for_b, root_path) = project + + let Toolchains { + toolchains: available_toolchains_for_b, + root_path, + .. + } = project .update(cx, |this, cx| { let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id(); this.available_toolchains( @@ -9213,13 +9218,21 @@ fn python_lang(fs: Arc) -> Arc { ..Default::default() } } - // Returns a term which we should use in UI to refer to a toolchain. - fn term(&self) -> SharedString { - SharedString::new_static("virtual environment") + async fn resolve( + &self, + _: PathBuf, + _: Option>, + ) -> anyhow::Result { + Err(anyhow::anyhow!("Not implemented")) } - /// Returns the name of the manifest file for this toolchain. 
- fn manifest_name(&self) -> ManifestName { - SharedString::new_static("pyproject.toml").into() + fn meta(&self) -> ToolchainMetadata { + ToolchainMetadata { + term: SharedString::new_static("Virtual Environment"), + new_toolchain_placeholder: SharedString::new_static( + "A path to the python3 executable within a virtual environment, or path to virtual environment itself", + ), + manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")), + } } async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec { vec![] diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 57d492e26fc7b59df02df0128ed6b9ade132c6d9..e76b98f697768c987f527eaf444c159334b12c96 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -4,20 +4,23 @@ use std::{ sync::Arc, }; -use anyhow::{Result, bail}; +use anyhow::{Context as _, Result, bail}; use async_trait::async_trait; -use collections::BTreeMap; +use collections::{BTreeMap, IndexSet}; use gpui::{ App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, }; use language::{ LanguageName, LanguageRegistry, LanguageToolchainStore, ManifestDelegate, Toolchain, - ToolchainList, + ToolchainList, ToolchainScope, }; use rpc::{ AnyProtoClient, TypedEnvelope, - proto::{self, FromProto, ToProto}, + proto::{ + self, FromProto, ResolveToolchainResponse, ToProto, + resolve_toolchain_response::Response as ResolveResponsePayload, + }, }; use settings::WorktreeId; use util::ResultExt as _; @@ -28,24 +31,31 @@ use crate::{ worktree_store::WorktreeStore, }; -pub struct ToolchainStore(ToolchainStoreInner); +pub struct ToolchainStore { + mode: ToolchainStoreInner, + user_toolchains: BTreeMap>, + _sub: Subscription, +} + enum ToolchainStoreInner { - Local( - Entity, - #[allow(dead_code)] Subscription, - ), - Remote( - Entity, - #[allow(dead_code)] Subscription, - ), + Local(Entity), + Remote(Entity), } 
+pub struct Toolchains { + /// Auto-detected toolchains. + pub toolchains: ToolchainList, + /// Path of the project root at which we ran the automatic toolchain detection. + pub root_path: Arc, + pub user_toolchains: BTreeMap>, +} impl EventEmitter for ToolchainStore {} impl ToolchainStore { pub fn init(client: &AnyProtoClient) { client.add_entity_request_handler(Self::handle_activate_toolchain); client.add_entity_request_handler(Self::handle_list_toolchains); client.add_entity_request_handler(Self::handle_active_toolchain); + client.add_entity_request_handler(Self::handle_resolve_toolchain); } pub fn local( @@ -62,18 +72,26 @@ impl ToolchainStore { active_toolchains: Default::default(), manifest_tree, }); - let subscription = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { + let _sub = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { cx.emit(e.clone()) }); - Self(ToolchainStoreInner::Local(entity, subscription)) + Self { + mode: ToolchainStoreInner::Local(entity), + user_toolchains: Default::default(), + _sub, + } } pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut Context) -> Self { let entity = cx.new(|_| RemoteToolchainStore { client, project_id }); - let _subscription = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { + let _sub = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { cx.emit(e.clone()) }); - Self(ToolchainStoreInner::Remote(entity, _subscription)) + Self { + mode: ToolchainStoreInner::Remote(entity), + user_toolchains: Default::default(), + _sub, + } } pub(crate) fn activate_toolchain( &self, @@ -81,43 +99,130 @@ impl ToolchainStore { toolchain: Toolchain, cx: &mut App, ) -> Task> { - match &self.0 { - ToolchainStoreInner::Local(local, _) => { + match &self.mode { + ToolchainStoreInner::Local(local) => { local.update(cx, |this, cx| this.activate_toolchain(path, toolchain, cx)) } - ToolchainStoreInner::Remote(remote, _) => { + ToolchainStoreInner::Remote(remote) => { remote.update(cx, 
|this, cx| this.activate_toolchain(path, toolchain, cx)) } } } + + pub(crate) fn user_toolchains(&self) -> BTreeMap> { + self.user_toolchains.clone() + } + pub(crate) fn add_toolchain( + &mut self, + toolchain: Toolchain, + scope: ToolchainScope, + cx: &mut Context, + ) { + let did_insert = self + .user_toolchains + .entry(scope) + .or_default() + .insert(toolchain); + if did_insert { + cx.emit(ToolchainStoreEvent::CustomToolchainsModified); + } + } + + pub(crate) fn remove_toolchain( + &mut self, + toolchain: Toolchain, + scope: ToolchainScope, + cx: &mut Context, + ) { + let mut did_remove = false; + self.user_toolchains + .entry(scope) + .and_modify(|toolchains| did_remove = toolchains.shift_remove(&toolchain)); + if did_remove { + cx.emit(ToolchainStoreEvent::CustomToolchainsModified); + } + } + + pub(crate) fn resolve_toolchain( + &self, + abs_path: PathBuf, + language_name: LanguageName, + cx: &mut Context, + ) -> Task> { + debug_assert!(abs_path.is_absolute()); + match &self.mode { + ToolchainStoreInner::Local(local) => local.update(cx, |this, cx| { + this.resolve_toolchain(abs_path, language_name, cx) + }), + ToolchainStoreInner::Remote(remote) => remote.update(cx, |this, cx| { + this.resolve_toolchain(abs_path, language_name, cx) + }), + } + } pub(crate) fn list_toolchains( &self, path: ProjectPath, language_name: LanguageName, cx: &mut Context, - ) -> Task)>> { - match &self.0 { - ToolchainStoreInner::Local(local, _) => { + ) -> Task> { + let user_toolchains = self + .user_toolchains + .iter() + .filter(|(scope, _)| { + if let ToolchainScope::Subproject(worktree_id, relative_path) = scope { + path.worktree_id == *worktree_id && relative_path.starts_with(&path.path) + } else { + true + } + }) + .map(|(scope, toolchains)| { + ( + scope.clone(), + toolchains + .iter() + .filter(|toolchain| toolchain.language_name == language_name) + .cloned() + .collect::>(), + ) + }) + .collect::>(); + let task = match &self.mode { + ToolchainStoreInner::Local(local) => { 
local.update(cx, |this, cx| this.list_toolchains(path, language_name, cx)) } - ToolchainStoreInner::Remote(remote, _) => { + ToolchainStoreInner::Remote(remote) => { remote.read(cx).list_toolchains(path, language_name, cx) } - } + }; + cx.spawn(async move |_, _| { + let (mut toolchains, root_path) = task.await?; + toolchains.toolchains.retain(|toolchain| { + !user_toolchains + .values() + .any(|toolchains| toolchains.contains(toolchain)) + }); + + Some(Toolchains { + toolchains, + root_path, + user_toolchains, + }) + }) } + pub(crate) fn active_toolchain( &self, path: ProjectPath, language_name: LanguageName, cx: &App, ) -> Task> { - match &self.0 { - ToolchainStoreInner::Local(local, _) => Task::ready(local.read(cx).active_toolchain( + match &self.mode { + ToolchainStoreInner::Local(local) => Task::ready(local.read(cx).active_toolchain( path.worktree_id, &path.path, language_name, )), - ToolchainStoreInner::Remote(remote, _) => { + ToolchainStoreInner::Remote(remote) => { remote.read(cx).active_toolchain(path, language_name, cx) } } @@ -197,7 +302,7 @@ impl ToolchainStore { })? .await; let has_values = toolchains.is_some(); - let groups = if let Some((toolchains, _)) = &toolchains { + let groups = if let Some(Toolchains { toolchains, .. }) = &toolchains { toolchains .groups .iter() @@ -211,7 +316,12 @@ impl ToolchainStore { } else { vec![] }; - let (toolchains, relative_path) = if let Some((toolchains, relative_path)) = toolchains { + let (toolchains, relative_path) = if let Some(Toolchains { + toolchains, + root_path: relative_path, + .. 
+ }) = toolchains + { let toolchains = toolchains .toolchains .into_iter() @@ -236,16 +346,45 @@ impl ToolchainStore { relative_worktree_path: Some(relative_path.to_string_lossy().into_owned()), }) } + + async fn handle_resolve_toolchain( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let toolchain = this + .update(&mut cx, |this, cx| { + let language_name = LanguageName::from_proto(envelope.payload.language_name); + let path = PathBuf::from(envelope.payload.abs_path); + this.resolve_toolchain(path, language_name, cx) + })? + .await; + let response = match toolchain { + Ok(toolchain) => { + let toolchain = proto::Toolchain { + name: toolchain.name.to_string(), + path: toolchain.path.to_string(), + raw_json: toolchain.as_json.to_string(), + }; + ResolveResponsePayload::Toolchain(toolchain) + } + Err(e) => ResolveResponsePayload::Error(e.to_string()), + }; + Ok(ResolveToolchainResponse { + response: Some(response), + }) + } + pub fn as_language_toolchain_store(&self) -> Arc { - match &self.0 { - ToolchainStoreInner::Local(local, _) => Arc::new(LocalStore(local.downgrade())), - ToolchainStoreInner::Remote(remote, _) => Arc::new(RemoteStore(remote.downgrade())), + match &self.mode { + ToolchainStoreInner::Local(local) => Arc::new(LocalStore(local.downgrade())), + ToolchainStoreInner::Remote(remote) => Arc::new(RemoteStore(remote.downgrade())), } } pub fn as_local_store(&self) -> Option<&Entity> { - match &self.0 { - ToolchainStoreInner::Local(local, _) => Some(local), - ToolchainStoreInner::Remote(_, _) => None, + match &self.mode { + ToolchainStoreInner::Local(local) => Some(local), + ToolchainStoreInner::Remote(_) => None, } } } @@ -311,6 +450,7 @@ struct RemoteStore(WeakEntity); #[derive(Clone)] pub enum ToolchainStoreEvent { ToolchainActivated, + CustomToolchainsModified, } impl EventEmitter for LocalToolchainStore {} @@ -351,7 +491,7 @@ impl LocalToolchainStore { .await .ok()?; let toolchains = language.toolchain_lister()?; - let 
manifest_name = toolchains.manifest_name(); + let manifest_name = toolchains.meta().manifest_name; let (snapshot, worktree) = this .update(cx, |this, cx| { this.worktree_store @@ -414,6 +554,33 @@ impl LocalToolchainStore { }) .cloned() } + + fn resolve_toolchain( + &self, + path: PathBuf, + language_name: LanguageName, + cx: &mut Context, + ) -> Task> { + let registry = self.languages.clone(); + let environment = self.project_environment.clone(); + cx.spawn(async move |_, cx| { + let language = cx + .background_spawn(registry.language_for_name(&language_name.0)) + .await + .with_context(|| format!("Language {} not found", language_name.0))?; + let toolchain_lister = language.toolchain_lister().with_context(|| { + format!("Language {} does not support toolchains", language_name.0) + })?; + + let project_env = environment + .update(cx, |environment, cx| { + environment.get_directory_environment(path.as_path().into(), cx) + })? + .await; + cx.background_spawn(async move { toolchain_lister.resolve(path, project_env).await }) + .await + }) + } } impl EventEmitter for RemoteToolchainStore {} @@ -556,4 +723,47 @@ impl RemoteToolchainStore { }) }) } + + fn resolve_toolchain( + &self, + abs_path: PathBuf, + language_name: LanguageName, + cx: &mut Context, + ) -> Task> { + let project_id = self.project_id; + let client = self.client.clone(); + cx.background_spawn(async move { + let response: proto::ResolveToolchainResponse = client + .request(proto::ResolveToolchain { + project_id, + language_name: language_name.clone().into(), + abs_path: abs_path.to_string_lossy().into_owned(), + }) + .await?; + + let response = response + .response + .context("Failed to resolve toolchain via RPC")?; + use proto::resolve_toolchain_response::Response; + match response { + Response::Toolchain(toolchain) => { + Ok(Toolchain { + language_name: language_name.clone(), + name: toolchain.name.into(), + // todo(windows) + // Do we need to convert path to native string? 
+ path: PathBuf::from_proto(toolchain.path) + .to_string_lossy() + .to_string() + .into(), + as_json: serde_json::Value::from_str(&toolchain.raw_json) + .context("Deserializing ResolveToolchain LSP response")?, + }) + } + Response::Error(error) => { + anyhow::bail!("{error}"); + } + } + }) + } } diff --git a/crates/proto/proto/toolchain.proto b/crates/proto/proto/toolchain.proto index 08844a307a2c44cf2a30405b3202f10c72db579d..b190322ca0602078ea28d00fe970e4958fb17fb0 100644 --- a/crates/proto/proto/toolchain.proto +++ b/crates/proto/proto/toolchain.proto @@ -44,3 +44,16 @@ message ActiveToolchain { message ActiveToolchainResponse { optional Toolchain toolchain = 1; } + +message ResolveToolchain { + uint64 project_id = 1; + string abs_path = 2; + string language_name = 3; +} + +message ResolveToolchainResponse { + oneof response { + Toolchain toolchain = 1; + string error = 2; + } +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 3763671a7a1f29949194d61c70866f96ca6ad972..39fa1fdd53d140cb5d88da751d843e6a7ad1db70 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -402,7 +402,10 @@ message Envelope { UpdateUserSettings update_user_settings = 368; GetProcesses get_processes = 369; - GetProcessesResponse get_processes_response = 370; // current max + GetProcessesResponse get_processes_response = 370; + + ResolveToolchain resolve_toolchain = 371; + ResolveToolchainResponse resolve_toolchain_response = 372; // current max } reserved 87 to 88; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 3c98ae62e7a4b1489c071a0ac673d23b394c28d5..4c0fc3dc98e22029cf167c0506916d71f3e93602 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -26,6 +26,8 @@ messages!( (ActivateToolchain, Foreground), (ActiveToolchain, Foreground), (ActiveToolchainResponse, Foreground), + (ResolveToolchain, Background), + (ResolveToolchainResponse, Background), (AddNotification, Foreground), 
(AddProjectCollaborator, Foreground), (AddWorktree, Foreground), @@ -459,6 +461,7 @@ request_messages!( (ListToolchains, ListToolchainsResponse), (ActivateToolchain, Ack), (ActiveToolchain, ActiveToolchainResponse), + (ResolveToolchain, ResolveToolchainResponse), (GetPathMetadata, GetPathMetadataResponse), (GetCrashFiles, GetCrashFilesResponse), (CancelLanguageServerWork, Ack), @@ -612,6 +615,7 @@ entity_messages!( ListToolchains, ActivateToolchain, ActiveToolchain, + ResolveToolchain, GetPathMetadata, GetProcesses, CancelLanguageServerWork, diff --git a/crates/repl/src/kernels/mod.rs b/crates/repl/src/kernels/mod.rs index 52188a39c48f5fc07a1f4a64949a82d205f75f9f..fb16cb1ea3b093b0592cb114a1224dc4858630fe 100644 --- a/crates/repl/src/kernels/mod.rs +++ b/crates/repl/src/kernels/mod.rs @@ -11,7 +11,7 @@ use language::LanguageName; pub use native_kernel::*; mod remote_kernels; -use project::{Project, ProjectPath, WorktreeId}; +use project::{Project, ProjectPath, Toolchains, WorktreeId}; pub use remote_kernels::*; use anyhow::Result; @@ -92,49 +92,58 @@ pub fn python_env_kernel_specifications( let background_executor = cx.background_executor().clone(); async move { - let toolchains = if let Some((toolchains, _)) = toolchains.await { - toolchains + let (toolchains, user_toolchains) = if let Some(Toolchains { + toolchains, + root_path: _, + user_toolchains, + }) = toolchains.await + { + (toolchains, user_toolchains) } else { return Ok(Vec::new()); }; - let kernelspecs = toolchains.toolchains.into_iter().map(|toolchain| { - background_executor.spawn(async move { - let python_path = toolchain.path.to_string(); - - // Check if ipykernel is installed - let ipykernel_check = util::command::new_smol_command(&python_path) - .args(&["-c", "import ipykernel"]) - .output() - .await; - - if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() { - // Create a default kernelspec for this environment - let default_kernelspec = JupyterKernelspec { - argv: vec![ - 
python_path.clone(), - "-m".to_string(), - "ipykernel_launcher".to_string(), - "-f".to_string(), - "{connection_file}".to_string(), - ], - display_name: toolchain.name.to_string(), - language: "python".to_string(), - interrupt_mode: None, - metadata: None, - env: None, - }; - - Some(KernelSpecification::PythonEnv(LocalKernelSpecification { - name: toolchain.name.to_string(), - path: PathBuf::from(&python_path), - kernelspec: default_kernelspec, - })) - } else { - None - } - }) - }); + let kernelspecs = user_toolchains + .into_values() + .flatten() + .chain(toolchains.toolchains) + .map(|toolchain| { + background_executor.spawn(async move { + let python_path = toolchain.path.to_string(); + + // Check if ipykernel is installed + let ipykernel_check = util::command::new_smol_command(&python_path) + .args(&["-c", "import ipykernel"]) + .output() + .await; + + if ipykernel_check.is_ok() && ipykernel_check.unwrap().status.success() { + // Create a default kernelspec for this environment + let default_kernelspec = JupyterKernelspec { + argv: vec![ + python_path.clone(), + "-m".to_string(), + "ipykernel_launcher".to_string(), + "-f".to_string(), + "{connection_file}".to_string(), + ], + display_name: toolchain.name.to_string(), + language: "python".to_string(), + interrupt_mode: None, + metadata: None, + env: None, + }; + + Some(KernelSpecification::PythonEnv(LocalKernelSpecification { + name: toolchain.name.to_string(), + path: PathBuf::from(&python_path), + kernelspec: default_kernelspec, + })) + } else { + None + } + }) + }); let kernel_specs = futures::future::join_all(kernelspecs) .await diff --git a/crates/toolchain_selector/Cargo.toml b/crates/toolchain_selector/Cargo.toml index 46b88594fdda8979a861fb33317cae81a32d2ea1..a17f82564093e2ae17f95ec82559f308b910b2dd 100644 --- a/crates/toolchain_selector/Cargo.toml +++ b/crates/toolchain_selector/Cargo.toml @@ -6,10 +6,15 @@ publish.workspace = true license = "GPL-3.0-or-later" [dependencies] +anyhow.workspace = true 
+convert_case.workspace = true editor.workspace = true +file_finder.workspace = true +futures.workspace = true fuzzy.workspace = true gpui.workspace = true language.workspace = true +menu.workspace = true picker.workspace = true project.workspace = true ui.workspace = true diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index bf45bffea30791a062e4a130b0f742f3d47c1342..3e26f3ad6c3d23c4b0e00c4c9f67e37fd9c33d32 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -5,8 +5,8 @@ use gpui::{ AsyncWindowContext, Context, Entity, IntoElement, ParentElement, Render, Subscription, Task, WeakEntity, Window, div, }; -use language::{Buffer, BufferEvent, LanguageName, Toolchain}; -use project::{Project, ProjectPath, WorktreeId, toolchain_store::ToolchainStoreEvent}; +use language::{Buffer, BufferEvent, LanguageName, Toolchain, ToolchainScope}; +use project::{Project, ProjectPath, Toolchains, WorktreeId, toolchain_store::ToolchainStoreEvent}; use ui::{Button, ButtonCommon, Clickable, FluentBuilder, LabelSize, SharedString, Tooltip}; use util::maybe; use workspace::{StatusItemView, Workspace, item::ItemHandle}; @@ -69,15 +69,15 @@ impl ActiveToolchain { .read_with(cx, |this, _| Some(this.language()?.name())) .ok() .flatten()?; - let term = workspace + let meta = workspace .update(cx, |workspace, cx| { let languages = workspace.project().read(cx).languages(); - Project::toolchain_term(languages.clone(), language_name.clone()) + Project::toolchain_metadata(languages.clone(), language_name.clone()) }) .ok()? 
.await?; let _ = this.update(cx, |this, cx| { - this.term = term; + this.term = meta.term; cx.notify(); }); let (worktree_id, path) = active_file @@ -170,7 +170,11 @@ impl ActiveToolchain { let project = workspace .read_with(cx, |this, _| this.project().clone()) .ok()?; - let (toolchains, relative_path) = cx + let Toolchains { + toolchains, + root_path: relative_path, + user_toolchains, + } = cx .update(|_, cx| { project.read(cx).available_toolchains( ProjectPath { @@ -183,8 +187,20 @@ impl ActiveToolchain { }) .ok()? .await?; - if let Some(toolchain) = toolchains.toolchains.first() { - // Since we don't have a selected toolchain, pick one for user here. + // Since we don't have a selected toolchain, pick one for user here. + let default_choice = user_toolchains + .iter() + .find_map(|(scope, toolchains)| { + if scope == &ToolchainScope::Global { + // Ignore global toolchains when making a default choice. They're unlikely to be the right choice. + None + } else { + toolchains.first() + } + }) + .or_else(|| toolchains.toolchains.first()) + .cloned(); + if let Some(toolchain) = &default_choice { workspace::WORKSPACE_DB .set_toolchain( workspace_id, @@ -209,7 +225,7 @@ impl ActiveToolchain { .await; } - toolchains.toolchains.first().cloned() + default_choice } }) } diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs index feeca8cf52a5116d53562826da72a0bb304d16ce..2f946a69152f76912a1da996e429c48e3ec3be10 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -1,25 +1,39 @@ mod active_toolchain; pub use active_toolchain::ActiveToolchain; +use convert_case::Casing as _; use editor::Editor; +use file_finder::OpenPathDelegate; +use futures::channel::oneshot; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ - App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ParentElement, - Render, 
Styled, Task, WeakEntity, Window, actions, + Action, Animation, AnimationExt, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, + Focusable, KeyContext, ParentElement, Render, Styled, Subscription, Task, WeakEntity, Window, + actions, pulsating_between, }; -use language::{LanguageName, Toolchain, ToolchainList}; +use language::{Language, LanguageName, Toolchain, ToolchainScope}; use picker::{Picker, PickerDelegate}; -use project::{Project, ProjectPath, WorktreeId}; -use std::{borrow::Cow, path::Path, sync::Arc}; -use ui::{HighlightedLabel, ListItem, ListItemSpacing, prelude::*}; -use util::ResultExt; +use project::{DirectoryLister, Project, ProjectPath, Toolchains, WorktreeId}; +use std::{ + borrow::Cow, + path::{Path, PathBuf}, + sync::Arc, + time::Duration, +}; +use ui::{ + Divider, HighlightedLabel, KeyBinding, List, ListItem, ListItemSpacing, Navigable, + NavigableEntry, prelude::*, +}; +use util::{ResultExt, maybe, paths::PathStyle}; use workspace::{ModalView, Workspace}; actions!( toolchain, [ /// Selects a toolchain for the current project. - Select + Select, + /// Adds a new toolchain for the current project. 
+ AddToolchain ] ); @@ -28,9 +42,513 @@ pub fn init(cx: &mut App) { } pub struct ToolchainSelector { + state: State, + create_search_state: Arc) -> SearchState + 'static>, + language: Option>, + project: Entity, + language_name: LanguageName, + worktree_id: WorktreeId, + relative_path: Arc, +} + +#[derive(Clone)] +struct SearchState { picker: Entity>, } +struct AddToolchainState { + state: AddState, + project: Entity, + language_name: LanguageName, + root_path: ProjectPath, + weak: WeakEntity, +} + +struct ScopePickerState { + entries: [NavigableEntry; 3], + selected_scope: ToolchainScope, +} + +#[expect( + dead_code, + reason = "These tasks have to be kept alive to run to completion" +)] +enum PathInputState { + WaitingForPath(Task<()>), + Resolving(Task<()>), +} + +enum AddState { + Path { + picker: Entity>, + error: Option>, + input_state: PathInputState, + _subscription: Subscription, + }, + Name { + toolchain: Toolchain, + editor: Entity, + scope_picker: ScopePickerState, + }, +} + +impl AddToolchainState { + fn new( + project: Entity, + language_name: LanguageName, + root_path: ProjectPath, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + let weak = cx.weak_entity(); + + cx.new(|cx| { + let (lister, rx) = Self::create_path_browser_delegate(project.clone(), cx); + let picker = cx.new(|cx| Picker::uniform_list(lister, window, cx)); + Self { + state: AddState::Path { + _subscription: cx.subscribe(&picker, |_, _, _: &DismissEvent, cx| { + cx.stop_propagation(); + }), + picker, + error: None, + input_state: Self::wait_for_path(rx, window, cx), + }, + project, + language_name, + root_path, + weak, + } + }) + } + + fn create_path_browser_delegate( + project: Entity, + cx: &mut Context, + ) -> (OpenPathDelegate, oneshot::Receiver>>) { + let (tx, rx) = oneshot::channel(); + let weak = cx.weak_entity(); + let lister = OpenPathDelegate::new( + tx, + DirectoryLister::Project(project), + false, + PathStyle::current(), + ) + .show_hidden() + 
.with_footer(Arc::new(move |_, cx| { + let error = weak + .read_with(cx, |this, _| { + if let AddState::Path { error, .. } = &this.state { + error.clone() + } else { + None + } + }) + .ok() + .flatten(); + let is_loading = weak + .read_with(cx, |this, _| { + matches!( + this.state, + AddState::Path { + input_state: PathInputState::Resolving(_), + .. + } + ) + }) + .unwrap_or_default(); + Some( + v_flex() + .child(Divider::horizontal()) + .child( + h_flex() + .p_1() + .justify_between() + .gap_2() + .child(Label::new("Select Toolchain Path").color(Color::Muted).map( + |this| { + if is_loading { + this.with_animation( + "select-toolchain-label", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| label.alpha(delta), + ) + .into_any() + } else { + this.into_any_element() + } + }, + )) + .when_some(error, |this, error| { + this.child(Label::new(error).color(Color::Error)) + }), + ) + .into_any(), + ) + })); + + (lister, rx) + } + fn resolve_path( + path: PathBuf, + root_path: ProjectPath, + language_name: LanguageName, + project: Entity, + window: &mut Window, + cx: &mut Context, + ) -> PathInputState { + PathInputState::Resolving(cx.spawn_in(window, async move |this, cx| { + _ = maybe!(async move { + let toolchain = project + .update(cx, |this, cx| { + this.resolve_toolchain(path.clone(), language_name, cx) + })? + .await; + let Ok(toolchain) = toolchain else { + // Go back to the path input state + _ = this.update_in(cx, |this, window, cx| { + if let AddState::Path { + input_state, + picker, + error, + .. 
+ } = &mut this.state + && matches!(input_state, PathInputState::Resolving(_)) + { + let Err(e) = toolchain else { unreachable!() }; + *error = Some(Arc::from(e.to_string())); + let (delegate, rx) = + Self::create_path_browser_delegate(this.project.clone(), cx); + picker.update(cx, |picker, cx| { + *picker = Picker::uniform_list(delegate, window, cx); + picker.set_query( + Arc::from(path.to_string_lossy().as_ref()), + window, + cx, + ); + }); + *input_state = Self::wait_for_path(rx, window, cx); + this.focus_handle(cx).focus(window); + } + }); + return Err(anyhow::anyhow!("Failed to resolve toolchain")); + }; + let resolved_toolchain_path = project.read_with(cx, |this, cx| { + this.find_project_path(&toolchain.path.as_ref(), cx) + })?; + + // Suggest a default scope based on the applicability. + let scope = if let Some(project_path) = resolved_toolchain_path { + if root_path.path.as_ref() != Path::new("") + && project_path.starts_with(&root_path) + { + ToolchainScope::Subproject(root_path.worktree_id, root_path.path) + } else { + ToolchainScope::Project + } + } else { + // This path lies outside of the project. 
+ ToolchainScope::Global + }; + + _ = this.update_in(cx, |this, window, cx| { + let scope_picker = ScopePickerState { + entries: std::array::from_fn(|_| NavigableEntry::focusable(cx)), + selected_scope: scope, + }; + this.state = AddState::Name { + editor: cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_text(toolchain.name.as_ref(), window, cx); + editor + }), + toolchain, + scope_picker, + }; + this.focus_handle(cx).focus(window); + }); + + Result::<_, anyhow::Error>::Ok(()) + }) + .await; + })) + } + + fn wait_for_path( + rx: oneshot::Receiver>>, + window: &mut Window, + cx: &mut Context, + ) -> PathInputState { + let task = cx.spawn_in(window, async move |this, cx| { + maybe!(async move { + let result = rx.await.log_err()?; + + let path = result + .into_iter() + .flat_map(|paths| paths.into_iter()) + .next()?; + this.update_in(cx, |this, window, cx| { + if let AddState::Path { + input_state, error, .. + } = &mut this.state + && matches!(input_state, PathInputState::WaitingForPath(_)) + { + error.take(); + *input_state = Self::resolve_path( + path, + this.root_path.clone(), + this.language_name.clone(), + this.project.clone(), + window, + cx, + ); + } + }) + .ok()?; + Some(()) + }) + .await; + }); + PathInputState::WaitingForPath(task) + } + + fn confirm_toolchain( + &mut self, + _: &menu::Confirm, + window: &mut Window, + cx: &mut Context, + ) { + let AddState::Name { + toolchain, + editor, + scope_picker, + } = &mut self.state + else { + return; + }; + + let text = editor.read(cx).text(cx); + if text.is_empty() { + return; + } + + toolchain.name = SharedString::from(text); + self.project.update(cx, |this, cx| { + this.add_toolchain(toolchain.clone(), scope_picker.selected_scope.clone(), cx); + }); + _ = self.weak.update(cx, |this, cx| { + this.state = State::Search((this.create_search_state)(window, cx)); + this.focus_handle(cx).focus(window); + cx.notify(); + }); + } +} +impl Focusable for AddToolchainState { + fn 
focus_handle(&self, cx: &App) -> FocusHandle { + match &self.state { + AddState::Path { picker, .. } => picker.focus_handle(cx), + AddState::Name { editor, .. } => editor.focus_handle(cx), + } + } +} + +impl AddToolchainState { + fn select_scope(&mut self, scope: ToolchainScope, cx: &mut Context) { + if let AddState::Name { scope_picker, .. } = &mut self.state { + scope_picker.selected_scope = scope; + cx.notify(); + } + } +} + +impl Focusable for State { + fn focus_handle(&self, cx: &App) -> FocusHandle { + match self { + State::Search(state) => state.picker.focus_handle(cx), + State::AddToolchain(state) => state.focus_handle(cx), + } + } +} +impl Render for AddToolchainState { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let theme = cx.theme().clone(); + let weak = self.weak.upgrade(); + let label = SharedString::new_static("Add"); + + v_flex() + .size_full() + // todo: These modal styles shouldn't be needed as the modal picker already has `elevation_3` + // They get duplicated in the middle state of adding a virtual env, but then are needed for this last state + .bg(cx.theme().colors().elevated_surface_background) + .border_1() + .border_color(cx.theme().colors().border_variant) + .rounded_lg() + .when_some(weak, |this, weak| { + this.on_action(window.listener_for( + &weak, + |this: &mut ToolchainSelector, _: &menu::Cancel, window, cx| { + this.state = State::Search((this.create_search_state)(window, cx)); + this.state.focus_handle(cx).focus(window); + cx.notify(); + }, + )) + }) + .on_action(cx.listener(Self::confirm_toolchain)) + .map(|this| match &self.state { + AddState::Path { picker, .. } => this.child(picker.clone()), + AddState::Name { + editor, + scope_picker, + .. 
+ } => { + let scope_options = [ + ToolchainScope::Global, + ToolchainScope::Project, + ToolchainScope::Subproject( + self.root_path.worktree_id, + self.root_path.path.clone(), + ), + ]; + + let mut navigable_scope_picker = Navigable::new( + v_flex() + .child( + h_flex() + .w_full() + .p_2() + .border_b_1() + .border_color(theme.colors().border) + .child(editor.clone()), + ) + .child( + v_flex() + .child( + Label::new("Scope") + .size(LabelSize::Small) + .color(Color::Muted) + .mt_1() + .ml_2(), + ) + .child(List::new().children( + scope_options.iter().enumerate().map(|(i, scope)| { + let is_selected = *scope == scope_picker.selected_scope; + let label = scope.label(); + let description = scope.description(); + let scope_clone_for_action = scope.clone(); + let scope_clone_for_click = scope.clone(); + + div() + .id(SharedString::from(format!("scope-option-{i}"))) + .track_focus(&scope_picker.entries[i].focus_handle) + .on_action(cx.listener( + move |this, _: &menu::Confirm, _, cx| { + this.select_scope( + scope_clone_for_action.clone(), + cx, + ); + }, + )) + .child( + ListItem::new(SharedString::from(format!( + "scope-{i}" + ))) + .toggle_state( + is_selected + || scope_picker.entries[i] + .focus_handle + .contains_focused(window, cx), + ) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .child( + h_flex() + .gap_2() + .child(Label::new(label)) + .child( + Label::new(description) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + .on_click(cx.listener(move |this, _, _, cx| { + this.select_scope( + scope_clone_for_click.clone(), + cx, + ); + })), + ) + }), + )) + .child(Divider::horizontal()) + .child(h_flex().p_1p5().justify_end().map(|this| { + let is_disabled = editor.read(cx).is_empty(cx); + let handle = self.focus_handle(cx); + this.child( + Button::new("add-toolchain", label) + .disabled(is_disabled) + .key_binding(KeyBinding::for_action_in( + &menu::Confirm, + &handle, + window, + cx, + )) + .on_click(cx.listener(|this, _, window, cx| { + 
this.confirm_toolchain( + &menu::Confirm, + window, + cx, + ); + })) + .map(|this| { + if false { + this.with_animation( + "inspecting-user-toolchain", + Animation::new(Duration::from_millis( + 500, + )) + .repeat() + .with_easing(pulsating_between( + 0.4, 0.8, + )), + |label, delta| label.alpha(delta), + ) + .into_any() + } else { + this.into_any_element() + } + }), + ) + })), + ) + .into_any_element(), + ); + + for entry in &scope_picker.entries { + navigable_scope_picker = navigable_scope_picker.entry(entry.clone()); + } + + this.child(navigable_scope_picker.render(window, cx)) + } + }) + } +} + +#[derive(Clone)] +enum State { + Search(SearchState), + AddToolchain(Entity), +} + +impl RenderOnce for State { + fn render(self, _: &mut Window, _: &mut App) -> impl IntoElement { + match self { + State::Search(state) => state.picker.into_any_element(), + State::AddToolchain(state) => state.into_any_element(), + } + } +} impl ToolchainSelector { fn register( workspace: &mut Workspace, @@ -40,6 +558,16 @@ impl ToolchainSelector { workspace.register_action(move |workspace, _: &Select, window, cx| { Self::toggle(workspace, window, cx); }); + workspace.register_action(move |workspace, _: &AddToolchain, window, cx| { + let Some(toolchain_selector) = workspace.active_modal::(cx) else { + Self::toggle(workspace, window, cx); + return; + }; + + toolchain_selector.update(cx, |toolchain_selector, cx| { + toolchain_selector.handle_add_toolchain(&AddToolchain, window, cx); + }); + }); } fn toggle( @@ -105,35 +633,100 @@ impl ToolchainSelector { window: &mut Window, cx: &mut Context, ) -> Self { - let toolchain_selector = cx.entity().downgrade(); - let picker = cx.new(|cx| { - let delegate = ToolchainSelectorDelegate::new( - active_toolchain, - toolchain_selector, - workspace, - worktree_id, - worktree_root, - project, - relative_path, - language_name, + let language_registry = project.read(cx).languages().clone(); + cx.spawn({ + let language_name = language_name.clone(); + async 
move |this, cx| { + let language = language_registry + .language_for_name(&language_name.0) + .await + .ok(); + this.update(cx, |this, cx| { + this.language = language; + cx.notify(); + }) + .ok(); + } + }) + .detach(); + let project_clone = project.clone(); + let language_name_clone = language_name.clone(); + let relative_path_clone = relative_path.clone(); + + let create_search_state = Arc::new(move |window: &mut Window, cx: &mut Context| { + let toolchain_selector = cx.entity().downgrade(); + let picker = cx.new(|cx| { + let delegate = ToolchainSelectorDelegate::new( + active_toolchain.clone(), + toolchain_selector, + workspace.clone(), + worktree_id, + worktree_root.clone(), + project_clone.clone(), + relative_path_clone.clone(), + language_name_clone.clone(), + window, + cx, + ); + Picker::uniform_list(delegate, window, cx) + }); + let picker_focus_handle = picker.focus_handle(cx); + picker.update(cx, |picker, _| { + picker.delegate.focus_handle = picker_focus_handle.clone(); + }); + SearchState { picker } + }); + + Self { + state: State::Search(create_search_state(window, cx)), + create_search_state, + language: None, + project, + language_name, + worktree_id, + relative_path, + } + } + + fn handle_add_toolchain( + &mut self, + _: &AddToolchain, + window: &mut Window, + cx: &mut Context, + ) { + if matches!(self.state, State::Search(_)) { + self.state = State::AddToolchain(AddToolchainState::new( + self.project.clone(), + self.language_name.clone(), + ProjectPath { + worktree_id: self.worktree_id, + path: self.relative_path.clone(), + }, window, cx, - ); - Picker::uniform_list(delegate, window, cx) - }); - Self { picker } + )); + self.state.focus_handle(cx).focus(window); + cx.notify(); + } } } impl Render for ToolchainSelector { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - v_flex().w(rems(34.)).child(self.picker.clone()) + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let mut 
key_context = KeyContext::new_with_defaults(); + key_context.add("ToolchainSelector"); + + v_flex() + .key_context(key_context) + .w(rems(34.)) + .on_action(cx.listener(Self::handle_add_toolchain)) + .child(self.state.clone().render(window, cx)) } } impl Focusable for ToolchainSelector { fn focus_handle(&self, cx: &App) -> FocusHandle { - self.picker.focus_handle(cx) + self.state.focus_handle(cx) } } @@ -142,7 +735,7 @@ impl ModalView for ToolchainSelector {} pub struct ToolchainSelectorDelegate { toolchain_selector: WeakEntity, - candidates: ToolchainList, + candidates: Arc<[(Toolchain, Option)]>, matches: Vec, selected_index: usize, workspace: WeakEntity, @@ -150,6 +743,9 @@ pub struct ToolchainSelectorDelegate { worktree_abs_path_root: Arc, relative_path: Arc, placeholder_text: Arc, + add_toolchain_text: Arc, + project: Entity, + focus_handle: FocusHandle, _fetch_candidates_task: Task>, } @@ -166,19 +762,33 @@ impl ToolchainSelectorDelegate { window: &mut Window, cx: &mut Context>, ) -> Self { + let _project = project.clone(); + let _fetch_candidates_task = cx.spawn_in(window, { async move |this, cx| { - let term = project + let meta = _project .read_with(cx, |this, _| { - Project::toolchain_term(this.languages().clone(), language_name.clone()) + Project::toolchain_metadata(this.languages().clone(), language_name.clone()) }) .ok()? 
.await?; let relative_path = this - .read_with(cx, |this, _| this.delegate.relative_path.clone()) + .update(cx, |this, cx| { + this.delegate.add_toolchain_text = format!( + "Add {}", + meta.term.as_ref().to_case(convert_case::Case::Title) + ) + .into(); + cx.notify(); + this.delegate.relative_path.clone() + }) .ok()?; - let (available_toolchains, relative_path) = project + let Toolchains { + toolchains: available_toolchains, + root_path: relative_path, + user_toolchains, + } = _project .update(cx, |this, cx| { this.available_toolchains( ProjectPath { @@ -200,7 +810,7 @@ impl ToolchainSelectorDelegate { } }; let placeholder_text = - format!("Select a {} for {pretty_path}…", term.to_lowercase(),).into(); + format!("Select a {} for {pretty_path}…", meta.term.to_lowercase(),).into(); let _ = this.update_in(cx, move |this, window, cx| { this.delegate.relative_path = relative_path; this.delegate.placeholder_text = placeholder_text; @@ -208,15 +818,27 @@ impl ToolchainSelectorDelegate { }); let _ = this.update_in(cx, move |this, window, cx| { - this.delegate.candidates = available_toolchains; + this.delegate.candidates = user_toolchains + .into_iter() + .flat_map(|(scope, toolchains)| { + toolchains + .into_iter() + .map(move |toolchain| (toolchain, Some(scope.clone()))) + }) + .chain( + available_toolchains + .toolchains + .into_iter() + .map(|toolchain| (toolchain, None)), + ) + .collect(); if let Some(active_toolchain) = active_toolchain && let Some(position) = this .delegate .candidates - .toolchains .iter() - .position(|toolchain| *toolchain == active_toolchain) + .position(|(toolchain, _)| *toolchain == active_toolchain) { this.delegate.set_selected_index(position, window, cx); } @@ -238,6 +860,9 @@ impl ToolchainSelectorDelegate { placeholder_text, relative_path, _fetch_candidates_task, + project, + focus_handle: cx.focus_handle(), + add_toolchain_text: Arc::from("Add Toolchain"), } } fn relativize_path(path: SharedString, worktree_root: &Path) -> SharedString { @@ 
-263,7 +888,7 @@ impl PickerDelegate for ToolchainSelectorDelegate { fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context>) { if let Some(string_match) = self.matches.get(self.selected_index) { - let toolchain = self.candidates.toolchains[string_match.candidate_id].clone(); + let (toolchain, _) = self.candidates[string_match.candidate_id].clone(); if let Some(workspace_id) = self .workspace .read_with(cx, |this, _| this.database_id()) @@ -330,11 +955,11 @@ impl PickerDelegate for ToolchainSelectorDelegate { cx.spawn_in(window, async move |this, cx| { let matches = if query.is_empty() { candidates - .toolchains .into_iter() .enumerate() - .map(|(index, candidate)| { - let path = Self::relativize_path(candidate.path, &worktree_root_path); + .map(|(index, (candidate, _))| { + let path = + Self::relativize_path(candidate.path.clone(), &worktree_root_path); let string = format!("{}{}", candidate.name, path); StringMatch { candidate_id: index, @@ -346,11 +971,11 @@ impl PickerDelegate for ToolchainSelectorDelegate { .collect() } else { let candidates = candidates - .toolchains .into_iter() .enumerate() - .map(|(candidate_id, toolchain)| { - let path = Self::relativize_path(toolchain.path, &worktree_root_path); + .map(|(candidate_id, (toolchain, _))| { + let path = + Self::relativize_path(toolchain.path.clone(), &worktree_root_path); let string = format!("{}{}", toolchain.name, path); StringMatchCandidate::new(candidate_id, &string) }) @@ -383,11 +1008,11 @@ impl PickerDelegate for ToolchainSelectorDelegate { &self, ix: usize, selected: bool, - _window: &mut Window, - _: &mut Context>, + _: &mut Window, + cx: &mut Context>, ) -> Option { let mat = &self.matches[ix]; - let toolchain = &self.candidates.toolchains[mat.candidate_id]; + let (toolchain, scope) = &self.candidates[mat.candidate_id]; let label = toolchain.name.clone(); let path = Self::relativize_path(toolchain.path.clone(), &self.worktree_abs_path_root); @@ -399,8 +1024,9 @@ impl PickerDelegate 
for ToolchainSelectorDelegate { path_highlights.iter_mut().for_each(|index| { *index -= label.len(); }); + let id: SharedString = format!("toolchain-{ix}",).into(); Some( - ListItem::new(ix) + ListItem::new(id) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) @@ -409,7 +1035,89 @@ impl PickerDelegate for ToolchainSelectorDelegate { HighlightedLabel::new(path, path_highlights) .size(LabelSize::Small) .color(Color::Muted), - ), + ) + .when_some(scope.as_ref(), |this, scope| { + let id: SharedString = format!( + "delete-custom-toolchain-{}-{}", + toolchain.name, toolchain.path + ) + .into(); + let toolchain = toolchain.clone(); + let scope = scope.clone(); + + this.end_slot(IconButton::new(id, IconName::Trash)) + .on_click(cx.listener(move |this, _, _, cx| { + this.delegate.project.update(cx, |this, cx| { + this.remove_toolchain(toolchain.clone(), scope.clone(), cx) + }); + + this.delegate.matches.retain_mut(|m| { + if m.candidate_id == ix { + return false; + } else if m.candidate_id > ix { + m.candidate_id -= 1; + } + true + }); + + this.delegate.candidates = this + .delegate + .candidates + .iter() + .enumerate() + .filter_map(|(i, toolchain)| (ix != i).then_some(toolchain.clone())) + .collect(); + + if this.delegate.selected_index >= ix { + this.delegate.selected_index = + this.delegate.selected_index.saturating_sub(1); + } + cx.stop_propagation(); + cx.notify(); + })) + }), + ) + } + fn render_footer( + &self, + _window: &mut Window, + cx: &mut Context>, + ) -> Option { + Some( + v_flex() + .rounded_b_md() + .child(Divider::horizontal()) + .child( + h_flex() + .p_1p5() + .gap_0p5() + .justify_end() + .child( + Button::new("xd", self.add_toolchain_text.clone()) + .key_binding(KeyBinding::for_action_in( + &AddToolchain, + &self.focus_handle, + _window, + cx, + )) + .on_click(|_, window, cx| { + window.dispatch_action(Box::new(AddToolchain), cx) + }), + ) + .child( + Button::new("select", "Select") + .key_binding(KeyBinding::for_action_in( + 
&menu::Confirm, + &self.focus_handle, + _window, + cx, + )) + .on_click(|_, window, cx| { + window.dispatch_action(menu::Confirm.boxed_clone(), cx) + }), + ), + ) + .into_any_element(), ) } } diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index ef5a86a2762510fbea6f6a1a5172953a0ea20f7d..d674f6dd4d56ba95a664ac7d9e4ebf25969e2125 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -9,7 +9,7 @@ use std::{ }; use anyhow::{Context as _, Result, bail}; -use collections::HashMap; +use collections::{HashMap, IndexSet}; use db::{ query, sqlez::{connection::Connection, domain::Domain}, @@ -18,16 +18,16 @@ use db::{ use gpui::{Axis, Bounds, Task, WindowBounds, WindowId, point, size}; use project::debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}; -use language::{LanguageName, Toolchain}; +use language::{LanguageName, Toolchain, ToolchainScope}; use project::WorktreeId; use remote::{RemoteConnectionOptions, SshConnectionOptions, WslConnectionOptions}; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, - statement::{SqlType, Statement}, + statement::Statement, thread_safe_connection::ThreadSafeConnection, }; -use ui::{App, px}; +use ui::{App, SharedString, px}; use util::{ResultExt, maybe}; use uuid::Uuid; @@ -169,6 +169,7 @@ impl From for BreakpointStateWrapper<'static> { BreakpointStateWrapper(Cow::Owned(kind)) } } + impl StaticColumnCount for BreakpointStateWrapper<'_> { fn column_count() -> usize { 1 @@ -193,11 +194,6 @@ impl Column for BreakpointStateWrapper<'_> { } } -/// This struct is used to implement traits on Vec -#[derive(Debug)] -#[allow(dead_code)] -struct Breakpoints(Vec); - impl sqlez::bindable::StaticColumnCount for Breakpoint { fn column_count() -> usize { // Position, log message, condition message, and hit condition message @@ -246,26 +242,6 @@ impl Column for Breakpoint { } } -impl Column for Breakpoints { - fn column(statement: &mut Statement, start_index: 
i32) -> Result<(Self, i32)> { - let mut breakpoints = Vec::new(); - let mut index = start_index; - - loop { - match statement.column_type(index) { - Ok(SqlType::Null) => break, - _ => { - let (breakpoint, next_index) = Breakpoint::column(statement, index)?; - - breakpoints.push(breakpoint); - index = next_index; - } - } - } - Ok((Breakpoints(breakpoints), index)) - } -} - #[derive(Clone, Debug, PartialEq)] struct SerializedPixels(gpui::Pixels); impl sqlez::bindable::StaticColumnCount for SerializedPixels {} @@ -711,6 +687,18 @@ impl Domain for WorkspaceDb { CREATE UNIQUE INDEX ix_workspaces_location ON workspaces(remote_connection_id, paths); ), + sql!(CREATE TABLE user_toolchains ( + remote_connection_id INTEGER, + workspace_id INTEGER NOT NULL, + worktree_id INTEGER NOT NULL, + relative_worktree_path TEXT NOT NULL, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + raw_json TEXT NOT NULL, + + PRIMARY KEY (workspace_id, worktree_id, relative_worktree_path, language_name, name, path, raw_json) + ) STRICT;), ]; // Allow recovering from bad migration that was initially shipped to nightly @@ -831,6 +819,7 @@ impl WorkspaceDb { session_id: None, breakpoints: self.breakpoints(workspace_id), window_id, + user_toolchains: self.user_toolchains(workspace_id, remote_connection_id), }) } @@ -880,6 +869,73 @@ impl WorkspaceDb { } } + fn user_toolchains( + &self, + workspace_id: WorkspaceId, + remote_connection_id: Option, + ) -> BTreeMap> { + type RowKind = (WorkspaceId, u64, String, String, String, String, String); + + let toolchains: Vec = self + .select_bound(sql! 
{ + SELECT workspace_id, worktree_id, relative_worktree_path, + language_name, name, path, raw_json + FROM user_toolchains WHERE remote_connection_id IS ?1 AND ( + workspace_id IN (0, ?2) + ) + }) + .and_then(|mut statement| { + (statement)((remote_connection_id.map(|id| id.0), workspace_id)) + }) + .unwrap_or_default(); + let mut ret = BTreeMap::<_, IndexSet<_>>::default(); + + for ( + _workspace_id, + worktree_id, + relative_worktree_path, + language_name, + name, + path, + raw_json, + ) in toolchains + { + // INTEGER's that are primary keys (like workspace ids, remote connection ids and such) start at 1, so we're safe to + let scope = if _workspace_id == WorkspaceId(0) { + debug_assert_eq!(worktree_id, u64::MAX); + debug_assert_eq!(relative_worktree_path, String::default()); + ToolchainScope::Global + } else { + debug_assert_eq!(workspace_id, _workspace_id); + debug_assert_eq!( + worktree_id == u64::MAX, + relative_worktree_path == String::default() + ); + + if worktree_id != u64::MAX && relative_worktree_path != String::default() { + ToolchainScope::Subproject( + WorktreeId::from_usize(worktree_id as usize), + Arc::from(relative_worktree_path.as_ref()), + ) + } else { + ToolchainScope::Project + } + }; + let Ok(as_json) = serde_json::from_str(&raw_json) else { + continue; + }; + let toolchain = Toolchain { + name: SharedString::from(name), + path: SharedString::from(path), + language_name: LanguageName::from_proto(language_name), + as_json, + }; + ret.entry(scope).or_default().insert(toolchain); + } + + ret + } + /// Saves a workspace using the worktree roots. 
Will garbage collect any workspaces /// that used this workspace previously pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) { @@ -935,6 +991,22 @@ impl WorkspaceDb { } } } + for (scope, toolchains) in workspace.user_toolchains { + for toolchain in toolchains { + let query = sql!(INSERT OR REPLACE INTO user_toolchains(remote_connection_id, workspace_id, worktree_id, relative_worktree_path, language_name, name, path, raw_json) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)); + let (workspace_id, worktree_id, relative_worktree_path) = match scope { + ToolchainScope::Subproject(worktree_id, ref path) => (Some(workspace.id), Some(worktree_id), Some(path.to_string_lossy().into_owned())), + ToolchainScope::Project => (Some(workspace.id), None, None), + ToolchainScope::Global => (None, None, None), + }; + let args = (remote_connection_id, workspace_id.unwrap_or(WorkspaceId(0)), worktree_id.map_or(usize::MAX,|id| id.to_usize()), relative_worktree_path.unwrap_or_default(), + toolchain.language_name.as_ref().to_owned(), toolchain.name.to_string(), toolchain.path.to_string(), toolchain.as_json.to_string()); + if let Err(err) = conn.exec_bound(query)?(args) { + log::error!("{err}"); + continue; + } + } + } conn.exec_bound(sql!( DELETE @@ -1797,6 +1869,7 @@ mod tests { }, session_id: None, window_id: None, + user_toolchains: Default::default(), }; db.save_workspace(workspace.clone()).await; @@ -1917,6 +1990,7 @@ mod tests { }, session_id: None, window_id: None, + user_toolchains: Default::default(), }; db.save_workspace(workspace.clone()).await; @@ -1950,6 +2024,7 @@ mod tests { breakpoints: collections::BTreeMap::default(), session_id: None, window_id: None, + user_toolchains: Default::default(), }; db.save_workspace(workspace_without_breakpoint.clone()) @@ -2047,6 +2122,7 @@ mod tests { breakpoints: Default::default(), session_id: None, window_id: None, + user_toolchains: Default::default(), }; let workspace_2 = SerializedWorkspace { @@ -2061,6 +2137,7 @@ 
mod tests { breakpoints: Default::default(), session_id: None, window_id: None, + user_toolchains: Default::default(), }; db.save_workspace(workspace_1.clone()).await; @@ -2167,6 +2244,7 @@ mod tests { centered_layout: false, session_id: None, window_id: Some(999), + user_toolchains: Default::default(), }; db.save_workspace(workspace.clone()).await; @@ -2200,6 +2278,7 @@ mod tests { centered_layout: false, session_id: None, window_id: Some(1), + user_toolchains: Default::default(), }; let mut workspace_2 = SerializedWorkspace { @@ -2214,6 +2293,7 @@ mod tests { breakpoints: Default::default(), session_id: None, window_id: Some(2), + user_toolchains: Default::default(), }; db.save_workspace(workspace_1.clone()).await; @@ -2255,6 +2335,7 @@ mod tests { centered_layout: false, session_id: None, window_id: Some(3), + user_toolchains: Default::default(), }; db.save_workspace(workspace_3.clone()).await; @@ -2292,6 +2373,7 @@ mod tests { breakpoints: Default::default(), session_id: Some("session-id-1".to_owned()), window_id: Some(10), + user_toolchains: Default::default(), }; let workspace_2 = SerializedWorkspace { @@ -2306,6 +2388,7 @@ mod tests { breakpoints: Default::default(), session_id: Some("session-id-1".to_owned()), window_id: Some(20), + user_toolchains: Default::default(), }; let workspace_3 = SerializedWorkspace { @@ -2320,6 +2403,7 @@ mod tests { breakpoints: Default::default(), session_id: Some("session-id-2".to_owned()), window_id: Some(30), + user_toolchains: Default::default(), }; let workspace_4 = SerializedWorkspace { @@ -2334,6 +2418,7 @@ mod tests { breakpoints: Default::default(), session_id: None, window_id: None, + user_toolchains: Default::default(), }; let connection_id = db @@ -2359,6 +2444,7 @@ mod tests { breakpoints: Default::default(), session_id: Some("session-id-2".to_owned()), window_id: Some(50), + user_toolchains: Default::default(), }; let workspace_6 = SerializedWorkspace { @@ -2373,6 +2459,7 @@ mod tests { centered_layout: false, 
session_id: Some("session-id-3".to_owned()), window_id: Some(60), + user_toolchains: Default::default(), }; db.save_workspace(workspace_1.clone()).await; @@ -2424,6 +2511,7 @@ mod tests { centered_layout: false, session_id: None, window_id: None, + user_toolchains: Default::default(), } } @@ -2458,6 +2546,7 @@ mod tests { session_id: Some("one-session".to_owned()), breakpoints: Default::default(), window_id: Some(window_id), + user_toolchains: Default::default(), }) .collect::>(); @@ -2555,6 +2644,7 @@ mod tests { session_id: Some("one-session".to_owned()), breakpoints: Default::default(), window_id: Some(window_id), + user_toolchains: Default::default(), }) .collect::>(); diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 005a1ba2347f8ac3847199ad4564d8ca45420f4a..08a2f2e38dd142848f8a9c07652e147b58bee233 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -5,12 +5,14 @@ use crate::{ }; use anyhow::Result; use async_recursion::async_recursion; +use collections::IndexSet; use db::sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, }; use gpui::{AsyncWindowContext, Entity, WeakEntity}; +use language::{Toolchain, ToolchainScope}; use project::{Project, debugger::breakpoint_store::SourceBreakpoint}; use remote::RemoteConnectionOptions; use std::{ @@ -57,6 +59,7 @@ pub(crate) struct SerializedWorkspace { pub(crate) docks: DockStructure, pub(crate) session_id: Option, pub(crate) breakpoints: BTreeMap, Vec>, + pub(crate) user_toolchains: BTreeMap>, pub(crate) window_id: Option, } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 6b4e7c1731b23e2e35086431d4d83bda4958d33f..58373b5d1a30a431106282d26589aa09694d3382 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -73,6 +73,7 @@ use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, 
ProjectPath, ResolvedPath, Worktree, WorktreeId, debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus}, + toolchain_store::ToolchainStoreEvent, }; use remote::{RemoteClientDelegate, RemoteConnectionOptions, remote_client::ConnectionIdentifier}; use schemars::JsonSchema; @@ -1275,6 +1276,19 @@ impl Workspace { }, ) .detach(); + if let Some(toolchain_store) = project.read(cx).toolchain_store() { + cx.subscribe_in( + &toolchain_store, + window, + |workspace, _, event, window, cx| match event { + ToolchainStoreEvent::CustomToolchainsModified => { + workspace.serialize_workspace(window, cx); + } + _ => {} + }, + ) + .detach(); + } cx.on_focus_lost(window, |this, window, cx| { let focus_handle = this.focus_handle(cx); @@ -1565,6 +1579,16 @@ impl Workspace { })? .await; } + if let Some(workspace) = serialized_workspace.as_ref() { + project_handle.update(cx, |this, cx| { + for (scope, toolchains) in &workspace.user_toolchains { + for toolchain in toolchains { + this.add_toolchain(toolchain.clone(), scope.clone(), cx); + } + } + })?; + } + let window = if let Some(window) = requesting_window { let centered_layout = serialized_workspace .as_ref() @@ -5240,10 +5264,16 @@ impl Workspace { .read(cx) .all_source_breakpoints(cx) }); + let user_toolchains = self + .project + .read(cx) + .user_toolchains(cx) + .unwrap_or_default(); let center_group = build_serialized_pane_group(&self.center.root, window, cx); let docks = build_serialized_docks(self, window, cx); let window_bounds = Some(SerializedWindowBounds(window.window_bounds())); + let serialized_workspace = SerializedWorkspace { id: database_id, location, @@ -5256,6 +5286,7 @@ impl Workspace { session_id: self.session_id.clone(), breakpoints, window_id: Some(window.window_handle().window_id().as_u64()), + user_toolchains, }; window.spawn(cx, async move |_| { From a6a111cadd34d79ef55d5b70d563178bbaffd965 Mon Sep 17 00:00:00 2001 From: chbk Date: Sat, 6 Sep 2025 01:36:36 +0200 Subject: [PATCH 045/109] 
Highlight labels in Go (#37673) Release Notes: - Highlight labels in Go | Zed 0.202.7 | With this PR | | --- | --- | | go-0 202 7 | go-pr | --- crates/languages/src/go/highlights.scm | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/languages/src/go/highlights.scm b/crates/languages/src/go/highlights.scm index bb0eaab88a1c0c79a04496d453831cf396d706b6..5d630cbdfc746b56320cd5083222897d84dbf528 100644 --- a/crates/languages/src/go/highlights.scm +++ b/crates/languages/src/go/highlights.scm @@ -4,6 +4,8 @@ (field_identifier) @property (package_identifier) @namespace +(label_name) @label + (keyed_element . (literal_element From 23dc1f5ea4061591ed44121e8a4ba191f7e7b647 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 5 Sep 2025 18:09:50 -0700 Subject: [PATCH 046/109] Disable foreign keys in sqlite when running migrations (#37572) Closes #37473 ### Background Previously, we enabled foreign keys at all times for our sqlite database that we use for client-side state. The problem with this is that In sqlite, `alter table` is somewhat limited, so for many migrations, you must *recreate* the table: create a new table called e.g. `workspace__2`, then copy all of the data from `workspaces` into `workspace__2`, then delete the old `workspaces` table and rename `workspaces__2` to `workspaces`. The way foreign keys work in sqlite, when we delete the old table, all of its associated records in other tables will be deleted due to `on delete cascade` clauses. Unfortunately, one of the types of associated records that can be deleted are `editors`, which sometimes store unsaved text. It is very bad to delete these records, as they are the *only* place that this unsaved text is stored. This has already happened multiple times as we have migrated tables as we develop Zed, but I caused it to happened again in https://github.com/zed-industries/zed/pull/36714. 
### The Fix The SQLite docs recommend a multi-step approach to migrations where you: * disable foreign keys * start a transaction * create a new table * populate the new table with data from the old table * delete the old table * rename the new table to the old name * run a foreign key check * if it passes, commit the transaction * enable foreign keys In this PR, I've adjusted our sqlite migration code path to follow this pattern more closely. Specifically, we disable foreign key checks before running migrations, run a foreign key check before committing, and then enable foreign key checks after the migrations are done. In addition, I've added a generic query that we run *before* running the foreign key check that explicitly deletes any rows that have dangling foreign keys. This way, we avoid failing the migration (and breaking the app) if a migration deletes data that *does* cause associated records to need to be deleted. But now, in the common case where we migrate old data into the new table and keep the ids, all of the associated data will be preserved. Release Notes: - Fixed a bug where workspace state would be lost when upgrading from Zed 0.201.x or below. 
--- Cargo.lock | 1 + crates/sqlez/Cargo.toml | 1 + crates/sqlez/src/migrations.rs | 51 ++++++++++++++++++++-- crates/sqlez/src/thread_safe_connection.rs | 11 +++++ crates/workspace/src/persistence.rs | 3 ++ 5 files changed, 64 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 295c3a83c52e3b355a8e43e9d36c09149fdc694f..f4c94f8078b1ab392ed1a50e15c71dab1921f0a3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15347,6 +15347,7 @@ dependencies = [ "futures 0.3.31", "indoc", "libsqlite3-sys", + "log", "parking_lot", "smol", "sqlformat", diff --git a/crates/sqlez/Cargo.toml b/crates/sqlez/Cargo.toml index 16a3adebae24e0573f9e7ada18bc9259ff588ad1..6eb75aa171979283325d22300f95d584cee2cffb 100644 --- a/crates/sqlez/Cargo.toml +++ b/crates/sqlez/Cargo.toml @@ -14,6 +14,7 @@ collections.workspace = true futures.workspace = true indoc.workspace = true libsqlite3-sys.workspace = true +log.workspace = true parking_lot.workspace = true smol.workspace = true sqlformat.workspace = true diff --git a/crates/sqlez/src/migrations.rs b/crates/sqlez/src/migrations.rs index 2429ddeb4127591b56fb74a9c84884d9dc5f378f..567d82f9afe22ea4ab126c0989891c5d603879fd 100644 --- a/crates/sqlez/src/migrations.rs +++ b/crates/sqlez/src/migrations.rs @@ -59,6 +59,7 @@ impl Connection { let mut store_completed_migration = self .exec_bound("INSERT INTO migrations (domain, step, migration) VALUES (?, ?, ?)")?; + let mut did_migrate = false; for (index, migration) in migrations.iter().enumerate() { let migration = sqlformat::format(migration, &sqlformat::QueryParams::None, Default::default()); @@ -70,9 +71,7 @@ impl Connection { &sqlformat::QueryParams::None, Default::default(), ); - if completed_migration == migration - || migration.trim().starts_with("-- ALLOW_MIGRATION_CHANGE") - { + if completed_migration == migration { // Migration already run. 
Continue continue; } else if should_allow_migration_change(index, &completed_migration, &migration) @@ -91,12 +90,58 @@ impl Connection { } self.eager_exec(&migration)?; + did_migrate = true; store_completed_migration((domain, index, migration))?; } + if did_migrate { + self.delete_rows_with_orphaned_foreign_key_references()?; + self.exec("PRAGMA foreign_key_check;")?()?; + } + Ok(()) }) } + + /// Delete any rows that were orphaned by a migration. This is needed + /// because we disable foreign key constraints during migrations, so + /// that it's possible to re-create a table with the same name, without + /// deleting all associated data. + fn delete_rows_with_orphaned_foreign_key_references(&self) -> Result<()> { + let foreign_key_info: Vec<(String, String, String, String)> = self.select( + r#" + SELECT DISTINCT + schema.name as child_table, + foreign_keys.[from] as child_key, + foreign_keys.[table] as parent_table, + foreign_keys.[to] as parent_key + FROM sqlite_schema schema + JOIN pragma_foreign_key_list(schema.name) foreign_keys + WHERE + schema.type = 'table' AND + schema.name NOT LIKE "sqlite_%" + "#, + )?()?; + + if !foreign_key_info.is_empty() { + log::info!( + "Found {} foreign key relationships to check", + foreign_key_info.len() + ); + } + + for (child_table, child_key, parent_table, parent_key) in foreign_key_info { + self.exec(&format!( + " + DELETE FROM {child_table} + WHERE {child_key} IS NOT NULL and {child_key} NOT IN + (SELECT {parent_key} FROM {parent_table}) + " + ))?()?; + } + + Ok(()) + } } #[cfg(test)] diff --git a/crates/sqlez/src/thread_safe_connection.rs b/crates/sqlez/src/thread_safe_connection.rs index 58d3afe78fb4d8b211c48c0ae1f9f72af74ad5c1..482905ac817bf94fcb64cb858b784c94283b686c 100644 --- a/crates/sqlez/src/thread_safe_connection.rs +++ b/crates/sqlez/src/thread_safe_connection.rs @@ -95,6 +95,14 @@ impl ThreadSafeConnectionBuilder { let mut migration_result = anyhow::Result::<()>::Err(anyhow::anyhow!("Migration never run")); + 
let foreign_keys_enabled: bool = + connection.select_row::("PRAGMA foreign_keys")?() + .unwrap_or(None) + .map(|enabled| enabled != 0) + .unwrap_or(false); + + connection.exec("PRAGMA foreign_keys = OFF;")?()?; + for _ in 0..MIGRATION_RETRIES { migration_result = connection .with_savepoint("thread_safe_multi_migration", || M::migrate(connection)); @@ -104,6 +112,9 @@ impl ThreadSafeConnectionBuilder { } } + if foreign_keys_enabled { + connection.exec("PRAGMA foreign_keys = ON;")?()?; + } migration_result }) .await?; diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d674f6dd4d56ba95a664ac7d9e4ebf25969e2125..797c4796830ff767a0213058c417bb3a764c6bec 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -699,6 +699,9 @@ impl Domain for WorkspaceDb { PRIMARY KEY (workspace_id, worktree_id, relative_worktree_path, language_name, name, path, raw_json) ) STRICT;), + sql!( + DROP TABLE ssh_connections; + ), ]; // Allow recovering from bad migration that was initially shipped to nightly From 47a475681f43718d750b484af9c3cc189c1ac58e Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Fri, 5 Sep 2025 22:22:55 -0600 Subject: [PATCH 047/109] Optimize Chunks::seek when offset is in current chunk (#37659) Release Notes: - N/A --- crates/rope/src/rope.rs | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 33886854220862c60153dc3ea1f02180c62212a3..9185b5baa300af93ec7ceb3e951ae6ba71772721 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -639,18 +639,20 @@ impl<'a> Chunks<'a> { pub fn seek(&mut self, mut offset: usize) { offset = offset.clamp(self.range.start, self.range.end); - let bias = if self.reversed { - Bias::Left + if self.reversed { + if offset > self.chunks.end() { + self.chunks.seek_forward(&offset, Bias::Left); + } else if offset <= *self.chunks.start() { + 
self.chunks.seek(&offset, Bias::Left); + } } else { - Bias::Right + if offset >= self.chunks.end() { + self.chunks.seek_forward(&offset, Bias::Right); + } else if offset < *self.chunks.start() { + self.chunks.seek(&offset, Bias::Right); + } }; - if offset >= self.chunks.end() { - self.chunks.seek_forward(&offset, bias); - } else { - self.chunks.seek(&offset, bias); - } - self.offset = offset; } From 8c9442ad11691004278607d83a4205807c644e82 Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Sat, 6 Sep 2025 10:46:08 +0530 Subject: [PATCH 048/109] language_models: Skip empty delta text content in OpenAI and OpenAI compatible provider (#37626) Closes #37302 Related: #37614 In case of open_ai_compatible providers like Zhipu AI and z.ai they return empty content along with usage data. below is the example json captured from z.ai. We now ignore empty content returned by providers now to avoid this issue where we would return the same empty content back to provider which would error out. 
``` OpenAI Stream Response JSON: { "id": "2025090518465610d80dc21e66426d", "created": 1757069216, "model": "glm-4.5", "choices": [ { "index": 0, "finish_reason": "tool_calls", "delta": { "role": "assistant", "content": "" } } ], "usage": { "prompt_tokens": 7882, "completion_tokens": 150, "total_tokens": 8032, "prompt_tokens_details": { "cached_tokens": 7881 } } } ``` Release Notes: - Skip empty delta text content in OpenAI and OpenAI compatible provider Signed-off-by: Umesh Yadav --- crates/language_models/src/provider/open_ai.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 4348fd42110b2554de801b812a7b001dc49ad06e..cfd43033515e2c3527c8d0dfbf1267fb96793819 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -586,7 +586,9 @@ impl OpenAiEventMapper { }; if let Some(content) = choice.delta.content.clone() { - events.push(Ok(LanguageModelCompletionEvent::Text(content))); + if !content.is_empty() { + events.push(Ok(LanguageModelCompletionEvent::Text(content))); + } } if let Some(tool_calls) = choice.delta.tool_calls.as_ref() { From 1f37fbd0511fcafd6b39e9de4d6d6db7244453dd Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Sat, 6 Sep 2025 11:12:15 +0530 Subject: [PATCH 049/109] language_models: Use `/models/user` for fetching OpenRouter models (#37534) This PR switches the OpenRouter integration from fetching all models to fetching only the models specified in the user's account preferences. This will help improve the experience **The Problem** The previous implementation used the `/models` endpoint, which returned an exhaustive list of all models supported by OpenRouter. This resulted in a long and cluttered model selection dropdown in Zed, making it difficult for users to find the models they actually use. 
**The Solution** We now use the `/models/user` endpoint. This API call returns a curated list based on the models and providers the user has selected in their [OpenRouter dashboard](https://openrouter.ai/models). Ref: [OpenRouter API Docs for User-Filtered Models](https://openrouter.ai/docs/api-reference/list-models-filtered-by-user-provider-preferences) Release Notes: - language_models: Support OpenRouter user preferences for available models --- .../src/provider/open_router.rs | 25 +++++++++++++++---- crates/open_router/src/open_router.rs | 8 ++++-- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index 9138f6b82e7e74e9e6a7468306b2f5cf6768987e..f73a97e6426f80e1ad8d1b8214e16bf361d0f0ce 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -92,7 +92,7 @@ pub struct State { api_key_from_env: bool, http_client: Arc, available_models: Vec, - fetch_models_task: Option>>, + fetch_models_task: Option>>, settings: OpenRouterSettings, _subscription: Subscription, } @@ -178,20 +178,35 @@ impl State { }) } - fn fetch_models(&mut self, cx: &mut Context) -> Task> { + fn fetch_models( + &mut self, + cx: &mut Context, + ) -> Task> { let settings = &AllLanguageModelSettings::get_global(cx).open_router; let http_client = self.http_client.clone(); let api_url = settings.api_url.clone(); - + let Some(api_key) = self.api_key.clone() else { + return Task::ready(Err(LanguageModelCompletionError::NoApiKey { + provider: PROVIDER_NAME, + })); + }; cx.spawn(async move |this, cx| { - let models = list_models(http_client.as_ref(), &api_url) + let models = list_models(http_client.as_ref(), &api_url, &api_key) .await - .map_err(|e| anyhow::anyhow!("OpenRouter error: {:?}", e))?; + .map_err(|e| { + LanguageModelCompletionError::Other(anyhow::anyhow!( + "OpenRouter error: {:?}", + e + )) + })?; this.update(cx, 
|this, cx| { this.available_models = models; cx.notify(); }) + .map_err(|e| LanguageModelCompletionError::Other(e))?; + + Ok(()) }) } diff --git a/crates/open_router/src/open_router.rs b/crates/open_router/src/open_router.rs index dfaa49746d093810924f744cd1aeb3e8747ddb00..cbc6c243d87c8f9ea3d0186dbecb8f0ac2e10a90 100644 --- a/crates/open_router/src/open_router.rs +++ b/crates/open_router/src/open_router.rs @@ -529,12 +529,16 @@ pub async fn stream_completion( pub async fn list_models( client: &dyn HttpClient, api_url: &str, + api_key: &str, ) -> Result, OpenRouterError> { - let uri = format!("{api_url}/models"); + let uri = format!("{api_url}/models/user"); let request_builder = HttpRequest::builder() .method(Method::GET) .uri(uri) - .header("Accept", "application/json"); + .header("Accept", "application/json") + .header("Authorization", format!("Bearer {}", api_key)) + .header("HTTP-Referer", "https://zed.dev") + .header("X-Title", "Zed Editor"); let request = request_builder .body(AsyncBody::default()) From 777ce7cc97b161feef64f67481b76415a2d848b4 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 6 Sep 2025 10:37:59 +0300 Subject: [PATCH 050/109] Fixed LSP binary info not being shown in full (#37682) Follow-up of https://github.com/zed-industries/zed/pull/37083 Closes https://github.com/zed-industries/zed/issues/37677 Release Notes: - Fixed LSP binary info not being shown in full --- crates/language_tools/src/lsp_log_view.rs | 18 ++++++++++++------ crates/project/src/lsp_store/log_store.rs | 14 -------------- 2 files changed, 12 insertions(+), 20 deletions(-) diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index b1f1e5c4f62b4c14b88cdd3de27a1624c7c7158f..fb63ab9a99147328c4987bd80b698ef4a477f013 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -325,7 +325,7 @@ impl LspLogView { let server_info = format!( "* Server: {NAME} (id {ID}) -* Binary: 
{BINARY:#?} +* Binary: {BINARY} * Registered workspace folders: {WORKSPACE_FOLDERS} @@ -335,10 +335,10 @@ impl LspLogView { * Configuration: {CONFIGURATION}", NAME = info.name, ID = info.id, - BINARY = info.binary.as_ref().map_or_else( - || "Unknown".to_string(), - |bin| bin.path.as_path().to_string_lossy().to_string() - ), + BINARY = info + .binary + .as_ref() + .map_or_else(|| "Unknown".to_string(), |binary| format!("{binary:#?}")), WORKSPACE_FOLDERS = info.workspace_folders.join(", "), CAPABILITIES = serde_json::to_string_pretty(&info.capabilities) .unwrap_or_else(|e| format!("Failed to serialize capabilities: {e}")), @@ -990,10 +990,16 @@ impl Render for LspLogToolbarItemView { let server_id = server.server_id; let rpc_trace_enabled = server.rpc_trace_enabled; let log_view = log_view.clone(); + let label = match server.selected_entry { + LogKind::Rpc => RPC_MESSAGES, + LogKind::Trace => SERVER_TRACE, + LogKind::Logs => SERVER_LOGS, + LogKind::ServerInfo => SERVER_INFO, + }; PopoverMenu::new("LspViewSelector") .anchor(Corner::TopLeft) .trigger( - Button::new("language_server_menu_header", server.selected_entry.label()) + Button::new("language_server_menu_header", label) .icon(IconName::ChevronDown) .icon_size(IconSize::Small) .icon_color(Color::Muted), diff --git a/crates/project/src/lsp_store/log_store.rs b/crates/project/src/lsp_store/log_store.rs index 67a20dd6cd8b2f5d6ca48d7790fc0b2e60aff370..00098712bf0092a6795de2ed48c7ccf15925c555 100644 --- a/crates/project/src/lsp_store/log_store.rs +++ b/crates/project/src/lsp_store/log_store.rs @@ -16,11 +16,6 @@ const SEND_LINE: &str = "\n// Send:"; const RECEIVE_LINE: &str = "\n// Receive:"; const MAX_STORED_LOG_ENTRIES: usize = 2000; -const RPC_MESSAGES: &str = "RPC Messages"; -const SERVER_LOGS: &str = "Server Logs"; -const SERVER_TRACE: &str = "Server Trace"; -const SERVER_INFO: &str = "Server Info"; - pub fn init(on_headless_host: bool, cx: &mut App) -> Entity { let log_store = cx.new(|cx| 
LogStore::new(on_headless_host, cx)); cx.set_global(GlobalLogStore(log_store.clone())); @@ -216,15 +211,6 @@ impl LogKind { LanguageServerLogType::Rpc { .. } => Self::Rpc, } } - - pub fn label(&self) -> &'static str { - match self { - LogKind::Rpc => RPC_MESSAGES, - LogKind::Trace => SERVER_TRACE, - LogKind::Logs => SERVER_LOGS, - LogKind::ServerInfo => SERVER_INFO, - } - } } impl LogStore { From 1d828b6ac6028e5ffa421a14b08172ec86bd175d Mon Sep 17 00:00:00 2001 From: Marco Groot <60631182+marcogroot@users.noreply.github.com> Date: Sat, 6 Sep 2025 20:29:34 +1000 Subject: [PATCH 051/109] Fix broken link in `CONTRIBUTING.md` (#37688) Can see currently the link is dead currently, but this changes fixes locally https://github.com/user-attachments/assets/e01d9c47-e91e-4c24-8285-01e3b45583b9 Release Notes: - N/A --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 407ba002c7bc5a75c922faa72f1f270c62e82410..1c0b1e363ed0f04ff33c070a4a84815cece78545 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -65,7 +65,7 @@ If you would like to add a new icon to the Zed icon theme, [open a Discussion](h ## Bird's-eye view of Zed -We suggest you keep the [zed glossary](docs/src/development/GLOSSARY.md) at your side when starting out. It lists and explains some of the structures and terms you will see throughout the codebase. +We suggest you keep the [zed glossary](docs/src/development/glossary.md) at your side when starting out. It lists and explains some of the structures and terms you will see throughout the codebase. 
Zed is made up of several smaller crates - let's go over those you're most likely to interact with: From 065518577eb589cad60945c69428494f0ed01757 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 6 Sep 2025 16:37:21 +0300 Subject: [PATCH 052/109] Fix the tasks docs (#37699) Closes https://github.com/zed-industries/zed/issues/37698 Release Notes: - N/A --- docs/src/tasks.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/tasks.md b/docs/src/tasks.md index bff3eac86048752be50f8fd605bc5b76677ca0c0..e530f568cdce0fb8e1da059b4b841fac7049e8fd 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -45,7 +45,7 @@ Zed supports ways to spawn (and rerun) commands using its integrated terminal to // Whether to show the task line in the output of the spawned task, defaults to `true`. "show_summary": true, // Whether to show the command line in the output of the spawned task, defaults to `true`. - "show_output": true + "show_command": true // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. 
// "tags": [] } From 84f166fc85e1675fb76183ad6f212891d596c38d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 6 Sep 2025 16:39:21 +0300 Subject: [PATCH 053/109] Tweak word completions more (#37697) Follow-up of https://github.com/zed-industries/zed/pull/37352 Closes https://github.com/zed-industries/zed/issues/37132 * disabled word completions in the agent panel's editor * if not disabled, allow to trigger word completions with an action even if the completions threshold is not reached Release Notes: - Fixed word completions appearing in the agent panel's editor and not appearing when triggered with the action before the completion threshold is reached --- crates/agent_ui/src/message_editor.rs | 1 + crates/editor/src/code_context_menus.rs | 2 +- crates/editor/src/editor.rs | 42 ++++++++++++---- crates/editor/src/editor_tests.rs | 64 +++++++++++++++++++++++++ 4 files changed, 100 insertions(+), 9 deletions(-) diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 45e7529ec21c576354a556bdc27112da4d57e085..6f0ad2767a46fb23b40e0116fd9cf85f06c28aca 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -125,6 +125,7 @@ pub(crate) fn create_editor( cx, ); editor.set_placeholder_text("Message the agent – @ to include context", cx); + editor.disable_word_completions(); editor.set_show_indent_guides(false, cx); editor.set_soft_wrap(); editor.set_use_modal_editing(true); diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 6d57048985955730bef2c7840d645c87b56915fc..18fce84dbca9cfe845d0912295ed22929ccb9cf7 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -251,7 +251,7 @@ enum MarkdownCacheKey { pub enum CompletionsMenuSource { Normal, SnippetChoices, - Words, + Words { ignore_threshold: bool }, } // TODO: There should really be a wrapper around fuzzy match tasks that does this. 
diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 2374c8d6875f05608aa800de660fb3602ed35988..b1f9bde6ddba09a77ca386793847552f85d5be96 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1030,6 +1030,7 @@ pub struct Editor { inline_diagnostics_update: Task<()>, inline_diagnostics_enabled: bool, diagnostics_enabled: bool, + word_completions_enabled: bool, inline_diagnostics: Vec<(Anchor, InlineDiagnostic)>, soft_wrap_mode_override: Option, hard_wrap: Option, @@ -2163,6 +2164,7 @@ impl Editor { }, inline_diagnostics_enabled: full_mode, diagnostics_enabled: full_mode, + word_completions_enabled: full_mode, inline_value_cache: InlineValueCache::new(inlay_hint_settings.show_value_hints), inlay_hint_cache: InlayHintCache::new(inlay_hint_settings), gutter_hovered: false, @@ -4892,8 +4894,15 @@ impl Editor { }); match completions_source { - Some(CompletionsMenuSource::Words) => { - self.show_word_completions(&ShowWordCompletions, window, cx) + Some(CompletionsMenuSource::Words { .. 
}) => { + self.open_or_update_completions_menu( + Some(CompletionsMenuSource::Words { + ignore_threshold: false, + }), + None, + window, + cx, + ); } Some(CompletionsMenuSource::Normal) | Some(CompletionsMenuSource::SnippetChoices) @@ -5401,7 +5410,14 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.open_or_update_completions_menu(Some(CompletionsMenuSource::Words), None, window, cx); + self.open_or_update_completions_menu( + Some(CompletionsMenuSource::Words { + ignore_threshold: true, + }), + None, + window, + cx, + ); } pub fn show_completions( @@ -5450,9 +5466,13 @@ impl Editor { drop(multibuffer_snapshot); + let mut ignore_word_threshold = false; let provider = match requested_source { Some(CompletionsMenuSource::Normal) | None => self.completion_provider.clone(), - Some(CompletionsMenuSource::Words) => None, + Some(CompletionsMenuSource::Words { ignore_threshold }) => { + ignore_word_threshold = ignore_threshold; + None + } Some(CompletionsMenuSource::SnippetChoices) => { log::error!("bug: SnippetChoices requested_source is not handled"); None @@ -5573,10 +5593,12 @@ impl Editor { .as_ref() .is_none_or(|query| !query.chars().any(|c| c.is_digit(10))); - let omit_word_completions = match &query { - Some(query) => query.chars().count() < completion_settings.words_min_length, - None => completion_settings.words_min_length != 0, - }; + let omit_word_completions = !self.word_completions_enabled + || (!ignore_word_threshold + && match &query { + Some(query) => query.chars().count() < completion_settings.words_min_length, + None => completion_settings.words_min_length != 0, + }); let (mut words, provider_responses) = match &provider { Some(provider) => { @@ -17121,6 +17143,10 @@ impl Editor { self.inline_diagnostics.clear(); } + pub fn disable_word_completions(&mut self) { + self.word_completions_enabled = false; + } + pub fn diagnostics_enabled(&self) -> bool { self.diagnostics_enabled && self.mode.is_full() } diff --git 
a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index f4569b436488728f197183b27c63b2706881c8cb..36405079b8e241ed068eb32289d64064f16df39c 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -14278,6 +14278,26 @@ async fn test_word_completions_do_not_show_before_threshold(cx: &mut TestAppCont } }); + cx.update_editor(|editor, window, cx| { + editor.show_word_completions(&ShowWordCompletions, window, cx); + }); + cx.executor().run_until_parked(); + cx.update_editor(|editor, window, cx| { + if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() + { + assert_eq!(completion_menu_entries(menu), &["wowser", "wowen", "wow"], "Even though the threshold is not met, invoking word completions with an action should provide the completions"); + } else { + panic!("expected completion menu to be open after the word completions are called with an action"); + } + + editor.cancel(&Cancel, window, cx); + }); + cx.update_editor(|editor, _, _| { + if editor.context_menu.borrow_mut().is_some() { + panic!("expected completion menu to be hidden after canceling"); + } + }); + cx.simulate_keystroke("o"); cx.executor().run_until_parked(); cx.update_editor(|editor, _, _| { @@ -14300,6 +14320,50 @@ async fn test_word_completions_do_not_show_before_threshold(cx: &mut TestAppCont }); } +#[gpui::test] +async fn test_word_completions_disabled(cx: &mut TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.completions = Some(CompletionSettings { + words: WordsCompletionMode::Enabled, + words_min_length: 0, + lsp: true, + lsp_fetch_timeout_ms: 0, + lsp_insert_mode: LspInsertMode::Insert, + }); + }); + + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + cx.update_editor(|editor, _, _| { + editor.disable_word_completions(); + }); + cx.set_state(indoc! 
{"ˇ + wow + wowen + wowser + "}); + cx.simulate_keystroke("w"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _, _| { + if editor.context_menu.borrow_mut().is_some() { + panic!( + "expected completion menu to be hidden, as words completion are disabled for this editor" + ); + } + }); + + cx.update_editor(|editor, window, cx| { + editor.show_word_completions(&ShowWordCompletions, window, cx); + }); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _, _| { + if editor.context_menu.borrow_mut().is_some() { + panic!( + "expected completion menu to be hidden even if called for explicitly, as words completion are disabled for this editor" + ); + } + }); +} + fn gen_text_edit(params: &CompletionParams, text: &str) -> Option { let position = || lsp::Position { line: params.text_document_position.position.line, From e04473dd2612a63504aca730b3f5a51ad72fdc2c Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Sat, 6 Sep 2025 19:51:51 +0300 Subject: [PATCH 054/109] Revert "gpui: Skip `test` attribute expansion for rust-analyzer (#37611)" (#37705) This reverts commit 4124bedab796d2ac0a1e57f8b94f72500969797a. With the new annotation, r-a starts to skip the tasks that are marked with `gpui::test` and when it fully loads, it starts to return module-only tasks: https://github.com/user-attachments/assets/5af3e3e4-91b7-4f19-aab0-ed7f186e5f74 Release Notes: - N/A --- crates/gpui/src/gpui.rs | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 0858cb014e33da354eb8a6488982b913b76d2b52..3c4ee41c16ab7cfc5e42007291e330282b330ecb 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -121,14 +121,6 @@ mod seal { pub trait Sealed {} } -// This allows r-a to skip expanding the gpui test macro which should -// reduce resource usage a bit as the test attribute is special cased -// to be treated as a no-op. 
-#[cfg(rust_analyzer)] -pub use core::prelude::v1::test; -#[cfg(not(rust_analyzer))] -pub use gpui_macros::test; - pub use action::*; pub use anyhow::Result; pub use app::*; @@ -142,7 +134,7 @@ pub use elements::*; pub use executor::*; pub use geometry::*; pub use global::*; -pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action}; +pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, register_action, test}; pub use http_client; pub use input::*; pub use inspector::*; From 1552afd8bf8da873e43e31088e2c56bfdaf691c7 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Sat, 6 Sep 2025 15:38:48 -0400 Subject: [PATCH 055/109] docs: Use `#action` throughout `configuring-zed.md` (#37709) Release Notes: - N/A --- docs/src/configuring-zed.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index e245b3ca2facecb097b315f28d98ef2ea5a20048..56b4de832862439b93d8a0359dbf8284226e1671 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -116,7 +116,7 @@ Non-negative `float` values ## Allow Rewrap -- Description: Controls where the `editor::Rewrap` action is allowed in the current language scope +- Description: Controls where the {#action editor::Rewrap} action is allowed in the current language scope - Setting: `allow_rewrap` - Default: `"in_comments"` @@ -2192,7 +2192,7 @@ Example: ## Go to Definition Fallback -- Description: What to do when the "go to definition" action fails to find a definition +- Description: What to do when the {#action editor::GoToDefinition} action fails to find a definition - Setting: `go_to_definition_fallback` - Default: `"find_all_references"` @@ -2383,7 +2383,7 @@ Example: **Options** -Run the `icon theme selector: toggle` action in the command palette to see a current list of valid icon themes names. 
+Run the {#action icon_theme_selector::Toggle} action in the command palette to see a current list of valid icon themes names. ### Light @@ -2393,7 +2393,7 @@ Run the `icon theme selector: toggle` action in the command palette to see a cur **Options** -Run the `icon theme selector: toggle` action in the command palette to see a current list of valid icon themes names. +Run the {#action icon_theme_selector::Toggle} action in the command palette to see a current list of valid icon themes names. ## Image Viewer @@ -2832,7 +2832,7 @@ Configuration object for defining settings profiles. Example: - Double-clicking on the file - Double-clicking on the tab header - - Using the `project_panel::OpenPermanent` action + - Using the {#action project_panel::OpenPermanent} action - Editing the file - Dragging the file to a different pane @@ -4053,7 +4053,7 @@ Example command to set the title: `echo -e "\e]2;New Title\007";` **Options** -Run the `theme selector: toggle` action in the command palette to see a current list of valid themes names. +Run the {#action theme_selector::Toggle} action in the command palette to see a current list of valid themes names. ### Light @@ -4063,7 +4063,7 @@ Run the `theme selector: toggle` action in the command palette to see a current **Options** -Run the `theme selector: toggle` action in the command palette to see a current list of valid themes names. +Run the {#action theme_selector::Toggle} action in the command palette to see a current list of valid themes names. ## Title Bar From 5c30578c4961d3766ee991b119658b02741b85e5 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Sun, 7 Sep 2025 02:01:55 +0530 Subject: [PATCH 056/109] linux: Fix IME preedit text not showing in Terminal on Wayland (#37701) Closes https://github.com/zed-industries/zed/issues/37268 Release Notes: - Fixed an issue where IME preedit text was not showing in the Terminal on Wayland. 
--- crates/terminal_view/src/terminal_element.rs | 12 ++++----- crates/terminal_view/src/terminal_view.rs | 28 ++++++++++++-------- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index 5bbf5ad36b3de89514d92ce9e305988817cec32f..a786aa20e60f28b1f22bd1c9e8d993098aa96de4 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -1192,8 +1192,8 @@ impl Element for TerminalElement { bounds.origin + Point::new(layout.gutter, px(0.)) - Point::new(px(0.), scroll_top); let marked_text_cloned: Option = { - let ime_state = self.terminal_view.read(cx); - ime_state.marked_text.clone() + let ime_state = &self.terminal_view.read(cx).ime_state; + ime_state.as_ref().map(|state| state.marked_text.clone()) }; let terminal_input_handler = TerminalInputHandler { @@ -1421,11 +1421,9 @@ impl InputHandler for TerminalInputHandler { _window: &mut Window, cx: &mut App, ) { - if let Some(range) = new_marked_range { - self.terminal_view.update(cx, |view, view_cx| { - view.set_marked_text(new_text.to_string(), range, view_cx); - }); - } + self.terminal_view.update(cx, |view, view_cx| { + view.set_marked_text(new_text.to_string(), new_marked_range, view_cx); + }); } fn unmark_text(&mut self, _window: &mut Window, cx: &mut App) { diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 2548a7c24460be3161147b69e30c6191ba5dd2e6..08caf9a4ef1c0b49dbfa8f8f2578f00ddb130ee0 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -62,6 +62,11 @@ use std::{ time::Duration, }; +struct ImeState { + marked_text: String, + marked_range_utf16: Option>, +} + const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); const TERMINAL_SCROLLBAR_WIDTH: Pixels = px(12.); @@ -138,8 +143,7 @@ pub struct TerminalView { scroll_handle: TerminalScrollHandle, 
show_scrollbar: bool, hide_scrollbar_task: Option>, - marked_text: Option, - marked_range_utf16: Option>, + ime_state: Option, _subscriptions: Vec, _terminal_subscriptions: Vec, } @@ -263,8 +267,7 @@ impl TerminalView { show_scrollbar: !Self::should_autohide_scrollbar(cx), hide_scrollbar_task: None, cwd_serialized: false, - marked_text: None, - marked_range_utf16: None, + ime_state: None, _subscriptions: vec![ focus_in, focus_out, @@ -323,24 +326,27 @@ impl TerminalView { pub(crate) fn set_marked_text( &mut self, text: String, - range: Range, + range: Option>, cx: &mut Context, ) { - self.marked_text = Some(text); - self.marked_range_utf16 = Some(range); + self.ime_state = Some(ImeState { + marked_text: text, + marked_range_utf16: range, + }); cx.notify(); } /// Gets the current marked range (UTF-16). pub(crate) fn marked_text_range(&self) -> Option> { - self.marked_range_utf16.clone() + self.ime_state + .as_ref() + .and_then(|state| state.marked_range_utf16.clone()) } /// Clears the marked (pre-edit) text state. 
pub(crate) fn clear_marked_text(&mut self, cx: &mut Context) { - if self.marked_text.is_some() { - self.marked_text = None; - self.marked_range_utf16 = None; + if self.ime_state.is_some() { + self.ime_state = None; cx.notify(); } } From 29def012a18af534d17ded2febc77841ce3af601 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E5=B0=8F=E7=99=BD?= <364772080@qq.com> Date: Sun, 7 Sep 2025 14:09:35 +0800 Subject: [PATCH 057/109] windows: Update Windows keymap (#37721) Pickup the changes from #37009 Release Notes: - N/A --- assets/keymaps/default-windows.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 1c9f1281882dc136daa7a3912d3d92b3516a4441..d10451ac856e201033490da260d4e21b40cf718b 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -644,6 +644,7 @@ "alt-save": "workspace::SaveAll", "ctrl-k s": "workspace::SaveAll", "ctrl-k m": "language_selector::Toggle", + "ctrl-m ctrl-m": "toolchain::AddToolchain", "escape": "workspace::Unfollow", "ctrl-k ctrl-left": "workspace::ActivatePaneLeft", "ctrl-k ctrl-right": "workspace::ActivatePaneRight", @@ -1075,6 +1076,13 @@ "tab": "channel_modal::ToggleMode" } }, + { + "context": "ToolchainSelector", + "use_key_equivalents": true, + "bindings": { + "ctrl-shift-a": "toolchain::AddToolchain" + } + }, { "context": "FileFinder || (FileFinder > Picker > Editor)", "use_key_equivalents": true, From 0ef7ee172fd8dd4d0638a9731140cb3a74344918 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E5=B0=8F=E7=99=BD?= <364772080@qq.com> Date: Sun, 7 Sep 2025 14:45:41 +0800 Subject: [PATCH 058/109] windows: Remove some unused keys from the keymap (#37722) AFAIK, we dont handle these keys on Windows. 
Release Notes: - N/A --- assets/keymaps/default-windows.json | 37 ----------------------------- 1 file changed, 37 deletions(-) diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index d10451ac856e201033490da260d4e21b40cf718b..de0d97b52e2b0fe9bac931cb46debc812a56a70b 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -25,7 +25,6 @@ "ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }], "ctrl-shift-w": "workspace::CloseWindow", "shift-escape": "workspace::ToggleZoom", - "open": "workspace::Open", "ctrl-o": "workspace::Open", "ctrl-=": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }], @@ -68,18 +67,13 @@ "ctrl-k q": "editor::Rewrap", "ctrl-backspace": ["editor::DeleteToPreviousWordStart", { "ignore_newlines": false, "ignore_brackets": false }], "ctrl-delete": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], - "cut": "editor::Cut", "shift-delete": "editor::Cut", "ctrl-x": "editor::Cut", - "copy": "editor::Copy", "ctrl-insert": "editor::Copy", "ctrl-c": "editor::Copy", - "paste": "editor::Paste", "shift-insert": "editor::Paste", "ctrl-v": "editor::Paste", - "undo": "editor::Undo", "ctrl-z": "editor::Undo", - "redo": "editor::Redo", "ctrl-y": "editor::Redo", "ctrl-shift-z": "editor::Redo", "up": "editor::MoveUp", @@ -138,7 +132,6 @@ "ctrl-shift-enter": "editor::NewlineAbove", "ctrl-k ctrl-z": "editor::ToggleSoftWrap", "ctrl-k z": "editor::ToggleSoftWrap", - "find": "buffer_search::Deploy", "ctrl-f": "buffer_search::Deploy", "ctrl-h": "buffer_search::DeployReplace", "ctrl-shift-.": "assistant::QuoteSelection", @@ -177,7 +170,6 @@ "context": "Markdown", "use_key_equivalents": true, "bindings": { - "copy": "markdown::Copy", "ctrl-c": "markdown::Copy" } }, @@ -225,7 +217,6 @@ "bindings": { "ctrl-enter": "assistant::Assist", "ctrl-s": "workspace::Save", - "save": 
"workspace::Save", "ctrl-shift-,": "assistant::InsertIntoEditor", "shift-enter": "assistant::Split", "ctrl-r": "assistant::CycleMessageRole", @@ -272,7 +263,6 @@ "context": "AgentPanel > Markdown", "use_key_equivalents": true, "bindings": { - "copy": "markdown::CopyAsMarkdown", "ctrl-c": "markdown::CopyAsMarkdown" } }, @@ -367,7 +357,6 @@ "context": "PromptLibrary", "use_key_equivalents": true, "bindings": { - "new": "rules_library::NewRule", "ctrl-n": "rules_library::NewRule", "ctrl-shift-s": "rules_library::ToggleDefaultRule" } @@ -381,7 +370,6 @@ "enter": "search::SelectNextMatch", "shift-enter": "search::SelectPreviousMatch", "alt-enter": "search::SelectAllMatches", - "find": "search::FocusSearch", "ctrl-f": "search::FocusSearch", "ctrl-h": "search::ToggleReplace", "ctrl-l": "search::ToggleSelection" @@ -408,7 +396,6 @@ "use_key_equivalents": true, "bindings": { "escape": "project_search::ToggleFocus", - "shift-find": "search::FocusSearch", "ctrl-shift-f": "search::FocusSearch", "ctrl-shift-h": "search::ToggleReplace", "alt-r": "search::ToggleRegex" // vscode @@ -472,14 +459,12 @@ "forward": "pane::GoForward", "f3": "search::SelectNextMatch", "shift-f3": "search::SelectPreviousMatch", - "shift-find": "project_search::ToggleFocus", "ctrl-shift-f": "project_search::ToggleFocus", "shift-alt-h": "search::ToggleReplace", "alt-l": "search::ToggleSelection", "alt-enter": "search::SelectAllMatches", "alt-c": "search::ToggleCaseSensitive", "alt-w": "search::ToggleWholeWord", - "alt-find": "project_search::ToggleFilters", "alt-f": "project_search::ToggleFilters", "alt-r": "search::ToggleRegex", // "ctrl-shift-alt-x": "search::ToggleRegex", @@ -579,25 +564,19 @@ "context": "Workspace", "use_key_equivalents": true, "bindings": { - "alt-open": ["projects::OpenRecent", { "create_new_window": false }], // Change the default action on `menu::Confirm` by setting the parameter // "ctrl-alt-o": ["projects::OpenRecent", { "create_new_window": true }], "ctrl-r": 
["projects::OpenRecent", { "create_new_window": false }], - "shift-alt-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], // Change to open path modal for existing remote connection by setting the parameter // "ctrl-shift-alt-o": "["projects::OpenRemote", { "from_existing_connection": true }]", "ctrl-shift-alt-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], "shift-alt-b": "branches::OpenRecent", "shift-alt-enter": "toast::RunAction", "ctrl-shift-`": "workspace::NewTerminal", - "save": "workspace::Save", "ctrl-s": "workspace::Save", "ctrl-k ctrl-shift-s": "workspace::SaveWithoutFormat", - "shift-save": "workspace::SaveAs", "ctrl-shift-s": "workspace::SaveAs", - "new": "workspace::NewFile", "ctrl-n": "workspace::NewFile", - "shift-new": "workspace::NewWindow", "ctrl-shift-n": "workspace::NewWindow", "ctrl-`": "terminal_panel::Toggle", "f10": ["app_menu::OpenApplicationMenu", "Zed"], @@ -621,7 +600,6 @@ "shift-alt-0": "workspace::ResetOpenDocksSize", "ctrl-shift-alt--": ["workspace::DecreaseOpenDocksSize", { "px": 0 }], "ctrl-shift-alt-=": ["workspace::IncreaseOpenDocksSize", { "px": 0 }], - "shift-find": "pane::DeploySearch", "ctrl-shift-f": "pane::DeploySearch", "ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }], "ctrl-shift-t": "pane::ReopenClosedItem", @@ -641,7 +619,6 @@ "ctrl-shift-g": "git_panel::ToggleFocus", "ctrl-shift-d": "debug_panel::ToggleFocus", "ctrl-shift-/": "agent::ToggleFocus", - "alt-save": "workspace::SaveAll", "ctrl-k s": "workspace::SaveAll", "ctrl-k m": "language_selector::Toggle", "ctrl-m ctrl-m": "toolchain::AddToolchain", @@ -849,9 +826,7 @@ "bindings": { "left": "outline_panel::CollapseSelectedEntry", "right": "outline_panel::ExpandSelectedEntry", - "alt-copy": "outline_panel::CopyPath", "shift-alt-c": "outline_panel::CopyPath", - "shift-alt-copy": "workspace::CopyRelativePath", "ctrl-shift-alt-c": 
"workspace::CopyRelativePath", "ctrl-alt-r": "outline_panel::RevealInFileManager", "space": "outline_panel::OpenSelectedEntry", @@ -867,21 +842,14 @@ "bindings": { "left": "project_panel::CollapseSelectedEntry", "right": "project_panel::ExpandSelectedEntry", - "new": "project_panel::NewFile", "ctrl-n": "project_panel::NewFile", - "alt-new": "project_panel::NewDirectory", "alt-n": "project_panel::NewDirectory", - "cut": "project_panel::Cut", "ctrl-x": "project_panel::Cut", - "copy": "project_panel::Copy", "ctrl-insert": "project_panel::Copy", "ctrl-c": "project_panel::Copy", - "paste": "project_panel::Paste", "shift-insert": "project_panel::Paste", "ctrl-v": "project_panel::Paste", - "alt-copy": "project_panel::CopyPath", "shift-alt-c": "project_panel::CopyPath", - "shift-alt-copy": "workspace::CopyRelativePath", "ctrl-k ctrl-shift-c": "workspace::CopyRelativePath", "enter": "project_panel::Rename", "f2": "project_panel::Rename", @@ -893,7 +861,6 @@ "ctrl-alt-r": "project_panel::RevealInFileManager", "ctrl-shift-enter": "project_panel::OpenWithSystem", "alt-d": "project_panel::CompareMarkedFiles", - "shift-find": "project_panel::NewSearchInDirectory", "ctrl-k ctrl-shift-f": "project_panel::NewSearchInDirectory", "shift-down": "menu::SelectNext", "shift-up": "menu::SelectPrevious", @@ -1118,10 +1085,8 @@ "use_key_equivalents": true, "bindings": { "ctrl-alt-space": "terminal::ShowCharacterPalette", - "copy": "terminal::Copy", "ctrl-insert": "terminal::Copy", "ctrl-shift-c": "terminal::Copy", - "paste": "terminal::Paste", "shift-insert": "terminal::Paste", "ctrl-shift-v": "terminal::Paste", "ctrl-enter": "assistant::InlineAssist", @@ -1137,7 +1102,6 @@ "ctrl-w": ["terminal::SendKeystroke", "ctrl-w"], "ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"], "ctrl-shift-a": "editor::SelectAll", - "find": "buffer_search::Deploy", "ctrl-shift-f": "buffer_search::Deploy", "ctrl-shift-l": "terminal::Clear", "ctrl-shift-w": "pane::CloseActiveItem", @@ -1218,7 +1182,6 @@ 
"use_key_equivalents": true, "bindings": { "ctrl-f": "search::FocusSearch", - "alt-find": "keymap_editor::ToggleKeystrokeSearch", "alt-f": "keymap_editor::ToggleKeystrokeSearch", "alt-c": "keymap_editor::ToggleConflictFilter", "enter": "keymap_editor::EditBinding", From 76aaf6a8fe1992ec57114633e022c9771756bfaa Mon Sep 17 00:00:00 2001 From: Bruno Taschenbier <139721757+tastenbier@users.noreply.github.com> Date: Sun, 7 Sep 2025 17:00:58 +0000 Subject: [PATCH 059/109] Fix docs for `tabs.close_position` in `default.json` (#37729) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Minor docs fix. Seems like 0a4ff2f47536c872ebd1ac3e672538a6251832e8 accidentally added "hidden" to the docs of both – `close_position` and `show_close_button`. Release Notes: - N/A Co-authored-by: tastenbier <> --- assets/settings/default.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 0b5481bd4e4e2177302e38199bb66e87471d2904..63a11403d3dd4b30926a6a1f32e86dadf3804054 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -962,7 +962,7 @@ // Show git status colors in the editor tabs. "git_status": false, // Position of the close button on the editor tabs. - // One of: ["right", "left", "hidden"] + // One of: ["right", "left"] "close_position": "right", // Whether to show the file icon for a tab. "file_icons": false, From 0e33a3afe0f37b1a7a37fec6b068abe9dd984009 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Sun, 7 Sep 2025 11:16:49 -0600 Subject: [PATCH 060/109] zeta: Check whether data collection is allowed for recent edit history (#37680) Also: * Adds tests for can_collect_data. * Temporarily removes collection of diagnostics. Release Notes: - Edit Prediction: Fixed a bug where requests were marked eligible for data collection despite the recent edit history in the request involving files that may not be open source. 
The requests affected by this bug will not be used in training data. --- Cargo.lock | 1 + .../zed/src/zed/edit_prediction_registry.rs | 7 +- crates/zeta/Cargo.toml | 1 + crates/zeta/src/input_excerpt.rs | 8 +- crates/zeta/src/license_detection.rs | 1 - crates/zeta/src/zeta.rs | 882 ++++++++++++------ crates/zeta_cli/src/main.rs | 35 +- 7 files changed, 595 insertions(+), 340 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f4c94f8078b1ab392ed1a50e15c71dab1921f0a3..dbcea05ea9bc52288defc8c299d82eb508337544 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20841,6 +20841,7 @@ dependencies = [ "language_model", "log", "menu", + "parking_lot", "postage", "project", "rand 0.9.1", diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 4f009ccb0b1197f11b034ac48b89dd37b6f41278..ae26427fc6547079b163235f5d1c3df26a489795 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -8,7 +8,7 @@ use settings::SettingsStore; use std::{cell::RefCell, rc::Rc, sync::Arc}; use supermaven::{Supermaven, SupermavenCompletionProvider}; use ui::Window; -use zeta::{ProviderDataCollection, ZetaEditPredictionProvider}; +use zeta::ZetaEditPredictionProvider; pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let editors: Rc, AnyWindowHandle>>> = Rc::default(); @@ -214,11 +214,8 @@ fn assign_edit_prediction_provider( }); } - let data_collection = - ProviderDataCollection::new(zeta.clone(), singleton_buffer, cx); - let provider = - cx.new(|_| zeta::ZetaEditPredictionProvider::new(zeta, data_collection)); + cx.new(|_| zeta::ZetaEditPredictionProvider::new(zeta, singleton_buffer)); editor.set_edit_prediction_provider(Some(provider), window, cx); } diff --git a/crates/zeta/Cargo.toml b/crates/zeta/Cargo.toml index a9c2a7619f4db22e51c014672aa2100b30a2539a..09bcfa7f542ce9c01802c9cebc11dfc9a8da2542 100644 --- a/crates/zeta/Cargo.toml +++ b/crates/zeta/Cargo.toml @@ -72,6 
+72,7 @@ gpui = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } indoc.workspace = true language = { workspace = true, features = ["test-support"] } +parking_lot.workspace = true reqwest_client = { workspace = true, features = ["test-support"] } rpc = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } diff --git a/crates/zeta/src/input_excerpt.rs b/crates/zeta/src/input_excerpt.rs index dd1bbed1d72e8668e9ed55c9b66b911addfcdd43..06bff5b1bea0f099b2ccd98605ac5de5bb5e6360 100644 --- a/crates/zeta/src/input_excerpt.rs +++ b/crates/zeta/src/input_excerpt.rs @@ -1,6 +1,6 @@ use crate::{ CURSOR_MARKER, EDITABLE_REGION_END_MARKER, EDITABLE_REGION_START_MARKER, START_OF_FILE_MARKER, - tokens_for_bytes, + guess_token_count, }; use language::{BufferSnapshot, Point}; use std::{fmt::Write, ops::Range}; @@ -22,7 +22,7 @@ pub fn excerpt_for_cursor_position( let mut remaining_edit_tokens = editable_region_token_limit; while let Some(parent) = snapshot.syntax_ancestor(scope_range.clone()) { - let parent_tokens = tokens_for_bytes(parent.byte_range().len()); + let parent_tokens = guess_token_count(parent.byte_range().len()); let parent_point_range = Point::new( parent.start_position().row as u32, parent.start_position().column as u32, @@ -99,7 +99,7 @@ fn expand_range( if remaining_tokens > 0 && expanded_range.start.row > 0 { expanded_range.start.row -= 1; let line_tokens = - tokens_for_bytes(snapshot.line_len(expanded_range.start.row) as usize); + guess_token_count(snapshot.line_len(expanded_range.start.row) as usize); remaining_tokens = remaining_tokens.saturating_sub(line_tokens); expanded = true; } @@ -107,7 +107,7 @@ fn expand_range( if remaining_tokens > 0 && expanded_range.end.row < snapshot.max_point().row { expanded_range.end.row += 1; expanded_range.end.column = snapshot.line_len(expanded_range.end.row); - let line_tokens = 
tokens_for_bytes(expanded_range.end.column as usize); + let line_tokens = guess_token_count(expanded_range.end.column as usize); remaining_tokens = remaining_tokens.saturating_sub(line_tokens); expanded = true; } diff --git a/crates/zeta/src/license_detection.rs b/crates/zeta/src/license_detection.rs index 5f207a44e8bd2028e6a2b416e978f101cfe5bd57..e06e1577a66cc160efa00213b80c6ca407f7be85 100644 --- a/crates/zeta/src/license_detection.rs +++ b/crates/zeta/src/license_detection.rs @@ -358,7 +358,6 @@ impl LicenseDetectionWatcher { #[cfg(test)] mod tests { - use fs::FakeFs; use gpui::TestAppContext; use serde_json::json; diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 3851d16755783209fd9da4f468a494779a7d9fe7..dfcf98f025c2e020d6545efca64d4ab12579e370 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -29,7 +29,7 @@ use gpui::{ use http_client::{AsyncBody, HttpClient, Method, Request, Response}; use input_excerpt::excerpt_for_cursor_position; use language::{ - Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, ToOffset, ToPoint, text_diff, + Anchor, Buffer, BufferSnapshot, EditPreview, File, OffsetRangeExt, ToOffset, ToPoint, text_diff, }; use language_model::{LlmApiToken, RefreshLlmTokenListener}; use project::{Project, ProjectPath}; @@ -65,7 +65,6 @@ const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_ch const MAX_CONTEXT_TOKENS: usize = 150; const MAX_REWRITE_TOKENS: usize = 350; const MAX_EVENT_TOKENS: usize = 500; -const MAX_DIAGNOSTIC_GROUPS: usize = 10; /// Maximum number of events to track. const MAX_EVENT_COUNT: usize = 16; @@ -216,7 +215,7 @@ pub struct Zeta { client: Arc, shown_completions: VecDeque, rated_completions: HashSet, - data_collection_choice: Entity, + data_collection_choice: DataCollectionChoice, llm_token: LlmApiToken, _llm_token_subscription: Subscription, /// Whether an update to a newer version of Zed is required to continue using Zeta. 
@@ -271,10 +270,7 @@ impl Zeta { fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - - let data_collection_choice = Self::load_data_collection_choices(); - let data_collection_choice = cx.new(|_| data_collection_choice); - + let data_collection_choice = Self::load_data_collection_choice(); Self { projects: HashMap::default(), client, @@ -408,7 +404,6 @@ impl Zeta { project: &Entity, buffer: &Entity, cursor: language::Anchor, - can_collect_data: bool, cx: &mut Context, perform_predict_edits: F, ) -> Task>> @@ -422,15 +417,25 @@ impl Zeta { let buffer_snapshotted_at = Instant::now(); let snapshot = self.report_changes_for_buffer(&buffer, project, cx); let zeta = cx.entity(); - let events = self.get_or_init_zeta_project(project, cx).events.clone(); let client = self.client.clone(); let llm_token = self.llm_token.clone(); let app_version = AppVersion::global(cx); - let git_info = if let (true, Some(file)) = (can_collect_data, snapshot.file()) { - git_info_for_file(project, &ProjectPath::from_file(file.as_ref(), cx), cx) + let zeta_project = self.get_or_init_zeta_project(project, cx); + let mut events = Vec::with_capacity(zeta_project.events.len()); + events.extend(zeta_project.events.iter().cloned()); + let events = Arc::new(events); + + let (git_info, can_collect_file) = if let Some(file) = snapshot.file() { + let can_collect_file = self.can_collect_file(file, cx); + let git_info = if can_collect_file { + git_info_for_file(project, &ProjectPath::from_file(file.as_ref(), cx), cx) + } else { + None + }; + (git_info, can_collect_file) } else { - None + (None, false) }; let full_path: Arc = snapshot @@ -440,25 +445,35 @@ impl Zeta { let full_path_str = full_path.to_string_lossy().to_string(); let cursor_point = cursor.to_point(&snapshot); let cursor_offset = cursor_point.to_offset(&snapshot); - let make_events_prompt = move || prompt_for_events(&events, MAX_EVENT_TOKENS); + let 
prompt_for_events = { + let events = events.clone(); + move || prompt_for_events_impl(&events, MAX_EVENT_TOKENS) + }; let gather_task = gather_context( - project, full_path_str, &snapshot, cursor_point, - make_events_prompt, - can_collect_data, - git_info, + prompt_for_events, cx, ); cx.spawn(async move |this, cx| { let GatherContextOutput { - body, + mut body, editable_range, + included_events_count, } = gather_task.await?; let done_gathering_context_at = Instant::now(); + let included_events = &events[events.len() - included_events_count..events.len()]; + body.can_collect_data = can_collect_file + && this + .read_with(cx, |this, cx| this.can_collect_events(included_events, cx)) + .unwrap_or(false); + if body.can_collect_data { + body.git_info = git_info; + } + log::debug!( "Events:\n{}\nExcerpt:\n{:?}", body.input_events, @@ -563,10 +578,8 @@ impl Zeta { response: PredictEditsResponse, cx: &mut Context, ) -> Task>> { - use std::future::ready; - - self.request_completion_impl(project, buffer, position, false, cx, |_params| { - ready(Ok((response, None))) + self.request_completion_impl(project, buffer, position, cx, |_params| { + std::future::ready(Ok((response, None))) }) } @@ -575,17 +588,9 @@ impl Zeta { project: &Entity, buffer: &Entity, position: language::Anchor, - can_collect_data: bool, cx: &mut Context, ) -> Task>> { - self.request_completion_impl( - project, - buffer, - position, - can_collect_data, - cx, - Self::perform_predict_edits, - ) + self.request_completion_impl(project, buffer, position, cx, Self::perform_predict_edits) } pub fn perform_predict_edits( @@ -954,7 +959,58 @@ impl Zeta { new_snapshot } - fn load_data_collection_choices() -> DataCollectionChoice { + fn can_collect_file(&self, file: &Arc, cx: &App) -> bool { + self.data_collection_choice.is_enabled() && self.is_file_open_source(file, cx) + } + + fn can_collect_events(&self, events: &[Event], cx: &App) -> bool { + if !self.data_collection_choice.is_enabled() { + return false; + } + let 
mut last_checked_file = None; + for event in events { + match event { + Event::BufferChange { + old_snapshot, + new_snapshot, + .. + } => { + if let Some(old_file) = old_snapshot.file() + && let Some(new_file) = new_snapshot.file() + { + if let Some(last_checked_file) = last_checked_file + && Arc::ptr_eq(last_checked_file, old_file) + && Arc::ptr_eq(last_checked_file, new_file) + { + continue; + } + if !self.can_collect_file(old_file, cx) { + return false; + } + if !Arc::ptr_eq(old_file, new_file) && !self.can_collect_file(new_file, cx) + { + return false; + } + last_checked_file = Some(new_file); + } else { + return false; + } + } + } + } + true + } + + fn is_file_open_source(&self, file: &Arc, cx: &App) -> bool { + if !file.is_local() || file.is_private() { + return false; + } + self.license_detection_watchers + .get(&file.worktree_id(cx)) + .is_some_and(|watcher| watcher.is_project_open_source()) + } + + fn load_data_collection_choice() -> DataCollectionChoice { let choice = KEY_VALUE_STORE .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE) .log_err() @@ -970,6 +1026,17 @@ impl Zeta { None => DataCollectionChoice::NotAnswered, } } + + fn toggle_data_collection_choice(&mut self, cx: &mut Context) { + self.data_collection_choice = self.data_collection_choice.toggle(); + let new_choice = self.data_collection_choice; + db::write_and_log(cx, move || { + KEY_VALUE_STORE.write_kvp( + ZED_PREDICT_DATA_COLLECTION_CHOICE.into(), + new_choice.is_enabled().to_string(), + ) + }); + } } pub struct PerformPredictEditsParams { @@ -1026,48 +1093,19 @@ fn git_info_for_file( pub struct GatherContextOutput { pub body: PredictEditsBody, pub editable_range: Range, + pub included_events_count: usize, } pub fn gather_context( - project: &Entity, full_path_str: String, snapshot: &BufferSnapshot, cursor_point: language::Point, - make_events_prompt: impl FnOnce() -> String + Send + 'static, - can_collect_data: bool, - git_info: Option, + prompt_for_events: impl FnOnce() -> (String, usize) + 
Send + 'static, cx: &App, ) -> Task> { - let local_lsp_store = project.read(cx).lsp_store().read(cx).as_local(); - let diagnostic_groups: Vec<(String, serde_json::Value)> = - if can_collect_data && let Some(local_lsp_store) = local_lsp_store { - snapshot - .diagnostic_groups(None) - .into_iter() - .filter_map(|(language_server_id, diagnostic_group)| { - let language_server = - local_lsp_store.running_language_server_for_id(language_server_id)?; - let diagnostic_group = diagnostic_group.resolve::(snapshot); - let language_server_name = language_server.name().to_string(); - let serialized = serde_json::to_value(diagnostic_group).unwrap(); - Some((language_server_name, serialized)) - }) - .collect::>() - } else { - Vec::new() - }; - cx.background_spawn({ let snapshot = snapshot.clone(); async move { - let diagnostic_groups = if diagnostic_groups.is_empty() - || diagnostic_groups.len() >= MAX_DIAGNOSTIC_GROUPS - { - None - } else { - Some(diagnostic_groups) - }; - let input_excerpt = excerpt_for_cursor_position( cursor_point, &full_path_str, @@ -1075,15 +1113,15 @@ pub fn gather_context( MAX_REWRITE_TOKENS, MAX_CONTEXT_TOKENS, ); - let input_events = make_events_prompt(); + let (input_events, included_events_count) = prompt_for_events(); let editable_range = input_excerpt.editable_range.to_offset(&snapshot); let body = PredictEditsBody { input_events, input_excerpt: input_excerpt.prompt, - can_collect_data, - diagnostic_groups, - git_info, + can_collect_data: false, + diagnostic_groups: None, + git_info: None, outline: None, speculated_output: None, }; @@ -1091,18 +1129,19 @@ pub fn gather_context( Ok(GatherContextOutput { body, editable_range, + included_events_count, }) } }) } -fn prompt_for_events(events: &VecDeque, mut remaining_tokens: usize) -> String { +fn prompt_for_events_impl(events: &[Event], mut remaining_tokens: usize) -> (String, usize) { let mut result = String::new(); - for event in events.iter().rev() { + for (ix, event) in 
events.iter().rev().enumerate() { let event_string = event.to_prompt(); - let event_tokens = tokens_for_bytes(event_string.len()); + let event_tokens = guess_token_count(event_string.len()); if event_tokens > remaining_tokens { - break; + return (result, ix); } if !result.is_empty() { @@ -1111,7 +1150,7 @@ fn prompt_for_events(events: &VecDeque, mut remaining_tokens: usize) -> S result.insert_str(0, &event_string); remaining_tokens -= event_tokens; } - result + return (result, events.len()); } struct RegisteredBuffer { @@ -1222,6 +1261,7 @@ impl DataCollectionChoice { } } + #[must_use] pub fn toggle(&self) -> DataCollectionChoice { match self { Self::Enabled => Self::Disabled, @@ -1240,79 +1280,6 @@ impl From for DataCollectionChoice { } } -pub struct ProviderDataCollection { - /// When set to None, data collection is not possible in the provider buffer - choice: Option>, - license_detection_watcher: Option>, -} - -impl ProviderDataCollection { - pub fn new(zeta: Entity, buffer: Option>, cx: &mut App) -> Self { - let choice_and_watcher = buffer.and_then(|buffer| { - let file = buffer.read(cx).file()?; - - if !file.is_local() || file.is_private() { - return None; - } - - let zeta = zeta.read(cx); - let choice = zeta.data_collection_choice.clone(); - - let license_detection_watcher = zeta - .license_detection_watchers - .get(&file.worktree_id(cx)) - .cloned()?; - - Some((choice, license_detection_watcher)) - }); - - if let Some((choice, watcher)) = choice_and_watcher { - ProviderDataCollection { - choice: Some(choice), - license_detection_watcher: Some(watcher), - } - } else { - ProviderDataCollection { - choice: None, - license_detection_watcher: None, - } - } - } - - pub fn can_collect_data(&self, cx: &App) -> bool { - self.is_data_collection_enabled(cx) && self.is_project_open_source() - } - - pub fn is_data_collection_enabled(&self, cx: &App) -> bool { - self.choice - .as_ref() - .is_some_and(|choice| choice.read(cx).is_enabled()) - } - - fn 
is_project_open_source(&self) -> bool { - self.license_detection_watcher - .as_ref() - .is_some_and(|watcher| watcher.is_project_open_source()) - } - - pub fn toggle(&mut self, cx: &mut App) { - if let Some(choice) = self.choice.as_mut() { - let new_choice = choice.update(cx, |choice, _cx| { - let new_choice = choice.toggle(); - *choice = new_choice; - new_choice - }); - - db::write_and_log(cx, move || { - KEY_VALUE_STORE.write_kvp( - ZED_PREDICT_DATA_COLLECTION_CHOICE.into(), - new_choice.is_enabled().to_string(), - ) - }); - } - } -} - async fn llm_token_retry( llm_token: &LlmApiToken, client: &Arc, @@ -1343,24 +1310,23 @@ async fn llm_token_retry( pub struct ZetaEditPredictionProvider { zeta: Entity, + singleton_buffer: Option>, pending_completions: ArrayVec, next_pending_completion_id: usize, current_completion: Option, - /// None if this is entirely disabled for this provider - provider_data_collection: ProviderDataCollection, last_request_timestamp: Instant, } impl ZetaEditPredictionProvider { pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300); - pub fn new(zeta: Entity, provider_data_collection: ProviderDataCollection) -> Self { + pub fn new(zeta: Entity, singleton_buffer: Option>) -> Self { Self { zeta, + singleton_buffer, pending_completions: ArrayVec::new(), next_pending_completion_id: 0, current_completion: None, - provider_data_collection, last_request_timestamp: Instant::now(), } } @@ -1384,21 +1350,29 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { } fn data_collection_state(&self, cx: &App) -> DataCollectionState { - let is_project_open_source = self.provider_data_collection.is_project_open_source(); - - if self.provider_data_collection.is_data_collection_enabled(cx) { - DataCollectionState::Enabled { - is_project_open_source, + if let Some(buffer) = &self.singleton_buffer + && let Some(file) = buffer.read(cx).file() + { + let is_project_open_source = self.zeta.read(cx).is_file_open_source(file, cx); + 
if self.zeta.read(cx).data_collection_choice.is_enabled() { + DataCollectionState::Enabled { + is_project_open_source, + } + } else { + DataCollectionState::Disabled { + is_project_open_source, + } } } else { - DataCollectionState::Disabled { - is_project_open_source, - } + return DataCollectionState::Disabled { + is_project_open_source: false, + }; } } fn toggle_data_collection(&mut self, cx: &mut App) { - self.provider_data_collection.toggle(cx); + self.zeta + .update(cx, |zeta, cx| zeta.toggle_data_collection_choice(cx)); } fn usage(&self, cx: &App) -> Option { @@ -1456,7 +1430,6 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { let pending_completion_id = self.next_pending_completion_id; self.next_pending_completion_id += 1; - let can_collect_data = self.provider_data_collection.can_collect_data(cx); let last_request_timestamp = self.last_request_timestamp; let task = cx.spawn(async move |this, cx| { @@ -1469,7 +1442,7 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { let completion_request = this.update(cx, |this, cx| { this.last_request_timestamp = Instant::now(); this.zeta.update(cx, |zeta, cx| { - zeta.request_completion(&project, &buffer, position, can_collect_data, cx) + zeta.request_completion(&project, &buffer, position, cx) }) }); @@ -1638,10 +1611,11 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { } } -fn tokens_for_bytes(bytes: usize) -> usize { - /// Typical number of string bytes per token for the purposes of limiting model input. This is - /// intentionally low to err on the side of underestimating limits. - const BYTES_PER_TOKEN_GUESS: usize = 3; +/// Typical number of string bytes per token for the purposes of limiting model input. This is +/// intentionally low to err on the side of underestimating limits. 
+const BYTES_PER_TOKEN_GUESS: usize = 3; + +fn guess_token_count(bytes: usize) -> usize { bytes / BYTES_PER_TOKEN_GUESS } @@ -1654,11 +1628,15 @@ mod tests { use http_client::FakeHttpClient; use indoc::indoc; use language::Point; + use parking_lot::Mutex; + use serde_json::json; use settings::SettingsStore; use util::path; use super::*; + const BSD_0_TXT: &str = include_str!("../license_examples/0bsd.txt"); + #[gpui::test] async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx)); @@ -1778,77 +1756,65 @@ mod tests { #[gpui::test] async fn test_clean_up_diff(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - client::init_settings(cx); - Project::init_settings(cx); - }); + init_test(cx); - let edits = edits_for_prediction( - indoc! {" - fn main() { - let word_1 = \"lorem\"; - let range = word.len()..word.len(); - } - "}, + assert_eq!( + apply_edit_prediction( + indoc! {" + fn main() { + let word_1 = \"lorem\"; + let range = word.len()..word.len(); + } + "}, + indoc! {" + <|editable_region_start|> + fn main() { + let word_1 = \"lorem\"; + let range = word_1.len()..word_1.len(); + } + + <|editable_region_end|> + "}, + cx, + ) + .await, indoc! {" - <|editable_region_start|> fn main() { let word_1 = \"lorem\"; let range = word_1.len()..word_1.len(); } - - <|editable_region_end|> "}, - cx, - ) - .await; - assert_eq!( - edits, - [ - (Point::new(2, 20)..Point::new(2, 20), "_1".to_string()), - (Point::new(2, 32)..Point::new(2, 32), "_1".to_string()), - ] ); - let edits = edits_for_prediction( - indoc! {" - fn main() { - let story = \"the quick\" - } - "}, + assert_eq!( + apply_edit_prediction( + indoc! {" + fn main() { + let story = \"the quick\" + } + "}, + indoc! 
{" + <|editable_region_start|> + fn main() { + let story = \"the quick brown fox jumps over the lazy dog\"; + } + + <|editable_region_end|> + "}, + cx, + ) + .await, indoc! {" - <|editable_region_start|> fn main() { let story = \"the quick brown fox jumps over the lazy dog\"; } - - <|editable_region_end|> "}, - cx, - ) - .await; - assert_eq!( - edits, - [ - ( - Point::new(1, 26)..Point::new(1, 26), - " brown fox jumps over the lazy dog".to_string() - ), - (Point::new(1, 27)..Point::new(1, 27), ";".to_string()), - ] ); } #[gpui::test] async fn test_edit_prediction_end_of_buffer(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - client::init_settings(cx); - Project::init_settings(cx); - }); + init_test(cx); let buffer_content = "lorem\n"; let completion_response = indoc! {" @@ -1860,98 +1826,404 @@ mod tests { <|editable_region_end|> ```"}; - let http_client = FakeHttpClient::create(move |req| async move { - match (req.method(), req.uri().path()) { - (&Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder() - .status(200) - .body( - serde_json::to_string(&CreateLlmTokenResponse { - token: LlmToken("the-llm-token".to_string()), - }) - .unwrap() - .into(), - ) - .unwrap()), - (&Method::POST, "/predict_edits/v2") => Ok(http_client::Response::builder() - .status(200) - .body( - serde_json::to_string(&PredictEditsResponse { - request_id: Uuid::parse_str("7e86480f-3536-4d2c-9334-8213e3445d45") - .unwrap(), - output_excerpt: completion_response.to_string(), - }) - .unwrap() - .into(), - ) - .unwrap()), - _ => Ok(http_client::Response::builder() - .status(404) - .body("Not Found".into()) - .unwrap()), - } + assert_eq!( + apply_edit_prediction(buffer_content, completion_response, cx).await, + "lorem\nipsum" + ); + } + + #[gpui::test] + async fn test_can_collect_data(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + 
fs.insert_tree(path!("/project"), json!({ "LICENSE": BSD_0_TXT })) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/src/main.rs"), cx) + }) + .await + .unwrap(); + + let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Enabled }); - let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); - cx.update(|cx| { - RefreshLlmTokenListener::register(client.clone(), cx); + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); + + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Disabled }); - // Construct the fake server to authenticate. - let _server = FakeServer::for_client(42, &client, cx).await; + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + } + + #[gpui::test] + async fn test_no_data_collection_for_remote_file(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let buffer = cx.new(|_cx| { + Buffer::remote( + language::BufferId::new(1).unwrap(), + 1, + language::Capability::ReadWrite, + "fn main() {\n println!(\"Hello\");\n}", + ) + }); + + let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + } + + #[gpui::test] + async fn test_no_data_collection_for_private_file(cx: &mut TestAppContext) { + 
init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "LICENSE": BSD_0_TXT, + ".env": "SECRET_KEY=secret" + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); - let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/project/.env", cx) + }) + .await + .unwrap(); - let zeta = cx.new(|cx| Zeta::new(client, project.read(cx).user_store(), cx)); - let completion_task = zeta.update(cx, |zeta, cx| { - zeta.request_completion(&project, &buffer, cursor, false, cx) + let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + } + + #[gpui::test] + async fn test_no_data_collection_for_untitled_buffer(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + let buffer = cx.new(|cx| Buffer::local("", cx)); + + let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + } + + #[gpui::test] + async fn test_no_data_collection_when_closed_source(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree(path!("/project"), json!({ "main.rs": "fn main() {}" })) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], 
cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer("/project/main.rs", cx) + }) + .await + .unwrap(); + + let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + } + + #[gpui::test] + async fn test_data_collection_status_changes_on_move(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/open_source_worktree"), + json!({ "LICENSE": BSD_0_TXT, "main.rs": "" }), + ) + .await; + fs.insert_tree(path!("/closed_source_worktree"), json!({ "main.rs": "" })) + .await; + + let project = Project::test( + fs.clone(), + [ + path!("/open_source_worktree").as_ref(), + path!("/closed_source_worktree").as_ref(), + ], + cx, + ) + .await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/open_source_worktree/main.rs"), cx) + }) + .await + .unwrap(); + + let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Enabled }); - let completion = completion_task.await.unwrap().unwrap(); + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); + + let closed_source_file = project + .update(cx, |project, cx| { + let worktree2 = project + .worktree_for_root_name("closed_source_worktree", cx) + .unwrap(); + worktree2.update(cx, |worktree2, cx| { + worktree2.load_file(Path::new("main.rs"), cx) + }) + }) + .await + .unwrap() + .file; + buffer.update(cx, |buffer, cx| { - buffer.edit(completion.edits.iter().cloned(), None, cx) + buffer.file_updated(closed_source_file, cx); }); + + 
run_edit_prediction(&buffer, &project, &zeta, cx).await; assert_eq!( - buffer.read_with(cx, |buffer, _| buffer.text()), - "lorem\nipsum" + captured_request.lock().clone().unwrap().can_collect_data, + false ); } - async fn edits_for_prediction( + #[gpui::test] + async fn test_no_data_collection_for_events_in_uncollectable_buffers(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/worktree1"), + json!({ "LICENSE": BSD_0_TXT, "main.rs": "", "other.rs": "" }), + ) + .await; + fs.insert_tree(path!("/worktree2"), json!({ "private.rs": "" })) + .await; + + let project = Project::test( + fs.clone(), + [path!("/worktree1").as_ref(), path!("/worktree2").as_ref()], + cx, + ) + .await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/worktree1/main.rs"), cx) + }) + .await + .unwrap(); + let private_buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/worktree2/file.rs"), cx) + }) + .await + .unwrap(); + + let (zeta, captured_request, _) = make_test_zeta(&project, cx).await; + zeta.update(cx, |zeta, _cx| { + zeta.data_collection_choice = DataCollectionChoice::Enabled + }); + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); + + // this has a side effect of registering the buffer to watch for edits + run_edit_prediction(&private_buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + + private_buffer.update(cx, |private_buffer, cx| { + private_buffer.edit([(0..0, "An edit for the history!")], None, cx); + }); + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + false + ); + + // make an edit that uses too many bytes, causing private_buffer edit to not be able to be + // included + 
buffer.update(cx, |buffer, cx| { + buffer.edit( + [(0..0, " ".repeat(MAX_EVENT_TOKENS * BYTES_PER_TOKEN_GUESS))], + None, + cx, + ); + }); + + run_edit_prediction(&buffer, &project, &zeta, cx).await; + assert_eq!( + captured_request.lock().clone().unwrap().can_collect_data, + true + ); + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + language::init(cx); + client::init_settings(cx); + Project::init_settings(cx); + }); + } + + async fn apply_edit_prediction( buffer_content: &str, completion_response: &str, cx: &mut TestAppContext, - ) -> Vec<(Range, String)> { - let completion_response = completion_response.to_string(); - let http_client = FakeHttpClient::create(move |req| { - let completion = completion_response.clone(); - async move { - match (req.method(), req.uri().path()) { - (&Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder() - .status(200) - .body( - serde_json::to_string(&CreateLlmTokenResponse { - token: LlmToken("the-llm-token".to_string()), - }) - .unwrap() - .into(), - ) - .unwrap()), - (&Method::POST, "/predict_edits/v2") => Ok(http_client::Response::builder() - .status(200) - .body( - serde_json::to_string(&PredictEditsResponse { - request_id: Uuid::new_v4(), - output_excerpt: completion, - }) - .unwrap() - .into(), - ) - .unwrap()), - _ => Ok(http_client::Response::builder() - .status(404) - .body("Not Found".into()) - .unwrap()), + ) -> String { + let fs = project::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); + let (zeta, _, response) = make_test_zeta(&project, cx).await; + *response.lock() = completion_response.to_string(); + let edit_prediction = run_edit_prediction(&buffer, &project, &zeta, cx).await; + buffer.update(cx, |buffer, cx| { + buffer.edit(edit_prediction.edits.iter().cloned(), None, cx) 
+ }); + buffer.read_with(cx, |buffer, _| buffer.text()) + } + + async fn run_edit_prediction( + buffer: &Entity, + project: &Entity, + zeta: &Entity, + cx: &mut TestAppContext, + ) -> EditPrediction { + let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); + zeta.update(cx, |zeta, cx| zeta.register_buffer(buffer, &project, cx)); + cx.background_executor.run_until_parked(); + let completion_task = zeta.update(cx, |zeta, cx| { + zeta.request_completion(&project, buffer, cursor, cx) + }); + completion_task.await.unwrap().unwrap() + } + + async fn make_test_zeta( + project: &Entity, + cx: &mut TestAppContext, + ) -> ( + Entity, + Arc>>, + Arc>, + ) { + let default_response = indoc! {" + ```main.rs + <|start_of_file|> + <|editable_region_start|> + hello world + <|editable_region_end|> + ```" + }; + let captured_request: Arc>> = Arc::new(Mutex::new(None)); + let completion_response: Arc> = + Arc::new(Mutex::new(default_response.to_string())); + let http_client = FakeHttpClient::create({ + let captured_request = captured_request.clone(); + let completion_response = completion_response.clone(); + move |req| { + let captured_request = captured_request.clone(); + let completion_response = completion_response.clone(); + async move { + match (req.method(), req.uri().path()) { + (&Method::POST, "/client/llm_tokens") => { + Ok(http_client::Response::builder() + .status(200) + .body( + serde_json::to_string(&CreateLlmTokenResponse { + token: LlmToken("the-llm-token".to_string()), + }) + .unwrap() + .into(), + ) + .unwrap()) + } + (&Method::POST, "/predict_edits/v2") => { + let mut request_body = String::new(); + req.into_body().read_to_string(&mut request_body).await?; + *captured_request.lock() = + Some(serde_json::from_str(&request_body).unwrap()); + Ok(http_client::Response::builder() + .status(200) + .body( + serde_json::to_string(&PredictEditsResponse { + request_id: Uuid::new_v4(), + output_excerpt: completion_response.lock().clone(), + }) + 
.unwrap() + .into(), + ) + .unwrap()) + } + _ => Ok(http_client::Response::builder() + .status(404) + .body("Not Found".into()) + .unwrap()), + } } } }); @@ -1960,25 +2232,23 @@ mod tests { cx.update(|cx| { RefreshLlmTokenListener::register(client.clone(), cx); }); - // Construct the fake server to authenticate. let _server = FakeServer::for_client(42, &client, cx).await; - let fs = project::FakeFs::new(cx.executor()); - let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; - let buffer = cx.new(|cx| Buffer::local(buffer_content, cx)); - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let cursor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(1, 0))); - let zeta = cx.new(|cx| Zeta::new(client, project.read(cx).user_store(), cx)); - let completion_task = zeta.update(cx, |zeta, cx| { - zeta.request_completion(&project, &buffer, cursor, false, cx) + let zeta = cx.new(|cx| { + let mut zeta = Zeta::new(client, project.read(cx).user_store(), cx); + + let worktrees = project.read(cx).worktrees(cx).collect::>(); + for worktree in worktrees { + let worktree_id = worktree.read(cx).id(); + zeta.license_detection_watchers + .entry(worktree_id) + .or_insert_with(|| Rc::new(LicenseDetectionWatcher::new(&worktree, cx))); + } + + zeta }); - let completion = completion_task.await.unwrap().unwrap(); - completion - .edits - .iter() - .map(|(old_range, new_text)| (old_range.to_point(&snapshot), new_text.clone())) - .collect::>() + (zeta, captured_request, completion_response) } fn to_completion_edits( diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index e66eeed80920a0c31c5c06e119e17d418fbc294c..e7cec26b19358056cee4c8e253c54c0b2c794b33 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -189,30 +189,17 @@ async fn get_context( Some(events) => events.read_to_string().await?, None => String::new(), }; - // Enable gathering extra data not currently needed for edit 
predictions - let can_collect_data = true; - let git_info = None; - let mut gather_context_output = cx - .update(|cx| { - gather_context( - &project, - full_path_str, - &snapshot, - clipped_cursor, - move || events, - can_collect_data, - git_info, - cx, - ) - })? - .await; - - // Disable data collection for these requests, as this is currently just used for evals - if let Ok(gather_context_output) = gather_context_output.as_mut() { - gather_context_output.body.can_collect_data = false - } - - gather_context_output + let prompt_for_events = move || (events, 0); + cx.update(|cx| { + gather_context( + full_path_str, + &snapshot, + clipped_cursor, + prompt_for_events, + cx, + ) + })? + .await } pub async fn open_buffer_with_language_server( From 69bdef38ecef673609e1acf1e7a6d79f5d4d44d3 Mon Sep 17 00:00:00 2001 From: Liu Jinyi Date: Mon, 8 Sep 2025 02:33:17 +0800 Subject: [PATCH 061/109] editor: Fix inconsistent search behavior for untitled/temporary tabs (#37086) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #37597 Release Notes: - N/A --- ## Problem When using "Tab Switcher: Toggle All", temporary files (untitled buffers without associated file paths) cannot be searched by their displayed content. This creates an inconsistent user experience where: - **UI Display**: Shows dynamic titles based on the first line of content (up to 40 characters) - **Search Text**: Only searches for the static text "untitled" ### Example - A temporary file containing `Hello World` is displayed as "Hello World" in the tab - However, searching for "Hello" in Tab Switcher returns no results - Only searching for "untitled" will find this temporary file ## Root Cause The issue stems from inconsistent title generation logic between display and search: 1. **Display Title** (`items.rs:724`): Uses `self.title(cx)` → `MultiBuffer::title()` → `buffer_content_title()` - Returns the first line of content (max 40 chars) for temporary files 2. 
**Search Text** (`items.rs:650-656`): Uses `tab_content_text()` method - Returns hardcoded "untitled" for files without paths ## Solution Modified the `tab_content_text()` method in `crates/editor/src/items.rs` to use the same logic as the displayed title for consistency: ```rust fn tab_content_text(&self, detail: usize, cx: &App) -> SharedString { if let Some(path) = path_for_buffer(&self.buffer, detail, true, cx) { path.to_string_lossy().to_string().into() } else { // Use the same logic as the displayed title for consistency self.buffer.read(cx).title(cx).to_string().into() } } ``` --- crates/editor/src/items.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 8a07939cf47529d6a7d94b20bd22d7278b3e9d24..48c3a8a41a802392b7dc20d5b935bc7acc3bc10a 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -651,7 +651,8 @@ impl Item for Editor { if let Some(path) = path_for_buffer(&self.buffer, detail, true, cx) { path.to_string_lossy().to_string().into() } else { - "untitled".into() + // Use the same logic as the displayed title for consistency + self.buffer.read(cx).title(cx).to_string().into() } } From 9450bcad25156f373e3b81a7a13d3ae5211a0537 Mon Sep 17 00:00:00 2001 From: marius851000 Date: Mon, 8 Sep 2025 06:26:01 +0200 Subject: [PATCH 062/109] ollama: Properly format tool calls fed back to the model (#34750) Fix an issue that resulted in Ollama models not being able to access the input of the commands they executed (only being able to access the result). This properly returns the function history as shown in https://github.com/ollama/ollama/blob/main/docs/api.md#chat-request-with-history-with-tools Previously, function inputs were not returned and results were returned as a "user" role.
Release Notes: - ollama: Improved format when returning tool results to the models --- crates/language_models/src/provider/ollama.rs | 121 +++++++++++------- crates/ollama/src/ollama.rs | 4 + 2 files changed, 79 insertions(+), 46 deletions(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 8975115d907875569f63e4247cf7edcdbcb91f8a..a80cacfc4a02521af74b32c34cc3360e9665a7d9 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -11,8 +11,8 @@ use language_model::{ LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage, }; use ollama::{ - ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OllamaFunctionTool, - OllamaToolCall, get_models, show_model, stream_chat_completion, + ChatMessage, ChatOptions, ChatRequest, ChatResponseDelta, KeepAlive, OllamaFunctionCall, + OllamaFunctionTool, OllamaToolCall, get_models, show_model, stream_chat_completion, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -282,59 +282,85 @@ impl OllamaLanguageModel { fn to_ollama_request(&self, request: LanguageModelRequest) -> ChatRequest { let supports_vision = self.model.supports_vision.unwrap_or(false); - ChatRequest { - model: self.model.name.clone(), - messages: request - .messages - .into_iter() - .map(|msg| { - let images = if supports_vision { - msg.content - .iter() - .filter_map(|content| match content { - MessageContent::Image(image) => Some(image.source.to_string()), - _ => None, - }) - .collect::>() - } else { - vec![] - }; - - match msg.role { - Role::User => ChatMessage::User { + let mut messages = Vec::with_capacity(request.messages.len()); + + for mut msg in request.messages.into_iter() { + let images = if supports_vision { + msg.content + .iter() + .filter_map(|content| match content { + MessageContent::Image(image) => Some(image.source.to_string()), + _ => None, + }) + .collect::>() + } else 
{ + vec![] + }; + + match msg.role { + Role::User => { + for tool_result in msg + .content + .extract_if(.., |x| matches!(x, MessageContent::ToolResult(..))) + { + match tool_result { + MessageContent::ToolResult(tool_result) => { + messages.push(ChatMessage::Tool { + tool_name: tool_result.tool_name.to_string(), + content: tool_result.content.to_str().unwrap_or("").to_string(), + }) + } + _ => unreachable!("Only tool result should be extracted"), + } + } + if !msg.content.is_empty() { + messages.push(ChatMessage::User { content: msg.string_contents(), images: if images.is_empty() { None } else { Some(images) }, - }, - Role::Assistant => { - let content = msg.string_contents(); - let thinking = - msg.content.into_iter().find_map(|content| match content { - MessageContent::Thinking { text, .. } if !text.is_empty() => { - Some(text) - } - _ => None, - }); - ChatMessage::Assistant { - content, - tool_calls: None, - images: if images.is_empty() { - None - } else { - Some(images) - }, - thinking, + }) + } + } + Role::Assistant => { + let content = msg.string_contents(); + let mut thinking = None; + let mut tool_calls = Vec::new(); + for content in msg.content.into_iter() { + match content { + MessageContent::Thinking { text, .. 
} if !text.is_empty() => { + thinking = Some(text) } + MessageContent::ToolUse(tool_use) => { + tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall { + name: tool_use.name.to_string(), + arguments: tool_use.input, + })); + } + _ => (), } - Role::System => ChatMessage::System { - content: msg.string_contents(), - }, } - }) - .collect(), + messages.push(ChatMessage::Assistant { + content, + tool_calls: Some(tool_calls), + images: if images.is_empty() { + None + } else { + Some(images) + }, + thinking, + }) + } + Role::System => messages.push(ChatMessage::System { + content: msg.string_contents(), + }), + } + } + ChatRequest { + model: self.model.name.clone(), + messages, keep_alive: self.model.keep_alive.clone().unwrap_or_default(), stream: true, options: Some(ChatOptions { @@ -483,6 +509,9 @@ fn map_to_language_model_completion_events( ChatMessage::System { content } => { events.push(Ok(LanguageModelCompletionEvent::Text(content))); } + ChatMessage::Tool { content, .. } => { + events.push(Ok(LanguageModelCompletionEvent::Text(content))); + } ChatMessage::Assistant { content, tool_calls, diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 64cd1cc0cbc06607ee9b3b72ee81cbeb9489c344..3c935d2152556393829f648abe31a717b239ce76 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -117,6 +117,10 @@ pub enum ChatMessage { System { content: String, }, + Tool { + tool_name: String, + content: String, + }, } #[derive(Serialize, Deserialize, Debug)] From b35959f4c2e5b1550a32ce8fc0dff57415eb11bc Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Mon, 8 Sep 2025 10:20:26 +0530 Subject: [PATCH 063/109] agent_ui: Fix `context_server` duplication when name is updated (#35403) Closes #35400 | Before | After | |--------|--------| |