diff --git a/Cargo.lock b/Cargo.lock
index ffc66d93a070deb136c59465362a4c761165a449..609edba3ec57bbba341799afbfecbcbf22b1181a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -892,6 +892,7 @@ dependencies = [
  "serde",
  "serde_json",
  "ui",
+ "util",
  "workspace",
  "workspace-hack",
 ]
@@ -12094,7 +12095,6 @@ dependencies = [
  "markdown",
  "node_runtime",
  "parking_lot",
- "pathdiff",
  "paths",
  "postage",
  "prettier",
@@ -12147,7 +12147,6 @@ dependencies = [
  "git",
  "git_ui",
  "gpui",
- "indexmap 2.9.0",
  "language",
  "menu",
  "pretty_assertions",
diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs
index f2327ca70b104de12f44d74aacd1a5a2bb1eca3b..b63e9a2a8403a3e271f2d1f9e07ba3dddfb3cd53 100644
--- a/crates/acp_thread/src/acp_thread.rs
+++ b/crates/acp_thread/src/acp_thread.rs
@@ -573,7 +573,7 @@ impl ToolCallContent {
             ))),
             acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
                 Diff::finalized(
-                    diff.path,
+                    diff.path.to_string_lossy().to_string(),
                     diff.old_text,
                     diff.new_text,
                     language_registry,
diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs
index f75af0543e373b47b0c6de36760ba18b5d9da318..753f157af934d6c92fcd6766aa645ec45c6b22f7 100644
--- a/crates/acp_thread/src/diff.rs
+++ b/crates/acp_thread/src/diff.rs
@@ -6,12 +6,7 @@ use itertools::Itertools;
 use language::{
     Anchor, Buffer, Capability, LanguageRegistry, OffsetRangeExt as _, Point, Rope, TextBuffer,
 };
-use std::{
-    cmp::Reverse,
-    ops::Range,
-    path::{Path, PathBuf},
-    sync::Arc,
-};
+use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
 use util::ResultExt;
 
 pub enum Diff {
@@ -21,7 +16,7 @@ impl Diff {
     pub fn finalized(
-        path: PathBuf,
+        path: String,
         old_text: Option<String>,
         new_text: String,
         language_registry: Arc<LanguageRegistry>,
@@ -36,7 +31,7 @@ impl Diff {
         let buffer = new_buffer.clone();
         async move |_, cx| {
             let language = language_registry
-                .language_for_file_path(&path)
+                .language_for_file_path(Path::new(&path))
                 .await
                 .log_err();
@@ -152,12 +147,15 @@ impl Diff {
         let path = match self {
             Diff::Pending(PendingDiff {
                 new_buffer: buffer, ..
-            }) => buffer.read(cx).file().map(|file| file.path().as_ref()),
-            Diff::Finalized(FinalizedDiff { path, .. }) => Some(path.as_path()),
+            }) => buffer
+                .read(cx)
+                .file()
+                .map(|file| file.path().display(file.path_style(cx))),
+            Diff::Finalized(FinalizedDiff { path, .. }) => Some(path.as_str().into()),
         };
         format!(
             "Diff: {}\n```\n{}\n```\n",
-            path.unwrap_or(Path::new("untitled")).display(),
+            path.unwrap_or("untitled".into()),
             buffer_text
         )
     }
@@ -244,8 +242,8 @@ impl PendingDiff {
             .new_buffer
             .read(cx)
             .file()
-            .map(|file| file.path().as_ref())
-            .unwrap_or(Path::new("untitled"))
+            .map(|file| file.path().display(file.path_style(cx)))
+            .unwrap_or("untitled".into())
             .into();
 
         // Replace the buffer in the multibuffer with the snapshot
@@ -348,7 +346,7 @@ impl PendingDiff {
 }
 
 pub struct FinalizedDiff {
-    path: PathBuf,
+    path: String,
     base_text: Arc,
     new_buffer: Entity,
     multibuffer: Entity,
diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs
index 11ba596ac5a0ecd4ed49744d0eafa9defcde20c1..b7722f211afda3a77bc96292a50acf869e7424d6 100644
--- a/crates/action_log/src/action_log.rs
+++ b/crates/action_log/src/action_log.rs
@@ -8,10 +8,7 @@ use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
 use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
 use std::{cmp, ops::Range, sync::Arc};
 use text::{Edit, Patch, Rope};
-use util::{
-    RangeExt, ResultExt as _,
-    paths::{PathStyle, RemotePathBuf},
-};
+use util::{RangeExt, ResultExt as _};
 
 /// Tracks actions performed by tools in a thread
 pub struct ActionLog {
@@ -62,7 +59,13 @@ impl ActionLog {
         let file_path = buffer
             .read(cx)
             .file()
-            .map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
+            .map(|file| {
+                let mut path = file.full_path(cx).to_string_lossy().into_owned();
+                if file.path_style(cx).is_windows() {
+                    path = path.replace('\\', "/");
+                }
+                path
+            })
             .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
 
         let mut result = String::new();
@@ -2301,7 +2304,7 @@ mod tests {
             .await;
         fs.set_head_for_repo(
             path!("/project/.git").as_ref(),
-            &[("file.txt".into(), "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
+            &[("file.txt", "a\nb\nc\nd\ne\nf\ng\nh\ni\nj".into())],
             "0000000",
         );
         cx.run_until_parked();
@@ -2384,7 +2387,7 @@ mod tests {
         // - Ignores the last line edit (j stays as j)
         fs.set_head_for_repo(
             path!("/project/.git").as_ref(),
-            &[("file.txt".into(), "A\nb\nc\nf\nG\nh\ni\nj".into())],
+            &[("file.txt", "A\nb\nc\nf\nG\nh\ni\nj".into())],
             "0000001",
         );
         cx.run_until_parked();
@@ -2415,10 +2418,7 @@ mod tests {
         // Make another commit that accepts the NEW line but with different content
         fs.set_head_for_repo(
             path!("/project/.git").as_ref(),
-            &[(
-                "file.txt".into(),
-                "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into(),
-            )],
+            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nDIFFERENT\ni\nj".into())],
            "0000002",
         );
         cx.run_until_parked();
@@ -2444,7 +2444,7 @@ mod tests {
         // Final commit that accepts all remaining edits
         fs.set_head_for_repo(
             path!("/project/.git").as_ref(),
-            &[("file.txt".into(), "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
+            &[("file.txt", "A\nb\nc\nf\nGGG\nh\nNEW\ni\nJ".into())],
             "0000003",
         );
         cx.run_until_parked();
diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs
index 7e3590f05df18d258fae91fd8aa596c07c5fb516..9cd2a93d9bfc9a8a1940fea150f651b60f1a1073 100644
--- a/crates/agent/src/agent.rs
+++ b/crates/agent/src/agent.rs
@@ -9,12 +9,14 @@ pub mod tool_use;
 
 pub use context::{AgentContext, ContextId, ContextLoadResult};
 pub use context_store::ContextStore;
+use fs::Fs;
+use std::sync::Arc;
 pub use thread::{
     LastRestoreCheckpoint, Message, MessageCrease, MessageId, MessageSegment, Thread, ThreadError,
     ThreadEvent, ThreadFeedback, ThreadId, ThreadSummary, TokenUsageRatio,
 };
 pub use thread_store::{SerializedThread, TextThreadStore, ThreadStore};
 
-pub fn init(cx: &mut gpui::App) {
-    thread_store::init(cx);
+pub fn init(fs: Arc<dyn Fs>, cx: &mut gpui::App) {
+    thread_store::init(fs, cx);
 }
diff --git a/crates/agent/src/context.rs b/crates/agent/src/context.rs
index 4510b0d3d3548b3ff807a3e549a9f2dc53951452..fdcf0316a04eb493fb03c14d2c5891fb86ab72c5 100644
--- a/crates/agent/src/context.rs
+++ b/crates/agent/src/context.rs
@@ -18,6 +18,7 @@ use std::path::PathBuf;
 use std::{ops::Range, path::Path, sync::Arc};
 use text::{Anchor, OffsetRangeExt as _};
 use util::markdown::MarkdownCodeBlock;
+use util::rel_path::RelPath;
 use util::{ResultExt as _, post_inc};
 
 pub const RULES_ICON: IconName = IconName::Reader;
@@ -242,7 +243,7 @@ pub struct DirectoryContext {
 #[derive(Debug, Clone)]
 pub struct DirectoryContextDescendant {
     /// Path within the directory.
-    pub rel_path: Arc<Path>,
+    pub rel_path: Arc<RelPath>,
     pub fenced_codeblock: SharedString,
 }
 
@@ -968,7 +969,7 @@ pub fn load_context(
     })
 }
 
-fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<Arc<Path>> {
+fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<Arc<RelPath>> {
     let mut files = Vec::new();
 
     for entry in worktree.child_entries(path) {
diff --git a/crates/agent/src/context_store.rs b/crates/agent/src/context_store.rs
index b531852a184ffeaf86862990f03210ceb6033395..9b9653700005306db07e38e6b74a9d1a585fd2b2 100644
--- a/crates/agent/src/context_store.rs
+++ b/crates/agent/src/context_store.rs
@@ -14,7 +14,10 @@ use futures::{self, FutureExt};
 use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity};
 use language::{Buffer, File as _};
 use language_model::LanguageModelImage;
-use project::{Project, ProjectItem, ProjectPath, Symbol, image_store::is_image_file};
+use project::{
+    Project, ProjectItem, ProjectPath, Symbol, image_store::is_image_file,
+    lsp_store::SymbolLocation,
+};
 use prompt_store::UserPromptId;
 use ref_cast::RefCast as _;
 use std::{
@@ -500,7 +503,7 @@ impl ContextStore {
                 let Some(context_path) = buffer.project_path(cx) else {
                     return false;
                 };
-                if context_path != symbol.path {
+                if symbol.path != SymbolLocation::InProject(context_path) {
                     return false;
                 }
                 let context_range = context.range.to_point_utf16(&buffer.snapshot());
diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs
index 8b9d489ccf472ca16435934e48a12b70dc783c40..0790e8b2894dc5495cd46585a315696118afd33a 100644
--- a/crates/agent/src/thread.rs
+++ b/crates/agent/src/thread.rs
@@ -234,7 +234,6 @@ impl MessageSegment {
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
 pub struct ProjectSnapshot {
     pub worktree_snapshots: Vec<WorktreeSnapshot>,
-    pub unsaved_buffer_paths: Vec<String>,
     pub timestamp: DateTime<Utc>,
 }
 
@@ -2857,27 +2856,11 @@ impl Thread {
             .map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx))
             .collect();
 
-        cx.spawn(async move |_, cx| {
+        cx.spawn(async move |_, _| {
             let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;
 
-            let mut unsaved_buffers = Vec::new();
-            cx.update(|app_cx| {
-                let buffer_store = project.read(app_cx).buffer_store();
-                for buffer_handle in buffer_store.read(app_cx).buffers() {
-                    let buffer = buffer_handle.read(app_cx);
-                    if buffer.is_dirty()
-                        && let Some(file) = buffer.file()
-                    {
-                        let path = file.path().to_string_lossy().to_string();
-                        unsaved_buffers.push(path);
-                    }
-                }
-            })
-            .ok();
-
             Arc::new(ProjectSnapshot {
                 worktree_snapshots,
-                unsaved_buffer_paths: unsaved_buffers,
                 timestamp: Utc::now(),
             })
         })
@@ -3275,6 +3258,7 @@ mod tests {
     use agent_settings::{AgentProfileId, AgentSettings};
     use assistant_tool::ToolRegistry;
     use assistant_tools;
+    use fs::Fs;
     use futures::StreamExt;
     use futures::future::BoxFuture;
     use futures::stream::BoxStream;
@@ -3298,9 +3282,10 @@ mod tests {
 
     #[gpui::test]
     async fn test_message_with_context(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
         let project = create_test_project(
+            &fs,
             cx,
             json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
         )
         .await;
@@ -3375,9 +3360,10 @@ fn main() {{
 
     #[gpui::test]
     async fn test_only_include_new_contexts(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
         let project = create_test_project(
+            &fs,
             cx,
             json!({
                 "file1.rs": "fn function1() {}\n",
@@ -3531,9 +3517,10 @@ fn main() {{
 
     #[gpui::test]
     async fn test_message_without_files(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
         let project = create_test_project(
+            &fs,
             cx,
             json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
         )
@@ -3610,9 +3597,10 @@ fn main() {{
     #[gpui::test]
     #[ignore] // turn this test on when project_notifications tool is re-enabled
     async fn test_stale_buffer_notification(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
         let project = create_test_project(
+            &fs,
             cx,
             json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
         )
@@ -3738,9 +3726,10 @@ fn main() {{
 
     #[gpui::test]
     async fn test_storing_profile_setting_per_thread(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
         let project = create_test_project(
+            &fs,
             cx,
             json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
         )
@@ -3760,9 +3749,10 @@ fn main() {{
 
     #[gpui::test]
     async fn test_serializing_thread_profile(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
         let project = create_test_project(
+            &fs,
             cx,
             json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
         )
@@ -3803,9 +3793,10 @@ fn main() {{
 
     #[gpui::test]
     async fn test_temperature_setting(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
         let project = create_test_project(
+            &fs,
             cx,
             json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
         )
@@ -3897,9 +3888,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_thread_summary(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _thread_store, thread, _context_store, model) = setup_test_environment(cx, project.clone()).await;
@@ -3982,9 +3973,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_thread_summary_error_set_manually(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _thread_store, thread, _context_store, model) = setup_test_environment(cx, project.clone()).await;
@@ -4004,9 +3995,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_thread_summary_error_retry(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _thread_store, thread, _context_store, model) = setup_test_environment(cx, project.clone()).await;
@@ -4158,9 +4149,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_retry_on_overloaded_error(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -4236,9 +4227,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_retry_on_internal_server_error(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -4318,9 +4309,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_exponential_backoff_on_retries(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -4438,9 +4429,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_max_retries_exceeded(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -4529,9 +4520,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_retry_message_removed_on_retry(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -4702,9 +4693,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_successful_completion_clears_retry_state(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -4868,9 +4859,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_rate_limit_retry_single_attempt(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -5053,9 +5044,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_ui_only_messages_not_sent_to_model(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, model) = setup_test_environment(cx, project.clone()).await;
 
         // Insert a regular user message
@@ -5153,9 +5144,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_no_retry_without_burn_mode(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Ensure we're in Normal mode (not Burn mode)
@@ -5226,9 +5217,9 @@ fn main() {{
 
     #[gpui::test]
     async fn test_retry_canceled_on_stop(cx: &mut TestAppContext) {
-        init_test_settings(cx);
+        let fs = init_test_settings(cx);
 
-        let project = create_test_project(cx, json!({})).await;
+        let project = create_test_project(&fs, cx, json!({})).await;
 
         let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
 
         // Enable Burn Mode to allow retries
@@ -5334,7 +5325,8 @@ fn main() {{
         cx.run_until_parked();
     }
 
-    fn init_test_settings(cx: &mut TestAppContext) {
+    fn init_test_settings(cx: &mut TestAppContext) -> Arc<dyn Fs> {
+        let fs = FakeFs::new(cx.executor());
         cx.update(|cx| {
             let settings_store = SettingsStore::test(cx);
             cx.set_global(settings_store);
@@ -5342,7 +5334,7 @@ fn main() {{
             Project::init_settings(cx);
             AgentSettings::register(cx);
             prompt_store::init(cx);
-            thread_store::init(cx);
+            thread_store::init(fs.clone(), cx);
             workspace::init_settings(cx);
             language_model::init_settings(cx);
             ThemeSettings::register(cx);
@@ -5356,16 +5348,17 @@ fn main() {{
             ));
             assistant_tools::init(http_client, cx);
         });
+        fs
     }
 
     // Helper to create a test project with test files
     async fn create_test_project(
+        fs: &Arc<dyn Fs>,
         cx: &mut TestAppContext,
         files: serde_json::Value,
     ) -> Entity<Project> {
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(path!("/test"), files).await;
-        Project::test(fs, [path!("/test").as_ref()], cx).await
+        fs.as_fake().insert_tree(path!("/test"), files).await;
+        Project::test(fs.clone(), [path!("/test").as_ref()], cx).await
     }
 
     async fn setup_test_environment(
diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs
index 2eae758b835d5d79ccf86f18be032f2d9bb87c2b..fe73b959b7fbe96f139a2b8267748c78cce88e2e 100644
--- a/crates/agent/src/thread_store.rs
+++ b/crates/agent/src/thread_store.rs
@@ -10,6 +10,7 @@ use assistant_tool::{Tool, ToolId, ToolWorkingSet};
 use chrono::{DateTime, Utc};
 use collections::HashMap;
 use context_server::ContextServerId;
+use fs::{Fs, RemoveOptions};
 use futures::{
     FutureExt as _, StreamExt as _,
     channel::{mpsc, oneshot},
@@ -39,7 +40,7 @@ use std::{
     rc::Rc,
     sync::{Arc, Mutex},
 };
-use util::ResultExt as _;
+use util::{ResultExt as _, rel_path::RelPath};
 use zed_env_vars::ZED_STATELESS;
 
@@ -85,8 +86,8 @@ const RULES_FILE_NAMES: [&str; 9] = [
     "GEMINI.md",
 ];
 
-pub fn init(cx: &mut App) {
-    ThreadsDatabase::init(cx);
+pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
+    ThreadsDatabase::init(fs, cx);
 }
 
 /// A system prompt shared by all threads created by this ThreadStore
@@ -234,7 +235,7 @@ impl ThreadStore {
                 if items.iter().any(|(path, _, _)| {
                     RULES_FILE_NAMES
                         .iter()
-                        .any(|name| path.as_ref() == Path::new(name))
+                        .any(|name| path.as_ref() == RelPath::new(name).unwrap())
                 }) {
                     self.enqueue_system_prompt_reload();
                 }
@@ -327,7 +328,7 @@ impl ThreadStore {
         cx: &mut App,
     ) -> Task<(WorktreeContext, Option<RulesFileContext>)> {
         let tree = worktree.read(cx);
-        let root_name = tree.root_name().into();
+        let root_name = tree.root_name_str().into();
         let abs_path = tree.abs_path();
 
         let mut context = WorktreeContext {
@@ -367,7 +368,7 @@ impl ThreadStore {
             .into_iter()
             .filter_map(|name| {
                 worktree
-                    .entry_for_path(name)
+                    .entry_for_path(RelPath::new(name).unwrap())
                     .filter(|entry| entry.is_file())
                     .map(|entry| entry.path.clone())
             })
@@ -869,13
 +870,13 @@ impl ThreadsDatabase {
         GlobalThreadsDatabase::global(cx).0.clone()
     }
 
-    fn init(cx: &mut App) {
+    fn init(fs: Arc<dyn Fs>, cx: &mut App) {
         let executor = cx.background_executor().clone();
         let database_future = executor
             .spawn({
                 let executor = executor.clone();
                 let threads_dir = paths::data_dir().join("threads");
-                async move { ThreadsDatabase::new(threads_dir, executor) }
+                async move { ThreadsDatabase::new(fs, threads_dir, executor).await }
             })
             .then(|result| future::ready(result.map(Arc::new).map_err(Arc::new)))
             .boxed()
@@ -884,13 +885,17 @@ impl ThreadsDatabase {
         cx.set_global(GlobalThreadsDatabase(database_future));
     }
 
-    pub fn new(threads_dir: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
-        std::fs::create_dir_all(&threads_dir)?;
+    pub async fn new(
+        fs: Arc<dyn Fs>,
+        threads_dir: PathBuf,
+        executor: BackgroundExecutor,
+    ) -> Result<Self> {
+        fs.create_dir(&threads_dir).await?;
 
         let sqlite_path = threads_dir.join("threads.db");
         let mdb_path = threads_dir.join("threads-db.1.mdb");
 
-        let needs_migration_from_heed = mdb_path.exists();
+        let needs_migration_from_heed = fs.is_file(&mdb_path).await;
 
         let connection = if *ZED_STATELESS {
             Connection::open_memory(Some("THREAD_FALLBACK_DB"))
@@ -932,7 +937,14 @@ impl ThreadsDatabase {
                 .spawn(async move {
                     log::info!("Starting threads.db migration");
                     Self::migrate_from_heed(&mdb_path, db_connection, executor_clone)?;
-                    std::fs::remove_dir_all(mdb_path)?;
+                    fs.remove_dir(
+                        &mdb_path,
+                        RemoveOptions {
+                            recursive: true,
+                            ignore_if_not_exists: true,
+                        },
+                    )
+                    .await?;
                     log::info!("threads.db migrated to sqlite");
                     Ok::<(), anyhow::Error>(())
                 })
diff --git a/crates/agent2/src/agent.rs b/crates/agent2/src/agent.rs
index 36ab1be9ef79221b530258c4fdd55be2ac1e8b29..f3324da448b40f6197b529f8915117df4ae6030e 100644
--- a/crates/agent2/src/agent.rs
+++ b/crates/agent2/src/agent.rs
@@ -27,6 +27,7 @@ use std::path::{Path, PathBuf};
 use std::rc::Rc;
 use std::sync::Arc;
 use util::ResultExt;
+use util::rel_path::RelPath;
 
 const RULES_FILE_NAMES: [&str; 9] = [
     ".rules",
@@ -434,7 +435,7 @@ impl NativeAgent {
         cx: &mut App,
     ) -> Task<(WorktreeContext, Option<RulesFileContext>)> {
         let tree = worktree.read(cx);
-        let root_name = tree.root_name().into();
+        let root_name = tree.root_name_str().into();
         let abs_path = tree.abs_path();
 
         let mut context = WorktreeContext {
@@ -474,7 +475,7 @@ impl NativeAgent {
             .into_iter()
             .filter_map(|name| {
                 worktree
-                    .entry_for_path(name)
+                    .entry_for_path(RelPath::new(name).unwrap())
                     .filter(|entry| entry.is_file())
                     .map(|entry| entry.path.clone())
             })
@@ -558,7 +559,7 @@ impl NativeAgent {
                 if items.iter().any(|(path, _, _)| {
                     RULES_FILE_NAMES
                         .iter()
-                        .any(|name| path.as_ref() == Path::new(name))
+                        .any(|name| path.as_ref() == RelPath::new(name).unwrap())
                 }) {
                     self.project_context_needs_refresh.send(()).ok();
                 }
@@ -1208,7 +1209,7 @@ mod tests {
     use language_model::fake_provider::FakeLanguageModel;
     use serde_json::json;
     use settings::SettingsStore;
-    use util::path;
+    use util::{path, rel_path::rel_path};
 
     #[gpui::test]
     async fn test_maintaining_project_context(cx: &mut TestAppContext) {
@@ -1258,14 +1259,17 @@ mod tests {
         fs.insert_file("/a/.rules", Vec::new()).await;
         cx.run_until_parked();
         agent.read_with(cx, |agent, cx| {
-            let rules_entry = worktree.read(cx).entry_for_path(".rules").unwrap();
+            let rules_entry = worktree
+                .read(cx)
+                .entry_for_path(rel_path(".rules"))
+                .unwrap();
             assert_eq!(
                 agent.project_context.read(cx).worktrees,
                 vec![WorktreeContext {
                     root_name: "a".into(),
                     abs_path: Path::new("/a").into(),
                     rules_file: Some(RulesFileContext {
-                        path_in_worktree: Path::new(".rules").into(),
+                        path_in_worktree: rel_path(".rules").into(),
                         text: "".into(),
                         project_entry_id: rules_entry.id.to_usize()
                     })
diff --git a/crates/agent2/src/db.rs b/crates/agent2/src/db.rs
index 3be37bbb55a9b6820b59245ba05143e3432ab397..563ccdd7ca5b2c2cc63a8c7f30c59b9443f8a0bd 100644
--- a/crates/agent2/src/db.rs
+++ b/crates/agent2/src/db.rs
@@ -422,17 +422,15 @@ mod tests {
     use agent::MessageSegment;
     use agent::context::LoadedContext;
     use client::Client;
-    use fs::FakeFs;
+    use fs::{FakeFs, Fs};
     use gpui::AppContext;
     use gpui::TestAppContext;
     use http_client::FakeHttpClient;
     use language_model::Role;
     use project::Project;
-    use serde_json::json;
     use settings::SettingsStore;
-    use util::test::TempTree;
 
-    fn init_test(cx: &mut TestAppContext) {
+    fn init_test(fs: Arc<dyn Fs>, cx: &mut TestAppContext) {
         env_logger::try_init().ok();
         cx.update(|cx| {
             let settings_store = SettingsStore::test(cx);
@@ -443,7 +441,7 @@ mod tests {
             let http_client = FakeHttpClient::with_404_response();
             let clock = Arc::new(clock::FakeSystemClock::new());
             let client = Client::new(clock, http_client, cx);
-            agent::init(cx);
+            agent::init(fs, cx);
             agent_settings::init(cx);
             language_model::init(client, cx);
         });
@@ -451,10 +449,8 @@ mod tests {
 
     #[gpui::test]
     async fn test_retrieving_old_thread(cx: &mut TestAppContext) {
-        let tree = TempTree::new(json!({}));
-        util::paths::set_home_dir(tree.path().into());
-        init_test(cx);
         let fs = FakeFs::new(cx.executor());
+        init_test(fs.clone(), cx);
         let project = Project::test(fs, [], cx).await;
 
         // Save a thread using the old agent.
diff --git a/crates/agent2/src/thread.rs b/crates/agent2/src/thread.rs
index 18f993cbe33ca8bffc2235906baf76c627da0030..630b4f906904df2410716f35ef7d15aa0f706e8b 100644
--- a/crates/agent2/src/thread.rs
+++ b/crates/agent2/src/thread.rs
@@ -879,27 +879,11 @@ impl Thread {
             .map(|worktree| Self::worktree_snapshot(worktree, git_store.clone(), cx))
             .collect();
 
-        cx.spawn(async move |_, cx| {
+        cx.spawn(async move |_, _| {
             let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;
 
-            let mut unsaved_buffers = Vec::new();
-            cx.update(|app_cx| {
-                let buffer_store = project.read(app_cx).buffer_store();
-                for buffer_handle in buffer_store.read(app_cx).buffers() {
-                    let buffer = buffer_handle.read(app_cx);
-                    if buffer.is_dirty()
-                        && let Some(file) = buffer.file()
-                    {
-                        let path = file.path().to_string_lossy().to_string();
-                        unsaved_buffers.push(path);
-                    }
-                }
-            })
-            .ok();
-
             Arc::new(ProjectSnapshot {
                 worktree_snapshots,
-                unsaved_buffer_paths: unsaved_buffers,
                 timestamp: Utc::now(),
             })
         })
diff --git a/crates/agent2/src/tools/copy_path_tool.rs b/crates/agent2/src/tools/copy_path_tool.rs
index 905c090883192064782be62905c991393249f911..236978c78f0c2fee7ecf611486349bab094b3cec 100644
--- a/crates/agent2/src/tools/copy_path_tool.rs
+++ b/crates/agent2/src/tools/copy_path_tool.rs
@@ -84,9 +84,7 @@ impl AgentTool for CopyPathTool {
             .and_then(|project_path| project.entry_for_path(&project_path, cx))
         {
             Some(entity) => match project.find_project_path(&input.destination_path, cx) {
-                Some(project_path) => {
-                    project.copy_entry(entity.id, None, project_path.path, cx)
-                }
+                Some(project_path) => project.copy_entry(entity.id, project_path, cx),
                 None => Task::ready(Err(anyhow!(
                     "Destination path {} was outside the project.",
                     input.destination_path
diff --git a/crates/agent2/src/tools/diagnostics_tool.rs b/crates/agent2/src/tools/diagnostics_tool.rs
index a38e317d43cb16d8ee652f1a5f7aabd8b1ce4c8f..f07ec4cfe6903ec454eb39a7afc7748327e026ec 100644
--- a/crates/agent2/src/tools/diagnostics_tool.rs
+++ b/crates/agent2/src/tools/diagnostics_tool.rs
@@ -6,7 +6,7 @@ use language::{DiagnosticSeverity, OffsetRangeExt};
 use project::Project;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use std::{fmt::Write, path::Path, sync::Arc};
+use std::{fmt::Write, sync::Arc};
 use ui::SharedString;
 use util::markdown::MarkdownInlineCode;
 
@@ -147,9 +147,7 @@ impl AgentTool for DiagnosticsTool {
                     has_diagnostics = true;
                     output.push_str(&format!(
                         "{}: {} error(s), {} warning(s)\n",
-                        Path::new(worktree.read(cx).root_name())
-                            .join(project_path.path)
-                            .display(),
+                        worktree.read(cx).absolutize(&project_path.path).display(),
                         summary.error_count,
                         summary.warning_count
                     ));
diff --git a/crates/agent2/src/tools/edit_file_tool.rs b/crates/agent2/src/tools/edit_file_tool.rs
index 81f340b0b5c83648b1ec92210986b475b71c5bcf..9f2e8e3e313c1a0fbf32e8fe2b3da0c0d822ad69 100644
--- a/crates/agent2/src/tools/edit_file_tool.rs
+++ b/crates/agent2/src/tools/edit_file_tool.rs
@@ -17,10 +17,12 @@ use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::Settings;
 use smol::stream::StreamExt as _;
+use std::ffi::OsStr;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use ui::SharedString;
 use util::ResultExt;
+use util::rel_path::RelPath;
 
 const DEFAULT_UI_TEXT: &str = "Editing file";
 
@@ -148,12 +150,11 @@ impl EditFileTool {
 
         // If any path component matches the local settings folder, then this could affect
         // the editor in ways beyond the project source, so prompt.
-        let local_settings_folder = paths::local_settings_folder_relative_path();
+        let local_settings_folder = paths::local_settings_folder_name();
         let path = Path::new(&input.path);
-        if path
-            .components()
-            .any(|component| component.as_os_str() == local_settings_folder.as_os_str())
-        {
+        if path.components().any(|component| {
+            component.as_os_str() == <_ as AsRef<OsStr>>::as_ref(&local_settings_folder)
+        }) {
             return event_stream.authorize(
                 format!("{} (local settings)", input.display_description),
                 cx,
@@ -162,6 +163,7 @@ impl EditFileTool {
 
         // It's also possible that the global config dir is configured to be inside the project,
         // so check for that edge case too.
+        // TODO this is broken when remoting
         if let Ok(canonical_path) = std::fs::canonicalize(&input.path)
             && canonical_path.starts_with(paths::config_dir())
         {
@@ -216,9 +218,7 @@ impl AgentTool for EditFileTool {
                         .read(cx)
                         .short_full_path_for_project_path(&project_path, cx)
                 })
-                .unwrap_or(Path::new(&input.path).into())
-                .to_string_lossy()
-                .to_string()
+                .unwrap_or(input.path.to_string_lossy().to_string())
                 .into(),
             Err(raw_input) => {
                 if let Some(input) =
@@ -235,9 +235,7 @@ impl AgentTool for EditFileTool {
                                 .read(cx)
                                 .short_full_path_for_project_path(&project_path, cx)
                         })
-                        .unwrap_or(Path::new(&input.path).into())
-                        .to_string_lossy()
-                        .to_string()
+                        .unwrap_or(input.path)
                         .into();
                 }
 
@@ -478,7 +476,7 @@ impl AgentTool for EditFileTool {
     ) -> Result<()> {
         event_stream.update_diff(cx.new(|cx| {
             Diff::finalized(
-                output.input_path,
+                output.input_path.to_string_lossy().to_string(),
                 Some(output.old_text.to_string()),
                 output.new_text,
                 self.language_registry.clone(),
@@ -542,10 +540,12 @@ fn resolve_path(
             let file_name = input
                 .path
                 .file_name()
+                .and_then(|file_name| file_name.to_str())
+                .and_then(|file_name| RelPath::new(file_name).ok())
                 .context("Can't create file: invalid filename")?;
 
             let new_file_path = parent_project_path.map(|parent| ProjectPath {
-                path: Arc::from(parent.path.join(file_name)),
+                path: parent.path.join(file_name),
                 ..parent
             });
 
@@ -690,13 +690,10 @@ mod tests {
         cx.update(|cx| resolve_path(&input, project, cx))
     }
 
+    #[track_caller]
     fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
-        let actual = path
-            .expect("Should return valid path")
-            .path
-            .to_str()
-            .unwrap()
-            .replace("\\", "/"); // Naive Windows paths normalization
+        let actual = path.expect("Should return valid path").path;
+        let actual = actual.as_str();
         assert_eq!(actual, expected);
     }
 
@@ -1408,8 +1405,8 @@ mod tests {
             // Parent directory references - find_project_path resolves these
             (
                 "project/../other",
-                false,
-                "Path with .. is resolved by find_project_path",
+                true,
+                "Path with .. that goes outside of root directory",
             ),
             (
                 "project/./src/file.rs",
@@ -1437,16 +1434,18 @@ mod tests {
                 )
             });
 
+            cx.run_until_parked();
+
             if should_confirm {
                 stream_rx.expect_authorization().await;
             } else {
-                auth.await.unwrap();
                 assert!(
                     stream_rx.try_next().is_err(),
                     "Failed for case: {} - path: {} - expected no confirmation but got one",
                     description,
                     path
                 );
+                auth.await.unwrap();
             }
         }
     }
diff --git a/crates/agent2/src/tools/find_path_tool.rs b/crates/agent2/src/tools/find_path_tool.rs
index b8b60f79f4cf9808a730b0c6428885b23b32d998..59f203cec98a17fda9e46f6fc222f3157d125060 100644
--- a/crates/agent2/src/tools/find_path_tool.rs
+++ b/crates/agent2/src/tools/find_path_tool.rs
@@ -156,10 +156,14 @@ impl AgentTool for FindPathTool {
 }
 
 fn search_paths(glob: &str, project: Entity, cx: &mut App) -> Task>> {
-    let path_matcher = match PathMatcher::new([
-        // Sometimes models try to search for "". In this case, return all paths in the project.
-        if glob.is_empty() { "*" } else { glob },
-    ]) {
+    let path_style = project.read(cx).path_style(cx);
+    let path_matcher = match PathMatcher::new(
+        [
+            // Sometimes models try to search for "". In this case, return all paths in the project.
+            if glob.is_empty() { "*" } else { glob },
+        ],
+        path_style,
     ) {
         Ok(matcher) => matcher,
         Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {err}"))),
     };
@@ -173,9 +177,8 @@ fn search_paths(glob: &str, project: Entity, cx: &mut App) -> Task
>(),
+            path_style,
         ) {
             Ok(matcher) => matcher,
             Err(error) => {
@@ -132,7 +135,7 @@ impl AgentTool for GrepTool {
             .iter()
             .chain(global_settings.private_files.sources().iter());
 
-        match PathMatcher::new(exclude_patterns) {
+        match PathMatcher::new(exclude_patterns, path_style) {
             Ok(matcher) => matcher,
             Err(error) => {
                 return Task::ready(Err(anyhow!("invalid exclude pattern: {error}")));
diff --git a/crates/agent2/src/tools/list_directory_tool.rs b/crates/agent2/src/tools/list_directory_tool.rs
index fe7e2a4d85d115d7b9b87be9a2b90f3f7bd19028..cd8b46ddebc2d9ffb953f8aabef10c30a33dde37 100644
--- a/crates/agent2/src/tools/list_directory_tool.rs
+++ b/crates/agent2/src/tools/list_directory_tool.rs
@@ -2,12 +2,12 @@ use crate::{AgentTool, ToolCallEventStream};
 use agent_client_protocol::ToolKind;
 use anyhow::{Result, anyhow};
 use gpui::{App, Entity, SharedString, Task};
-use project::{Project, WorktreeSettings};
+use project::{Project, ProjectPath, WorktreeSettings};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::Settings;
 use std::fmt::Write;
-use std::{path::Path, sync::Arc};
+use std::sync::Arc;
 use util::markdown::MarkdownInlineCode;
 
 /// Lists files and directories in a given path. Prefer the `grep` or `find_path` tools when searching the codebase.
@@ -86,13 +86,13 @@ impl AgentTool for ListDirectoryTool {
                 .read(cx)
                 .worktrees(cx)
                 .filter_map(|worktree| {
-                    worktree.read(cx).root_entry().and_then(|entry| {
-                        if entry.is_dir() {
-                            entry.path.to_str()
-                        } else {
-                            None
-                        }
-                    })
+                    let worktree = worktree.read(cx);
+                    let root_entry = worktree.root_entry()?;
+                    if root_entry.is_dir() {
+                        Some(root_entry.path.display(worktree.path_style()))
+                    } else {
+                        None
+                    }
                 })
                 .collect::<Vec<_>>()
                 .join("\n");
@@ -143,7 +143,7 @@ impl AgentTool for ListDirectoryTool {
         }
 
         let worktree_snapshot = worktree.read(cx).snapshot();
-        let worktree_root_name = worktree.read(cx).root_name().to_string();
+        let worktree_root_name = worktree.read(cx).root_name();
 
         let Some(entry) = worktree_snapshot.entry_for_path(&project_path.path) else {
             return Task::ready(Err(anyhow!("Path not found: {}", input.path)));
@@ -165,25 +165,17 @@ impl AgentTool for ListDirectoryTool {
                 continue;
             }
 
-            if self
-                .project
-                .read(cx)
-                .find_project_path(&entry.path, cx)
-                .map(|project_path| {
-                    let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx);
-
-                    worktree_settings.is_path_excluded(&project_path.path)
-                        || worktree_settings.is_path_private(&project_path.path)
-                })
-                .unwrap_or(false)
+            let project_path: ProjectPath = (worktree_snapshot.id(), entry.path.clone()).into();
+            if worktree_settings.is_path_excluded(&project_path.path)
+                || worktree_settings.is_path_private(&project_path.path)
             {
                 continue;
             }
 
-            let full_path = Path::new(&worktree_root_name)
+            let full_path = worktree_root_name
                 .join(&entry.path)
-                .display()
-                .to_string();
+                .display(worktree_snapshot.path_style())
+                .into_owned();
             if entry.is_dir() {
                 folders.push(full_path);
             } else {
diff --git a/crates/agent2/src/tools/move_path_tool.rs b/crates/agent2/src/tools/move_path_tool.rs
index 91880c1243e0aa48569ab8e6981ddd45b41ab411..ae58145126f6356beaa1457d719812bb56d6e7db 100644
--- a/crates/agent2/src/tools/move_path_tool.rs
+++ b/crates/agent2/src/tools/move_path_tool.rs
@@ -98,7 +98,7 @@ impl AgentTool for MovePathTool {
             .and_then(|project_path| project.entry_for_path(&project_path, cx))
         {
             Some(entity) => match project.find_project_path(&input.destination_path, cx) {
-                Some(project_path) => project.rename_entry(entity.id, project_path.path, cx),
+                Some(project_path) => project.rename_entry(entity.id, project_path, cx),
                 None => Task::ready(Err(anyhow!(
                     "Destination path {} was outside the project.",
                     input.destination_path
diff --git a/crates/agent2/src/tools/read_file_tool.rs b/crates/agent2/src/tools/read_file_tool.rs
index 6fa157630d487c517a536126d6b8dd4d4d53a4e1..fcbe8978583d2b17dd9dc6c31cf369749f81ef99 100644
--- a/crates/agent2/src/tools/read_file_tool.rs
+++ b/crates/agent2/src/tools/read_file_tool.rs
@@ -82,12 +82,12 @@ impl AgentTool for ReadFileTool {
         {
             match (input.start_line, input.end_line) {
                 (Some(start), Some(end)) => {
-                    format!("Read file `{}` (lines {}-{})", path.display(), start, end,)
+                    format!("Read file `{path}` (lines {}-{})", start, end,)
                 }
                 (Some(start), None) => {
-                    format!("Read file `{}` (from line {})", path.display(), start)
+                    format!("Read file `{path}` (from line {})", start)
                 }
-                _ => format!("Read file `{}`", path.display()),
+                _ => format!("Read file `{path}`"),
             }
             .into()
         } else {
diff --git a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/acp/completion_provider.rs
index 8c14bd1642b05623b9236701630127c23f5e30af..8cbae5a5420a2f89d0f7ca478ae77a5d5d350411 100644
--- a/crates/agent_ui/src/acp/completion_provider.rs
+++ b/crates/agent_ui/src/acp/completion_provider.rs
@@ -1,5 +1,6 @@
 use std::cell::RefCell;
 use std::ops::Range;
+use std::path::PathBuf;
 use std::rc::Rc;
 use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
@@ -13,7 +14,7 @@ use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::{App, Entity, Task, WeakEntity};
 use language::{Buffer, CodeLabel, HighlightId};
 use lsp::CompletionContext;
-use project::lsp_store::CompletionDocumentation;
+use project::lsp_store::{CompletionDocumentation, SymbolLocation};
 use project::{
     Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, Project,
     ProjectPath, Symbol, WorktreeId,
@@ -22,6 +23,7 @@ use prompt_store::PromptStore;
 use rope::Point;
 use text::{Anchor, ToPoint as _};
 use ui::prelude::*;
+use util::rel_path::RelPath;
 use workspace::Workspace;
 
 use crate::AgentPanel;
@@ -187,7 +189,7 @@ impl ContextPickerCompletionProvider {
     pub(crate) fn completion_for_path(
         project_path: ProjectPath,
-        path_prefix: &str,
+        path_prefix: &RelPath,
         is_recent: bool,
         is_directory: bool,
         source_range: Range,
@@ -195,10 +197,12 @@ impl ContextPickerCompletionProvider {
         project: Entity,
         cx: &mut App,
     ) -> Option {
+        let path_style = project.read(cx).path_style(cx);
         let (file_name, directory) =
             crate::context_picker::file_context_picker::extract_file_name_and_directory(
                 &project_path.path,
                 path_prefix,
+                path_style,
             );
 
         let label =
@@ -250,7 +254,15 @@ impl ContextPickerCompletionProvider {
         let label = CodeLabel::plain(symbol.name.clone(), None);
 
-        let abs_path = project.read(cx).absolute_path(&symbol.path, cx)?;
+        let abs_path = match &symbol.path {
+            SymbolLocation::InProject(project_path) => {
+                project.read(cx).absolute_path(&project_path, cx)?
+            }
+            SymbolLocation::OutsideProject {
+                abs_path,
+                signature: _,
+            } => PathBuf::from(abs_path.as_ref()),
+        };
         let uri = MentionUri::Symbol {
             abs_path,
             name: symbol.name.clone(),
diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs
index c369d36d78eb179ec109c90365237d39159735fa..689d3e25eb239e2bd09952f887f1969fb5b510b5 100644
--- a/crates/agent_ui/src/acp/message_editor.rs
+++ b/crates/agent_ui/src/acp/message_editor.rs
@@ -48,7 +48,7 @@ use std::{
 use text::OffsetRangeExt;
 use theme::ThemeSettings;
 use ui::{ButtonLike, TintColor, Toggleable, prelude::*};
-use util::{ResultExt, debug_panic};
+use util::{ResultExt, debug_panic, paths::PathStyle, rel_path::RelPath};
 use workspace::{Workspace, notifications::NotifyResultExt as _};
 use zed_actions::agent::Chat;
 
@@ -108,6 +108,11 @@ impl MessageEditor {
             available_commands.clone(),
         ));
         let mention_set = MentionSet::default();
+        // TODO: fix mentions when remoting with mixed path styles.
+        let host_and_guest_paths_differ = project
+            .read(cx)
+            .remote_client()
+            .is_some_and(|client| client.read(cx).path_style() != PathStyle::local());
         let editor = cx.new(|cx| {
             let buffer = cx.new(|cx| Buffer::local("", cx).with_language(Arc::new(language), cx));
             let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
@@ -117,7 +122,9 @@ impl MessageEditor {
             editor.set_show_indent_guides(false, cx);
             editor.set_soft_wrap();
             editor.set_use_modal_editing(true);
-            editor.set_completion_provider(Some(completion_provider.clone()));
+            if !host_and_guest_paths_differ {
+                editor.set_completion_provider(Some(completion_provider.clone()));
+            }
             editor.set_context_menu_options(ContextMenuOptions {
                 min_entries_visible: 12,
                 max_entries_visible: 12,
@@ -947,6 +954,7 @@ impl MessageEditor {
         window: &mut Window,
         cx: &mut Context,
     ) {
+        let path_style = self.project.read(cx).path_style(cx);
         let buffer = self.editor.read(cx).buffer().clone();
         let Some(buffer) = buffer.read(cx).as_singleton() else {
             return;
         };
             let Some(entry) = self.project.read(cx).entry_for_path(&path, cx) else {
                 continue;
             };
-            let Some(abs_path) = self.project.read(cx).absolute_path(&path, cx) else {
+            let Some(worktree) = self.project.read(cx).worktree_for_id(path.worktree_id, cx) else {
                 continue;
             };
-            let path_prefix = abs_path
-                .file_name()
-                .unwrap_or(path.path.as_os_str())
-                .display()
-                .to_string();
+            let abs_path = worktree.read(cx).absolutize(&path.path);
             let (file_name, _) =
                 crate::context_picker::file_context_picker::extract_file_name_and_directory(
                     &path.path,
-                    &path_prefix,
+                    worktree.read(cx).root_name(),
+                    path_style,
                 );
 
             let uri = if entry.is_dir() {
@@ -1176,7 +1181,7 @@ fn full_mention_for_directory(
     abs_path: &Path,
     cx: &mut App,
 ) -> Task> {
-    fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<(Arc<Path>, PathBuf)> {
+    fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<(Arc<RelPath>, PathBuf)> {
         let mut files = Vec::new();
 
         for entry in worktree.child_entries(path) {
@@ -1261,7 +1266,7 @@ fn full_mention_for_directory(
     })
 }
 
-fn render_directory_contents(entries: Vec<(Arc<Path>, PathBuf, String)>) -> String {
+fn render_directory_contents(entries: Vec<(Arc<RelPath>, PathBuf, String)>) -> String {
     let mut output = String::new();
     for (_relative_path, full_path, content) in entries {
         let fence = codeblock_fence_for_path(Some(&full_path), None);
@@ -1595,7 +1600,7 @@ mod tests {
     use serde_json::json;
     use text::Point;
     use ui::{App, Context, IntoElement, Render, SharedString, Window};
-    use util::{path, uri};
+    use util::{path, paths::PathStyle, rel_path::rel_path, uri};
     use workspace::{AppState, Item, Workspace};
 
     use crate::acp::{
@@ -2105,16 +2110,18 @@ mod tests {
         let mut cx = VisualTestContext::from_window(*window, cx);
 
         let paths = vec![
-            path!("a/one.txt"),
-            path!("a/two.txt"),
-            path!("a/three.txt"),
-            path!("a/four.txt"),
-            path!("b/five.txt"),
-            path!("b/six.txt"),
-            path!("b/seven.txt"),
-            path!("b/eight.txt"),
+            rel_path("a/one.txt"),
+            rel_path("a/two.txt"),
+            rel_path("a/three.txt"),
+            rel_path("a/four.txt"),
+            rel_path("b/five.txt"),
+            rel_path("b/six.txt"),
+            rel_path("b/seven.txt"),
+            rel_path("b/eight.txt"),
         ];
 
+        let slash = PathStyle::local().separator();
+
         let mut opened_editors = Vec::new();
         for path in paths {
             let buffer = workspace
@@ -2122,7 +2129,7 @@ mod tests {
                     workspace.open_path(
                         ProjectPath {
                             worktree_id,
-                            path: Path::new(path).into(),
+                            path: path.into(),
                         },
                         None,
                         false,
@@ -2183,10 +2190,10 @@ mod tests {
            assert_eq!(
                 current_completion_labels(editor),
                 &[
-                    "eight.txt dir/b/",
-                    "seven.txt dir/b/",
-                    "six.txt dir/b/",
-                    "five.txt dir/b/",
+                    format!("eight.txt dir{slash}b{slash}"),
+                    format!("seven.txt dir{slash}b{slash}"),
+                    format!("six.txt dir{slash}b{slash}"),
+                    format!("five.txt dir{slash}b{slash}"),
                 ]
             );
             editor.set_text("", window, cx);
@@ -2214,14 +2221,14 @@ mod tests {
            assert_eq!(
                 current_completion_labels(editor),
                 &[
-                    "eight.txt dir/b/",
-                    "seven.txt dir/b/",
-                    "six.txt dir/b/",
-                    "five.txt dir/b/",
-                    "Files & Directories",
-                    "Symbols",
-                    "Threads",
-                    "Fetch"
+                    format!("eight.txt dir{slash}b{slash}"),
+                    format!("seven.txt dir{slash}b{slash}"),
+                    format!("six.txt dir{slash}b{slash}"),
+                    format!("five.txt dir{slash}b{slash}"),
+                    "Files & Directories".into(),
+                    "Symbols".into(),
+                    "Threads".into(),
+                    "Fetch".into()
                 ]
             );
         });
@@ -2248,7 +2255,10 @@ mod tests {
         editor.update(&mut cx, |editor, cx| {
             assert_eq!(editor.text(cx), "Lorem @file one");
             assert!(editor.has_visible_completions_menu());
-            assert_eq!(current_completion_labels(editor), vec!["one.txt dir/a/"]);
+            assert_eq!(
+                current_completion_labels(editor),
+                vec![format!("one.txt dir{slash}a{slash}")]
+            );
         });
 
         editor.update_in(&mut cx, |editor, window, cx| {
@@ -2505,7 +2515,7 @@ mod tests {
             format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png")
         );
         assert!(editor.has_visible_completions_menu());
-        assert_eq!(current_completion_labels(editor), &["x.png dir/"]);
+        assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
     });
 
     editor.update_in(&mut cx, |editor, window, cx| {
@@ -2544,7 +2554,7 @@ mod tests {
             format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png")
        );
         assert!(editor.has_visible_completions_menu());
-        assert_eq!(current_completion_labels(editor), &["x.png dir/"]);
+        assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
     });
 
     editor.update_in(&mut cx, |editor, window, cx| {
diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs
index 4a91a9fff7b9902bc51301752a578114159bf680..104b06049d44c2b9b08262b545ebe3a499f3155b 100644
--- a/crates/agent_ui/src/acp/thread_view.rs
+++ b/crates/agent_ui/src/acp/thread_view.rs
@@ -3704,29 +3704,32 @@ impl AcpThreadView {
                     |(index, (buffer, _diff))| {
                         let file = buffer.read(cx).file()?;
                         let path = file.path();
+                        let path_style = file.path_style(cx);
+                        let separator = file.path_style(cx).separator();
 
                         let file_path = path.parent().and_then(|parent| {
-                            let parent_str = parent.to_string_lossy();
-
-                            if parent_str.is_empty() {
+                            if parent.is_empty() {
                                 None
                             } else {
                                 Some(
-                                    Label::new(format!("/{}{}", parent_str, std::path::MAIN_SEPARATOR_STR))
-                                        .color(Color::Muted)
-                                        .size(LabelSize::XSmall)
-                                        .buffer_font(cx),
+                                    Label::new(format!(
+                                        "{separator}{}{separator}",
+                                        parent.display(path_style)
+                                    ))
+                                    .color(Color::Muted)
+                                    .size(LabelSize::XSmall)
+                                    .buffer_font(cx),
                                 )
                             }
                         });
 
                         let file_name = path.file_name().map(|name| {
-                            Label::new(name.to_string_lossy().to_string())
+                            Label::new(name.to_string())
                                 .size(LabelSize::XSmall)
                                 .buffer_font(cx)
                         });
 
-                        let file_icon = FileIcons::get_icon(path, cx)
+                        let file_icon = FileIcons::get_icon(path.as_std_path(), cx)
                             .map(Icon::from_path)
                             .map(|icon| icon.color(Color::Muted).size(IconSize::Small))
                             .unwrap_or_else(|| {
@@ -4569,7 +4572,7 @@ impl AcpThreadView {
                 .read(cx)
                 .visible_worktrees(cx)
                 .next()
-                .map(|worktree| worktree.read(cx).root_name().to_string())
+                .map(|worktree| worktree.read(cx).root_name_str().to_string())
         });
 
         if let Some(screen_window) = cx
diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs
index e34789d62d2c95b06f5c4f03b93b60f01c6dbf6a..b64334f403bed8cfcf86e80e4fe4589ba920b06d 100644
--- a/crates/agent_ui/src/agent_ui.rs
+++ b/crates/agent_ui/src/agent_ui.rs
@@ -264,7 +264,7 @@ pub fn init(
         init_language_model_settings(cx);
     }
     assistant_slash_command::init(cx);
-    agent::init(cx);
+    agent::init(fs.clone(), cx);
     agent_panel::init(cx);
     context_server_configuration::init(language_registry.clone(), fs.clone(), cx);
     TextThreadEditor::init(cx);
diff --git a/crates/agent_ui/src/context_picker.rs b/crates/agent_ui/src/context_picker.rs
index bc309302896eb58c318fa79c108ab9adb5a41ab1..58edecdf3da6b16bca82a7d4c0e73dcac3969e03 100644
--- a/crates/agent_ui/src/context_picker.rs
+++ b/crates/agent_ui/src/context_picker.rs
@@ -33,6 +33,8 @@ use thread_context_picker::{
 use ui::{
     ButtonLike, ContextMenu, ContextMenuEntry, ContextMenuItem, Disclosure, TintColor, prelude::*,
 };
+use util::paths::PathStyle;
+use util::rel_path::RelPath;
 use workspace::{Workspace, notifications::NotifyResultExt};
 
 use agent::{
@@ -228,12 +230,19 @@ impl ContextPicker {
         let context_picker = cx.entity();
 
         let menu = ContextMenu::build(window, cx, move |menu, _window, cx| {
+            let Some(workspace) = self.workspace.upgrade() else {
+                return menu;
+            };
+            let path_style = workspace.read(cx).path_style(cx);
             let recent = self.recent_entries(cx);
             let has_recent = !recent.is_empty();
             let recent_entries = recent
                 .into_iter()
                 .enumerate()
-                .map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry));
+                .map(|(ix, entry)| {
+                    self.recent_menu_item(context_picker.clone(), ix, entry, path_style)
+                })
+                .collect::<Vec<_>>();
 
             let entries = self
                 .workspace
@@ -395,6 +404,7 @@ impl ContextPicker {
         context_picker: Entity,
         ix: usize,
         entry: RecentEntry,
+        path_style: PathStyle,
     ) -> ContextMenuItem {
         match entry {
             RecentEntry::File {
@@ -413,6 +423,7 @@ impl ContextPicker {
                             &path,
                             &path_prefix,
                             false,
+                            path_style,
                             context_store.clone(),
                             cx,
                         )
@@ -586,7 +597,7 @@ impl Render for ContextPicker {
 pub(crate) enum RecentEntry {
     File {
         project_path: ProjectPath,
-        path_prefix: Arc,
+        path_prefix: Arc<RelPath>,
     },
     Thread(ThreadContextEntry),
 }
diff --git a/crates/agent_ui/src/context_picker/completion_provider.rs b/crates/agent_ui/src/context_picker/completion_provider.rs
index 01a7a51316eee4709eaf9c17c8840e3cd637a62b..33a5a621a1d1ea23ccdb49fd97010fea1856ce80 100644
--- a/crates/agent_ui/src/context_picker/completion_provider.rs
+++ b/crates/agent_ui/src/context_picker/completion_provider.rs
@@ -13,6 +13,7 @@ use http_client::HttpClientWithUrl;
 use itertools::Itertools;
 use language::{Buffer, CodeLabel, HighlightId};
 use lsp::CompletionContext;
+use project::lsp_store::SymbolLocation;
 use project::{
     Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, ProjectPath,
     Symbol, WorktreeId,
@@ -22,6 +23,8 @@ use rope::Point;
 use text::{Anchor, OffsetRangeExt, ToPoint};
 use ui::prelude::*;
 use util::ResultExt as _;
+use util::paths::PathStyle;
+use util::rel_path::RelPath;
 use workspace::Workspace;
 
 use agent::{
@@ -574,11 +577,12 @@ impl ContextPickerCompletionProvider {
     fn completion_for_path(
         project_path: ProjectPath,
-        path_prefix: &str,
+        path_prefix: &RelPath,
         is_recent: bool,
         is_directory: bool,
         excerpt_id: ExcerptId,
         source_range: Range,
+        path_style: PathStyle,
         editor: Entity,
         context_store: Entity,
         cx: &App,
     ) -> Option {
@@ -586,6 +590,7 @@ impl ContextPickerCompletionProvider {
         let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory(
             &project_path.path,
             path_prefix,
+            path_style,
         );
 
         let label =
@@ -657,17 +662,22 @@ impl ContextPickerCompletionProvider {
         workspace: Entity,
         cx: &mut App,
     ) -> Option {
+        let path_style = workspace.read(cx).path_style(cx);
+        let SymbolLocation::InProject(symbol_path) = &symbol.path else {
+            return None;
+        };
         let path_prefix = workspace
             .read(cx)
             .project()
             .read(cx)
-            .worktree_for_id(symbol.path.worktree_id, cx)?
+            .worktree_for_id(symbol_path.worktree_id, cx)?
             .read(cx)
             .root_name();
 
         let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory(
-            &symbol.path.path,
+            &symbol_path.path,
             path_prefix,
+            path_style,
         );
         let full_path = if let Some(directory) = directory {
             format!("{}{}", directory, file_name)
@@ -768,6 +778,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
         let text_thread_store = self.text_thread_store.clone();
         let editor = self.editor.clone();
         let http_client = workspace.read(cx).client().http_client();
+        let path_style = workspace.read(cx).path_style(cx);
         let MentionCompletion { mode, argument, .. } = state;
         let query = argument.unwrap_or_else(|| "".to_string());
 
@@ -834,6 +845,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
                             mat.is_dir,
                             excerpt_id,
                             source_range.clone(),
+                            path_style,
                             editor.clone(),
                             context_store.clone(),
                             cx,
@@ -1064,7 +1076,7 @@ mod tests {
     use serde_json::json;
     use settings::SettingsStore;
     use std::{ops::Deref, rc::Rc};
-    use util::path;
+    use util::{path, rel_path::rel_path};
     use workspace::{AppState, Item};
 
     #[test]
@@ -1215,16 +1227,18 @@ mod tests {
         let mut cx = VisualTestContext::from_window(*window.deref(), cx);
 
         let paths = vec![
-            path!("a/one.txt"),
-            path!("a/two.txt"),
-            path!("a/three.txt"),
-            path!("a/four.txt"),
-            path!("b/five.txt"),
-            path!("b/six.txt"),
-            path!("b/seven.txt"),
-            path!("b/eight.txt"),
+            rel_path("a/one.txt"),
+            rel_path("a/two.txt"),
+            rel_path("a/three.txt"),
+            rel_path("a/four.txt"),
+            rel_path("b/five.txt"),
+            rel_path("b/six.txt"),
+            rel_path("b/seven.txt"),
+            rel_path("b/eight.txt"),
         ];
 
+        let slash = PathStyle::local().separator();
+
         let mut opened_editors = Vec::new();
         for path in paths {
             let buffer = workspace
@@ -1232,7 +1246,7 @@ mod tests {
                     workspace.open_path(
                         ProjectPath {
                             worktree_id,
-                            path: Path::new(path).into(),
+                            path: path.into(),
                         },
                         None,
                         false,
@@ -1308,13 +1322,13 @@ mod tests {
            assert_eq!(
                 current_completion_labels(editor),
                 &[
-                    "seven.txt dir/b/",
-                    "six.txt dir/b/",
-                    "five.txt dir/b/",
-                    "four.txt dir/a/",
-                    "Files & Directories",
-                    "Symbols",
-                    "Fetch"
+                    format!("seven.txt dir{slash}b{slash}"),
+                    format!("six.txt dir{slash}b{slash}"),
+                    format!("five.txt dir{slash}b{slash}"),
+                    format!("four.txt dir{slash}a{slash}"),
+                    "Files & Directories".into(),
+                    "Symbols".into(),
+                    "Fetch".into()
                 ]
             );
         });
@@ -1341,7 +1355,10 @@ mod tests {
         editor.update(&mut cx, |editor, cx| {
             assert_eq!(editor.text(cx), "Lorem @file one");
             assert!(editor.has_visible_completions_menu());
-            assert_eq!(current_completion_labels(editor), vec!["one.txt dir/a/"]);
+            assert_eq!(
+                current_completion_labels(editor),
+                vec![format!("one.txt dir{slash}a{slash}")]
+            );
         });
 
         editor.update_in(&mut cx, |editor, window, cx| {
@@ -1350,7 +1367,10 @@ mod tests {
         });
 
         editor.update(&mut cx, |editor, cx| {
-            assert_eq!(editor.text(cx), "Lorem [@one.txt](@file:dir/a/one.txt) ");
+            assert_eq!(
+                editor.text(cx),
+                format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ")
+            );
             assert!(!editor.has_visible_completions_menu());
             assert_eq!(
                 fold_ranges(editor, cx),
@@ -1361,7 +1381,10 @@ mod tests {
         cx.simulate_input(" ");
 
         editor.update(&mut cx, |editor, cx| {
-            assert_eq!(editor.text(cx), "Lorem [@one.txt](@file:dir/a/one.txt) ");
+            assert_eq!(
+                editor.text(cx),
+                format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ")
+            );
             assert!(!editor.has_visible_completions_menu());
             assert_eq!(
                 fold_ranges(editor, cx),
@@ -1374,7 +1397,7 @@ mod tests {
         editor.update(&mut cx, |editor, cx| {
             assert_eq!(
                 editor.text(cx),
-                "Lorem [@one.txt](@file:dir/a/one.txt) Ipsum ",
+                format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum "),
             );
             assert!(!editor.has_visible_completions_menu());
             assert_eq!(
@@ -1388,7 +1411,7 @@ mod tests {
         editor.update(&mut cx, |editor, cx| {
             assert_eq!(
                 editor.text(cx),
-                "Lorem [@one.txt](@file:dir/a/one.txt) Ipsum @file ",
+                format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum @file "),
             );
             assert!(editor.has_visible_completions_menu());
             assert_eq!(
@@ -1406,7 +1429,7 @@ mod tests {
         editor.update(&mut cx, |editor, cx| {
             assert_eq!(
                 editor.text(cx),
-                "Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt) "
+                format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) ")
             );
             assert!(!editor.has_visible_completions_menu());
             assert_eq!(
@@ -1423,7 +1446,7 @@ mod tests {
         editor.update(&mut cx, |editor, cx| {
             assert_eq!(
                 editor.text(cx),
-                "Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt) \n@"
+                format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n@")
             );
             assert!(editor.has_visible_completions_menu());
             assert_eq!(
@@ -1444,7 +1467,7 @@ mod tests {
         editor.update(&mut cx, |editor, cx| {
             assert_eq!(
                 editor.text(cx),
-                "Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt) \n[@six.txt](@file:dir/b/six.txt) "
+                format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n[@six.txt](@file:dir{slash}b{slash}six.txt) ")
             );
             assert!(!editor.has_visible_completions_menu());
             assert_eq!(
diff --git a/crates/agent_ui/src/context_picker/file_context_picker.rs b/crates/agent_ui/src/context_picker/file_context_picker.rs
index d6f2af7083eb4049e168f6409cef22022cbe404b..4f7a4308406f9d9fbdfa42cc86adc1ffe7593396 100644
--- a/crates/agent_ui/src/context_picker/file_context_picker.rs
+++ b/crates/agent_ui/src/context_picker/file_context_picker.rs
@@ -1,4 +1,3 @@
-use std::path::Path;
 use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
 
@@ -10,7 +9,7 @@ use gpui::{
 use picker::{Picker, PickerDelegate};
 use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
 use ui::{ListItem, Tooltip, prelude::*};
-use util::ResultExt as _;
+use util::{ResultExt as _, paths::PathStyle, rel_path::RelPath};
 use workspace::Workspace;
 
 use crate::context_picker::ContextPicker;
@@ -161,6 +160,8 @@ impl PickerDelegate for FileContextPickerDelegate {
         cx: &mut Context>,
     ) -> Option {
} = &self.matches.get(ix)?; + let workspace = self.workspace.upgrade()?; + let path_style = workspace.read(cx).path_style(cx); Some( ListItem::new(ix) @@ -172,6 +173,7 @@ impl PickerDelegate for FileContextPickerDelegate { &mat.path, &mat.path_prefix, mat.is_dir, + path_style, self.context_store.clone(), cx, )), @@ -214,14 +216,13 @@ pub(crate) fn search_files( let file_matches = project.worktrees(cx).flat_map(|worktree| { let worktree = worktree.read(cx); - let path_prefix: Arc = worktree.root_name().into(); worktree.entries(false, 0).map(move |entry| FileMatch { mat: PathMatch { score: 0., positions: Vec::new(), worktree_id: worktree.id().to_usize(), path: entry.path.clone(), - path_prefix: path_prefix.clone(), + path_prefix: worktree.root_name().into(), distance_to_relative_ancestor: 0, is_dir: entry.is_dir(), }, @@ -269,51 +270,31 @@ pub(crate) fn search_files( } pub fn extract_file_name_and_directory( - path: &Path, - path_prefix: &str, + path: &RelPath, + path_prefix: &RelPath, + path_style: PathStyle, ) -> (SharedString, Option) { - if path == Path::new("") { - ( - SharedString::from( - path_prefix - .trim_end_matches(std::path::MAIN_SEPARATOR) - .to_string(), - ), - None, - ) - } else { - let file_name = path - .file_name() - .unwrap_or_default() - .to_string_lossy() - .to_string() - .into(); - - let mut directory = path_prefix - .trim_end_matches(std::path::MAIN_SEPARATOR) - .to_string(); - if !directory.ends_with('/') { - directory.push('/'); - } - if let Some(parent) = path.parent().filter(|parent| parent != &Path::new("")) { - directory.push_str(&parent.to_string_lossy()); - directory.push('/'); - } - - (file_name, Some(directory.into())) - } + let full_path = path_prefix.join(path); + let file_name = full_path.file_name().unwrap_or_default(); + let display_path = full_path.display(path_style); + let (directory, file_name) = display_path.split_at(display_path.len() - file_name.len()); + ( + file_name.to_string().into(), + Some(SharedString::new(directory)).filter(|dir| !dir.is_empty()), + ) } pub fn render_file_context_entry( id: ElementId, worktree_id: WorktreeId, - path: &Arc, - path_prefix: &Arc, + path: &Arc, + path_prefix: &Arc, is_directory: bool, + path_style: PathStyle, context_store: WeakEntity, cx: &App, ) -> Stateful
{ - let (file_name, directory) = extract_file_name_and_directory(path, path_prefix); + let (file_name, directory) = extract_file_name_and_directory(path, path_prefix, path_style); let added = context_store.upgrade().and_then(|context_store| { let project_path = ProjectPath { @@ -330,9 +311,9 @@ pub fn render_file_context_entry( }); let file_icon = if is_directory { - FileIcons::get_folder_icon(false, path, cx) + FileIcons::get_folder_icon(false, path.as_std_path(), cx) } else { - FileIcons::get_icon(path, cx) + FileIcons::get_icon(path.as_std_path(), cx) } .map(Icon::from_path) .unwrap_or_else(|| Icon::new(IconName::File)); diff --git a/crates/agent_ui/src/context_picker/symbol_context_picker.rs b/crates/agent_ui/src/context_picker/symbol_context_picker.rs index 993d65bd12ee4e01ca8d9767ccd46dd3fd645dd3..5b89f09de884067a94832c7bf474a2949e78c420 100644 --- a/crates/agent_ui/src/context_picker/symbol_context_picker.rs +++ b/crates/agent_ui/src/context_picker/symbol_context_picker.rs @@ -2,13 +2,14 @@ use std::cmp::Reverse; use std::sync::Arc; use std::sync::atomic::AtomicBool; -use anyhow::Result; +use anyhow::{Result, anyhow}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity, }; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; +use project::lsp_store::SymbolLocation; use project::{DocumentSymbol, Symbol}; use ui::{ListItem, prelude::*}; use util::ResultExt as _; @@ -191,7 +192,10 @@ pub(crate) fn add_symbol( ) -> Task, bool)>> { let project = workspace.read(cx).project().clone(); let open_buffer_task = project.update(cx, |project, cx| { - project.open_buffer(symbol.path.clone(), cx) + let SymbolLocation::InProject(symbol_path) = &symbol.path else { + return Task::ready(Err(anyhow!("can't add symbol from outside of project"))); + }; + project.open_buffer(symbol_path.clone(), cx) }); cx.spawn(async move |cx| { let buffer = open_buffer_task.await?; @@ -291,10 +295,11 @@ pub(crate) fn search_symbols( .map(|(id, symbol)| { StringMatchCandidate::new(id, symbol.label.filter_text()) }) - .partition(|candidate| { - project - .entry_for_path(&symbols[candidate.id].path, cx) - .is_some_and(|e| !e.is_ignored) + .partition(|candidate| match &symbols[candidate.id].path { + SymbolLocation::InProject(project_path) => project + .entry_for_path(project_path, cx) + .is_some_and(|e| !e.is_ignored), + SymbolLocation::OutsideProject { .. } => false, }) }) .log_err() @@ -360,13 +365,18 @@ fn compute_symbol_entries( } pub fn render_symbol_context_entry(id: ElementId, entry: &SymbolEntry) -> Stateful
{ - let path = entry - .symbol - .path - .path - .file_name() - .map(|s| s.to_string_lossy()) - .unwrap_or_default(); + let path = match &entry.symbol.path { + SymbolLocation::InProject(project_path) => { + project_path.path.file_name().unwrap_or_default().into() + } + SymbolLocation::OutsideProject { + abs_path, + signature: _, + } => abs_path + .file_name() + .map(|f| f.to_string_lossy()) + .unwrap_or_default(), + }; let symbol_location = format!("{} L{}", path, entry.symbol.range.start.0.row + 1); h_flex() diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index 3c09e47852ffae8f45a5315859a7bb3392b1680d..e01bfc1d0d0745ed612429bc2dae71967fde36d5 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -1431,10 +1431,14 @@ impl TextThreadEditor { else { continue; }; - let worktree_root_name = worktree.read(cx).root_name().to_string(); - let mut full_path = PathBuf::from(worktree_root_name.clone()); - full_path.push(&project_path.path); - file_slash_command_args.push(full_path.to_string_lossy().to_string()); + let path_style = worktree.read(cx).path_style(); + let full_path = worktree + .read(cx) + .root_name() + .join(&project_path.path) + .display(path_style) + .into_owned(); + file_slash_command_args.push(full_path); } let cmd_name = FileSlashCommand.name(); diff --git a/crates/assistant_slash_command/Cargo.toml b/crates/assistant_slash_command/Cargo.toml index f7b7af9b879492cbb48f4e88d8379b45cbc2d053..0908cd61653d35dbb54ae325118a6091cd345a4e 100644 --- a/crates/assistant_slash_command/Cargo.toml +++ b/crates/assistant_slash_command/Cargo.toml @@ -25,6 +25,7 @@ parking_lot.workspace = true serde.workspace = true serde_json.workspace = true ui.workspace = true +util.workspace = true workspace.workspace = true workspace-hack.workspace = true diff --git a/crates/assistant_slash_command/src/extension_slash_command.rs b/crates/assistant_slash_command/src/extension_slash_command.rs index e47ae52c98740af17c90fe657386bb0120773d9b..301cf65cb45895ff2475b74510264260d827da61 100644 --- a/crates/assistant_slash_command/src/extension_slash_command.rs +++ b/crates/assistant_slash_command/src/extension_slash_command.rs @@ -1,12 +1,11 @@ -use std::path::PathBuf; -use std::sync::{Arc, atomic::AtomicBool}; - use anyhow::Result; use async_trait::async_trait; use extension::{Extension, ExtensionHostProxy, ExtensionSlashCommandProxy, WorktreeDelegate}; use gpui::{App, Task, WeakEntity, Window}; use language::{BufferSnapshot, LspAdapterDelegate}; +use std::sync::{Arc, atomic::AtomicBool}; use ui::prelude::*; +use util::rel_path::RelPath; use workspace::Workspace; use crate::{ @@ -54,7 +53,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter { self.0.worktree_root_path().to_string_lossy().to_string() } - async fn read_text_file(&self, path: PathBuf) -> Result { + async fn read_text_file(&self, path: &RelPath) -> Result { self.0.read_text_file(path).await } diff --git a/crates/assistant_slash_commands/Cargo.toml b/crates/assistant_slash_commands/Cargo.toml index f151515d4235b7ecb150539aceb1c5478960517b..5844d21a51b0642a89fd13f29f53a074331ee10e 100644 --- a/crates/assistant_slash_commands/Cargo.toml +++ b/crates/assistant_slash_commands/Cargo.toml @@ -41,6 +41,9 @@ worktree.workspace = true workspace-hack.workspace = true [dev-dependencies] +fs = { workspace = true, features = ["test-support"] } +gpui = { workspace = true, features = ["test-support"] } pretty_assertions.workspace = true -settings.workspace = true 
+project = { workspace = true, features = ["test-support"] } +settings = { workspace = true, features = ["test-support"] } zlog.workspace = true diff --git a/crates/assistant_slash_commands/src/cargo_workspace_command.rs b/crates/assistant_slash_commands/src/cargo_workspace_command.rs new file mode 100644 index 0000000000000000000000000000000000000000..8a6950a4a2ff40d0452669dd388886d05d71022a --- /dev/null +++ b/crates/assistant_slash_commands/src/cargo_workspace_command.rs @@ -0,0 +1,159 @@ +use anyhow::{Context as _, Result, anyhow}; +use assistant_slash_command::{ + ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection, + SlashCommandResult, +}; +use fs::Fs; +use gpui::{App, Entity, Task, WeakEntity}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use project::{Project, ProjectPath}; +use std::{ + fmt::Write, + path::Path, + sync::{Arc, atomic::AtomicBool}, +}; +use ui::prelude::*; +use util::rel_path::RelPath; +use workspace::Workspace; + +pub struct CargoWorkspaceSlashCommand; + +impl CargoWorkspaceSlashCommand { + async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { + let buffer = fs.load(path_to_cargo_toml).await?; + let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; + + let mut message = String::new(); + writeln!(message, "You are in a Rust project.")?; + + if let Some(workspace) = cargo_toml.workspace { + writeln!( + message, + "The project is a Cargo workspace with the following members:" + )?; + for member in workspace.members { + writeln!(message, "- {member}")?; + } + + if !workspace.default_members.is_empty() { + writeln!(message, "The default members are:")?; + for member in workspace.default_members { + writeln!(message, "- {member}")?; + } + } + + if !workspace.dependencies.is_empty() { + writeln!( + message, + "The following workspace dependencies are installed:" + )?; + for dependency in workspace.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } else if let Some(package) = cargo_toml.package { + writeln!( + message, + "The project name is \"{name}\".", + name = package.name + )?; + + let description = package + .description + .as_ref() + .and_then(|description| description.get().ok().cloned()); + if let Some(description) = description.as_ref() { + writeln!(message, "It describes itself as \"{description}\".")?; + } + + if !cargo_toml.dependencies.is_empty() { + writeln!(message, "The following dependencies are installed:")?; + for dependency in cargo_toml.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } + + Ok(message) + } + + fn path_to_cargo_toml(project: Entity, cx: &mut App) -> Option> { + let worktree = project.read(cx).worktrees(cx).next()?; + let worktree = worktree.read(cx); + let entry = worktree.entry_for_path(RelPath::new("Cargo.toml").unwrap())?; + let path = ProjectPath { + worktree_id: worktree.id(), + path: entry.path.clone(), + }; + Some(Arc::from( + project.read(cx).absolute_path(&path, cx)?.as_path(), + )) + } +} + +impl SlashCommand for CargoWorkspaceSlashCommand { + fn name(&self) -> String { + "cargo-workspace".into() + } + + fn description(&self) -> String { + "insert project workspace metadata".into() + } + + fn menu_text(&self) -> String { + "Insert Project Workspace Metadata".into() + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancel: Arc, + _workspace: Option>, + _window: &mut Window, + _cx: &mut App, + ) -> Task>> { + Task::ready(Err(anyhow!("this command does not require argument"))) + } + + fn 
requires_argument(&self) -> bool { + false + } + + fn run( + self: Arc, + _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, + workspace: WeakEntity, + _delegate: Option>, + _window: &mut Window, + cx: &mut App, + ) -> Task { + let output = workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + let fs = workspace.project().read(cx).fs().clone(); + let path = Self::path_to_cargo_toml(project, cx); + let output = cx.background_spawn(async move { + let path = path.with_context(|| "Cargo.toml not found")?; + Self::build_message(fs, &path).await + }); + + cx.foreground_executor().spawn(async move { + let text = output.await?; + let range = 0..text.len(); + Ok(SlashCommandOutput { + text, + sections: vec![SlashCommandOutputSection { + range, + icon: IconName::FileTree, + label: "Project".into(), + metadata: None, + }], + run_commands_in_text: false, + } + .into_event_stream()) + }) + }); + output.unwrap_or_else(|error| Task::ready(Err(error))) + } +} diff --git a/crates/assistant_slash_commands/src/diagnostics_command.rs b/crates/assistant_slash_commands/src/diagnostics_command.rs index dd54565c2abc168bb995325f2ebf930bbde90793..77048b1108577689ade4a5e149e54e9784ba0d33 100644 --- a/crates/assistant_slash_commands/src/diagnostics_command.rs +++ b/crates/assistant_slash_commands/src/diagnostics_command.rs @@ -13,12 +13,12 @@ use project::{DiagnosticSummary, PathMatchCandidateSet, Project}; use rope::Point; use std::{ fmt::Write, - path::{Path, PathBuf}, + path::Path, sync::{Arc, atomic::AtomicBool}, }; use ui::prelude::*; -use util::ResultExt; -use util::paths::PathMatcher; +use util::paths::{PathMatcher, PathStyle}; +use util::{ResultExt, rel_path::RelPath}; use workspace::Workspace; use crate::create_label_for_command; @@ -36,7 +36,7 @@ impl DiagnosticsSlashCommand { if query.is_empty() { let workspace = workspace.read(cx); let entries = workspace.recent_navigation_history(Some(10), cx); - let path_prefix: Arc = Arc::default(); + let path_prefix: Arc = RelPath::empty().into(); Task::ready( entries .into_iter() @@ -125,6 +125,7 @@ impl SlashCommand for DiagnosticsSlashCommand { let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else { return Task::ready(Err(anyhow!("workspace was dropped"))); }; + let path_style = workspace.read(cx).project().read(cx).path_style(cx); let query = arguments.last().cloned().unwrap_or_default(); let paths = self.search_paths(query.clone(), cancellation_flag.clone(), &workspace, cx); @@ -134,11 +135,11 @@ impl SlashCommand for DiagnosticsSlashCommand { .await .into_iter() .map(|path_match| { - format!( - "{}{}", - path_match.path_prefix, - path_match.path.to_string_lossy() - ) + path_match + .path_prefix + .join(&path_match.path) + .display(path_style) + .to_string() }) .collect(); @@ -183,9 +184,11 @@ impl SlashCommand for DiagnosticsSlashCommand { return Task::ready(Err(anyhow!("workspace was dropped"))); }; - let options = Options::parse(arguments); + let project = workspace.read(cx).project(); + let path_style = project.read(cx).path_style(cx); + let options = Options::parse(arguments, path_style); - let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx); + let task = collect_diagnostics(project.clone(), options, cx); window.spawn(cx, async move |_| { task.await? 
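The hunks around this point (and in `file_command.rs` below) repeatedly replace `format!("{}{}", path_prefix, path.to_string_lossy())` with `path_prefix.join(&path).display(path_style)`. A minimal, self-contained sketch of that join-then-display pattern, using hypothetical `Rel` and `Style` stand-ins rather than Zed's actual `RelPath`/`PathStyle` types:

```rust
// Illustrative only: toy stand-ins for the pattern this patch migrates to.
// `Rel` stores worktree-relative paths with `/` separators; the platform
// separator is introduced only at display time.
#[derive(Clone, Copy)]
enum Style {
    Posix,
    Windows,
}

struct Rel(String);

impl Rel {
    // Join two relative paths, keeping the internal `/` representation.
    fn join(&self, other: &Rel) -> Rel {
        if self.0.is_empty() {
            Rel(other.0.clone())
        } else if other.0.is_empty() {
            Rel(self.0.clone())
        } else {
            Rel(format!("{}/{}", self.0, other.0))
        }
    }

    // Render for display using the requested separator style.
    fn display(&self, style: Style) -> String {
        match style {
            Style::Posix => self.0.clone(),
            Style::Windows => self.0.replace('/', "\\"),
        }
    }
}

fn main() {
    let prefix = Rel("my-project".to_string());
    let path = Rel("src/main.rs".to_string());
    let full = prefix.join(&path);
    assert_eq!(full.display(Style::Posix), "my-project/src/main.rs");
    assert_eq!(full.display(Style::Windows), "my-project\\src\\main.rs");
}
```

The point of the pattern is that joining happens on the separator-agnostic relative path, and the host's separator only appears in user-facing strings.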
@@ -204,14 +207,14 @@ struct Options { const INCLUDE_WARNINGS_ARGUMENT: &str = "--include-warnings"; impl Options { - fn parse(arguments: &[String]) -> Self { + fn parse(arguments: &[String], path_style: PathStyle) -> Self { let mut include_warnings = false; let mut path_matcher = None; for arg in arguments { if arg == INCLUDE_WARNINGS_ARGUMENT { include_warnings = true; } else { - path_matcher = PathMatcher::new(&[arg.to_owned()]).log_err(); + path_matcher = PathMatcher::new(&[arg.to_owned()], path_style).log_err(); } } Self { @@ -237,21 +240,15 @@ fn collect_diagnostics( None }; + let path_style = project.read(cx).path_style(cx); let glob_is_exact_file_match = if let Some(path) = options .path_matcher .as_ref() .and_then(|pm| pm.sources().first()) { - PathBuf::try_from(path) - .ok() - .and_then(|path| { - project.read(cx).worktrees(cx).find_map(|worktree| { - let worktree = worktree.read(cx); - let worktree_root_path = Path::new(worktree.root_name()); - let relative_path = path.strip_prefix(worktree_root_path).ok()?; - worktree.absolutize(relative_path).ok() - }) - }) + project + .read(cx) + .find_project_path(Path::new(path), cx) .is_some() } else { false @@ -263,9 +260,8 @@ fn collect_diagnostics( .diagnostic_summaries(false, cx) .flat_map(|(path, _, summary)| { let worktree = project.read(cx).worktree_for_id(path.worktree_id, cx)?; - let mut path_buf = PathBuf::from(worktree.read(cx).root_name()); - path_buf.push(&path.path); - Some((path, path_buf, summary)) + let full_path = worktree.read(cx).root_name().join(&path.path); + Some((path, full_path, summary)) }) .collect(); @@ -281,7 +277,7 @@ fn collect_diagnostics( let mut project_summary = DiagnosticSummary::default(); for (project_path, path, summary) in diagnostic_summaries { if let Some(path_matcher) = &options.path_matcher - && !path_matcher.is_match(&path) + && !path_matcher.is_match(&path.as_std_path()) { continue; } @@ -294,7 +290,7 @@ fn collect_diagnostics( } let last_end = output.text.len(); - let file_path = path.to_string_lossy().to_string(); + let file_path = path.display(path_style).to_string(); if !glob_is_exact_file_match { writeln!(&mut output.text, "{file_path}").unwrap(); } diff --git a/crates/assistant_slash_commands/src/file_command.rs b/crates/assistant_slash_commands/src/file_command.rs index 4bf53bad9b5364c7fd488cf74644701c6f176b99..afb3d942fefe13b679cf9fbe7b711dfe32eb0195 100644 --- a/crates/assistant_slash_commands/src/file_command.rs +++ b/crates/assistant_slash_commands/src/file_command.rs @@ -14,11 +14,11 @@ use smol::stream::StreamExt; use std::{ fmt::Write, ops::{Range, RangeInclusive}, - path::{Path, PathBuf}, + path::Path, sync::{Arc, atomic::AtomicBool}, }; use ui::prelude::*; -use util::ResultExt; +use util::{ResultExt, rel_path::RelPath}; use workspace::Workspace; use worktree::ChildEntriesOptions; @@ -48,7 +48,7 @@ impl FileSlashCommand { include_dirs: true, include_ignored: false, }; - let entries = worktree.child_entries_with_options(Path::new(""), options); + let entries = worktree.child_entries_with_options(RelPath::empty(), options); entries.map(move |entry| { ( project::ProjectPath { @@ -61,19 +61,18 @@ impl FileSlashCommand { })) .collect::>(); - let path_prefix: Arc = Arc::default(); + let path_prefix: Arc = RelPath::empty().into(); Task::ready( entries .into_iter() .filter_map(|(entry, is_dir)| { let worktree = project.worktree_for_id(entry.worktree_id, cx)?; - let mut full_path = PathBuf::from(worktree.read(cx).root_name()); - full_path.push(&entry.path); + let full_path = 
worktree.read(cx).root_name().join(&entry.path); Some(PathMatch { score: 0., positions: Vec::new(), worktree_id: entry.worktree_id.to_usize(), - path: full_path.into(), + path: full_path, path_prefix: path_prefix.clone(), distance_to_relative_ancestor: 0, is_dir, @@ -149,6 +148,8 @@ impl SlashCommand for FileSlashCommand { return Task::ready(Err(anyhow!("workspace was dropped"))); }; + let path_style = workspace.read(cx).path_style(cx); + let paths = self.search_paths( arguments.last().cloned().unwrap_or_default(), cancellation_flag, @@ -161,14 +162,14 @@ impl SlashCommand for FileSlashCommand { .await .into_iter() .filter_map(|path_match| { - let text = format!( - "{}{}", - path_match.path_prefix, - path_match.path.to_string_lossy() - ); + let text = path_match + .path_prefix + .join(&path_match.path) + .display(path_style) + .to_string(); let mut label = CodeLabel::default(); - let file_name = path_match.path.file_name()?.to_string_lossy(); + let file_name = path_match.path.file_name()?; let label_text = if path_match.is_dir { format!("{}/ ", file_name) } else { @@ -247,14 +248,13 @@ fn collect_files( cx.spawn(async move |cx| { for snapshot in snapshots { let worktree_id = snapshot.id(); - let mut directory_stack: Vec> = Vec::new(); - let mut folded_directory_names_stack = Vec::new(); + let path_style = snapshot.path_style(); + let mut directory_stack: Vec> = Vec::new(); + let mut folded_directory_names: Arc = RelPath::empty().into(); let mut is_top_level_directory = true; for entry in snapshot.entries(false, 0) { - let mut path_including_worktree_name = PathBuf::new(); - path_including_worktree_name.push(snapshot.root_name()); - path_including_worktree_name.push(&entry.path); + let path_including_worktree_name = snapshot.root_name().join(&entry.path); if !matchers .iter() @@ -277,13 +277,7 @@ fn collect_files( )))?; } - let filename = entry - .path - .file_name() - .unwrap_or_default() - .to_str() - .unwrap_or_default() - .to_string(); + let filename = entry.path.file_name().unwrap_or_default().to_string(); if entry.is_dir() { // Auto-fold directories that contain no files @@ -292,24 +286,23 @@ fn collect_files( if child_entries.next().is_none() && child.kind.is_dir() { if is_top_level_directory { is_top_level_directory = false; - folded_directory_names_stack.push( - path_including_worktree_name.to_string_lossy().to_string(), - ); + folded_directory_names = + folded_directory_names.join(&path_including_worktree_name); } else { - folded_directory_names_stack.push(filename.to_string()); + folded_directory_names = + folded_directory_names.join(RelPath::new(&filename).unwrap()); } continue; } } else { // Skip empty directories - folded_directory_names_stack.clear(); + folded_directory_names = RelPath::empty().into(); continue; } - let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/"); - if prefix_paths.is_empty() { + if folded_directory_names.is_empty() { let label = if is_top_level_directory { is_top_level_directory = false; - path_including_worktree_name.to_string_lossy().to_string() + path_including_worktree_name.display(path_style).to_string() } else { filename }; @@ -320,28 +313,23 @@ fn collect_files( }))?; events_tx.unbounded_send(Ok(SlashCommandEvent::Content( SlashCommandContent::Text { - text: label, + text: label.to_string(), run_commands_in_text: false, }, )))?; directory_stack.push(entry.path.clone()); } else { - // todo(windows) - // Potential bug: this assumes that the path separator is always `\` on Windows - let entry_name = format!( - "{}{}{}", - 
prefix_paths, - std::path::MAIN_SEPARATOR_STR, - &filename - ); + let entry_name = + folded_directory_names.join(RelPath::new(&filename).unwrap()); + let entry_name = entry_name.display(path_style); events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection { icon: IconName::Folder, - label: entry_name.clone().into(), + label: entry_name.to_string().into(), metadata: None, }))?; events_tx.unbounded_send(Ok(SlashCommandEvent::Content( SlashCommandContent::Text { - text: entry_name, + text: entry_name.to_string(), run_commands_in_text: false, }, )))?; @@ -356,7 +344,7 @@ fn collect_files( } else if entry.is_file() { let Some(open_buffer_task) = project_handle .update(cx, |project, cx| { - project.open_buffer((worktree_id, &entry.path), cx) + project.open_buffer((worktree_id, entry.path.clone()), cx) }) .ok() else { @@ -367,7 +355,9 @@ fn collect_files( let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?; append_buffer_to_output( &snapshot, - Some(&path_including_worktree_name), + Some(Path::new( + path_including_worktree_name.display(path_style).as_ref(), + )), &mut output, ) .log_err(); @@ -462,10 +452,9 @@ pub fn build_entry_output_section( /// This contains a small fork of the util::paths::PathMatcher, that is stricter about the prefix /// check. Only subpaths pass the prefix check, rather than any prefix. mod custom_path_matcher { - use std::{fmt::Debug as _, path::Path}; - use globset::{Glob, GlobSet, GlobSetBuilder}; - use util::paths::SanitizedPath; + use std::fmt::Debug as _; + use util::{paths::SanitizedPath, rel_path::RelPath}; #[derive(Clone, Debug, Default)] pub struct PathMatcher { @@ -492,12 +481,12 @@ mod custom_path_matcher { pub fn new(globs: &[String]) -> Result { let globs = globs .iter() - .map(|glob| Glob::new(&SanitizedPath::new(glob).to_glob_string())) + .map(|glob| Glob::new(&SanitizedPath::new(glob).to_string())) .collect::, _>>()?; let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect(); let sources_with_trailing_slash = globs .iter() - .map(|glob| glob.glob().to_string() + std::path::MAIN_SEPARATOR_STR) + .map(|glob| glob.glob().to_string() + "/") .collect(); let mut glob_builder = GlobSetBuilder::new(); for single_glob in globs { @@ -511,16 +500,13 @@ mod custom_path_matcher { }) } - pub fn is_match>(&self, other: P) -> bool { - let other_path = other.as_ref(); + pub fn is_match(&self, other: &RelPath) -> bool { self.sources .iter() .zip(self.sources_with_trailing_slash.iter()) .any(|(source, with_slash)| { - let as_bytes = other_path.as_os_str().as_encoded_bytes(); - // todo(windows) - // Potential bug: this assumes that the path separator is always `\` on Windows - let with_slash = if source.ends_with(std::path::MAIN_SEPARATOR_STR) { + let as_bytes = other.as_str().as_bytes(); + let with_slash = if source.ends_with('/') { source.as_bytes() } else { with_slash.as_bytes() @@ -528,13 +514,13 @@ mod custom_path_matcher { as_bytes.starts_with(with_slash) || as_bytes.ends_with(source.as_bytes()) }) - || self.glob.is_match(other_path) - || self.check_with_end_separator(other_path) + || self.glob.is_match(other) + || self.check_with_end_separator(other) } - fn check_with_end_separator(&self, path: &Path) -> bool { - let path_str = path.to_string_lossy(); - let separator = std::path::MAIN_SEPARATOR_STR; + fn check_with_end_separator(&self, path: &RelPath) -> bool { + let path_str = path.as_str(); + let separator = "/"; if path_str.ends_with(separator) { false } else { diff --git a/crates/assistant_tools/src/copy_path_tool.rs 
b/crates/assistant_tools/src/copy_path_tool.rs index c56a864bd45efd83d605607962f6103f8da7d1da..572eddcb1079557b464ba29d125aa44929409cc5 100644 --- a/crates/assistant_tools/src/copy_path_tool.rs +++ b/crates/assistant_tools/src/copy_path_tool.rs @@ -96,9 +96,7 @@ impl Tool for CopyPathTool { .and_then(|project_path| project.entry_for_path(&project_path, cx)) { Some(entity) => match project.find_project_path(&input.destination_path, cx) { - Some(project_path) => { - project.copy_entry(entity.id, None, project_path.path, cx) - } + Some(project_path) => project.copy_entry(entity.id, project_path, cx), None => Task::ready(Err(anyhow!( "Destination path {} was outside the project.", input.destination_path diff --git a/crates/assistant_tools/src/diagnostics_tool.rs b/crates/assistant_tools/src/diagnostics_tool.rs index 4ec794e12783746e4e330e79f0c0cb14c84f5d2e..75bd683512b58d2fdb6c43fc319d266f6609f926 100644 --- a/crates/assistant_tools/src/diagnostics_tool.rs +++ b/crates/assistant_tools/src/diagnostics_tool.rs @@ -8,7 +8,7 @@ use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchem use project::Project; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use std::{fmt::Write, path::Path, sync::Arc}; +use std::{fmt::Write, sync::Arc}; use ui::IconName; use util::markdown::MarkdownInlineCode; @@ -150,9 +150,7 @@ impl Tool for DiagnosticsTool { has_diagnostics = true; output.push_str(&format!( "{}: {} error(s), {} warning(s)\n", - Path::new(worktree.read(cx).root_name()) - .join(project_path.path) - .display(), + worktree.read(cx).absolutize(&project_path.path).display(), summary.error_count, summary.warning_count )); diff --git a/crates/assistant_tools/src/edit_file_tool.rs b/crates/assistant_tools/src/edit_file_tool.rs index 1fcd7bbf14fb2e37646902102d51392bc8a470f8..f7ed5e28bf8cf8e4d1452620c7c13732ad28ff8f 100644 --- a/crates/assistant_tools/src/edit_file_tool.rs +++ b/crates/assistant_tools/src/edit_file_tool.rs @@ -38,6 +38,7 @@ use settings::Settings; use std::{ cmp::Reverse, collections::HashSet, + ffi::OsStr, ops::Range, path::{Path, PathBuf}, sync::Arc, @@ -45,7 +46,7 @@ use std::{ }; use theme::ThemeSettings; use ui::{CommonAnimationExt, Disclosure, Tooltip, prelude::*}; -use util::ResultExt; +use util::{ResultExt, rel_path::RelPath}; use workspace::Workspace; pub struct EditFileTool; @@ -146,11 +147,11 @@ impl Tool for EditFileTool { // If any path component matches the local settings folder, then this could affect // the editor in ways beyond the project source, so prompt. 
- let local_settings_folder = paths::local_settings_folder_relative_path(); + let local_settings_folder = paths::local_settings_folder_name(); let path = Path::new(&input.path); if path .components() - .any(|component| component.as_os_str() == local_settings_folder.as_os_str()) + .any(|c| c.as_os_str() == >::as_ref(local_settings_folder)) { return true; } @@ -195,10 +196,10 @@ impl Tool for EditFileTool { let mut description = input.display_description.clone(); // Add context about why confirmation may be needed - let local_settings_folder = paths::local_settings_folder_relative_path(); + let local_settings_folder = paths::local_settings_folder_name(); if path .components() - .any(|c| c.as_os_str() == local_settings_folder.as_os_str()) + .any(|c| c.as_os_str() == >::as_ref(local_settings_folder)) { description.push_str(" (local settings)"); } else if let Ok(canonical_path) = std::fs::canonicalize(&input.path) @@ -377,7 +378,7 @@ impl Tool for EditFileTool { .await; let output = EditFileToolOutput { - original_path: project_path.path.to_path_buf(), + original_path: project_path.path.as_std_path().to_owned(), new_text, old_text, raw_output: Some(agent_output), @@ -549,10 +550,11 @@ fn resolve_path( let file_name = input .path .file_name() + .and_then(|file_name| file_name.to_str()) .context("Can't create file: invalid filename")?; let new_file_path = parent_project_path.map(|parent| ProjectPath { - path: Arc::from(parent.path.join(file_name)), + path: parent.path.join(RelPath::new(file_name).unwrap()), ..parent }); @@ -1236,7 +1238,7 @@ mod tests { use serde_json::json; use settings::SettingsStore; use std::fs; - use util::path; + use util::{path, rel_path::rel_path}; #[gpui::test] async fn test_edit_nonexistent_file(cx: &mut TestAppContext) { @@ -1355,14 +1357,10 @@ mod tests { cx.update(|cx| resolve_path(&input, project, cx)) } + #[track_caller] fn assert_resolved_path_eq(path: anyhow::Result, expected: &str) { - let actual = path - .expect("Should return valid path") - .path - .to_str() - .unwrap() - .replace("\\", "/"); // Naive Windows paths normalization - assert_eq!(actual, expected); + let actual = path.expect("Should return valid path").path; + assert_eq!(actual.as_ref(), rel_path(expected)); } #[test] @@ -1976,25 +1974,22 @@ mod tests { let project = Project::test(fs.clone(), [path!("/home/user/myproject").as_ref()], cx).await; // Get the actual local settings folder name - let local_settings_folder = paths::local_settings_folder_relative_path(); + let local_settings_folder = paths::local_settings_folder_name(); // Test various config path patterns let test_cases = vec![ ( - format!("{}/settings.json", local_settings_folder.display()), + format!("{local_settings_folder}/settings.json"), true, "Top-level local settings file".to_string(), ), ( - format!( - "myproject/{}/settings.json", - local_settings_folder.display() - ), + format!("myproject/{local_settings_folder}/settings.json"), true, "Local settings in project path".to_string(), ), ( - format!("src/{}/config.toml", local_settings_folder.display()), + format!("src/{local_settings_folder}/config.toml"), true, "Local settings in subdirectory".to_string(), ), @@ -2205,12 +2200,7 @@ mod tests { ("", false, "Empty path is treated as project root"), // Root directory ("/", true, "Root directory should be outside project"), - // Parent directory references - find_project_path resolves these - ( - "project/../other", - false, - "Path with .. is resolved by find_project_path", - ), + ("project/../other", true, "Path with .. 
is outside project"), ( "project/./src/file.rs", false, diff --git a/crates/assistant_tools/src/find_path_tool.rs b/crates/assistant_tools/src/find_path_tool.rs index d1451132aeb066a5d4ff9e05f81db3855c1d513a..53da3106d2f9fa5fd7928b4291cc8e80daa3bfdb 100644 --- a/crates/assistant_tools/src/find_path_tool.rs +++ b/crates/assistant_tools/src/find_path_tool.rs @@ -161,10 +161,13 @@ impl Tool for FindPathTool { } fn search_paths(glob: &str, project: Entity, cx: &mut App) -> Task>> { - let path_matcher = match PathMatcher::new([ - // Sometimes models try to search for "". In this case, return all paths in the project. - if glob.is_empty() { "*" } else { glob }, - ]) { + let path_matcher = match PathMatcher::new( + [ + // Sometimes models try to search for "". In this case, return all paths in the project. + if glob.is_empty() { "*" } else { glob }, + ], + project.read(cx).path_style(cx), + ) { Ok(matcher) => matcher, Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {err}"))), }; @@ -178,10 +181,15 @@ fn search_paths(glob: &str, project: Entity, cx: &mut App) -> Task>(), + project.read(cx).path_style(cx), ) { Ok(matcher) => matcher, Err(error) => { @@ -141,7 +142,7 @@ impl Tool for GrepTool { .iter() .chain(global_settings.private_files.sources().iter()); - match PathMatcher::new(exclude_patterns) { + match PathMatcher::new(exclude_patterns, project.read(cx).path_style(cx)) { Ok(matcher) => matcher, Err(error) => { return Task::ready(Err(anyhow!("invalid exclude pattern: {error}"))).into(); diff --git a/crates/assistant_tools/src/list_directory_tool.rs b/crates/assistant_tools/src/list_directory_tool.rs index 9303a50468c428ddd4e603c69d75030dc860e876..d46ac3ac0dc6bd1210472cad13acadd4ce209def 100644 --- a/crates/assistant_tools/src/list_directory_tool.rs +++ b/crates/assistant_tools/src/list_directory_tool.rs @@ -4,11 +4,11 @@ use anyhow::{Result, anyhow}; use assistant_tool::{Tool, ToolResult}; use gpui::{AnyWindowHandle, App, Entity, Task}; use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat}; -use project::{Project, WorktreeSettings}; +use project::{Project, ProjectPath, WorktreeSettings}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::Settings; -use std::{fmt::Write, path::Path, sync::Arc}; +use std::{fmt::Write, sync::Arc}; use ui::IconName; use util::markdown::MarkdownInlineCode; @@ -100,7 +100,7 @@ impl Tool for ListDirectoryTool { .filter_map(|worktree| { worktree.read(cx).root_entry().and_then(|entry| { if entry.is_dir() { - entry.path.to_str() + Some(entry.path.as_str()) } else { None } @@ -158,7 +158,6 @@ impl Tool for ListDirectoryTool { } let worktree_snapshot = worktree.read(cx).snapshot(); - let worktree_root_name = worktree.read(cx).root_name().to_string(); let Some(entry) = worktree_snapshot.entry_for_path(&project_path.path) else { return Task::ready(Err(anyhow!("Path not found: {}", input.path))).into(); @@ -180,23 +179,22 @@ impl Tool for ListDirectoryTool { continue; } - if project - .read(cx) - .find_project_path(&entry.path, cx) - .map(|project_path| { - let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); + let project_path = ProjectPath { + worktree_id: worktree_snapshot.id(), + path: entry.path.clone(), + }; + let worktree_settings = WorktreeSettings::get(Some((&project_path).into()), cx); - worktree_settings.is_path_excluded(&project_path.path) - || worktree_settings.is_path_private(&project_path.path) - }) - .unwrap_or(false) + if 
worktree_settings.is_path_excluded(&project_path.path) + || worktree_settings.is_path_private(&project_path.path) { continue; } - let full_path = Path::new(&worktree_root_name) + let full_path = worktree_snapshot + .root_name() .join(&entry.path) - .display() + .display(worktree_snapshot.path_style()) .to_string(); if entry.is_dir() { folders.push(full_path); diff --git a/crates/assistant_tools/src/move_path_tool.rs b/crates/assistant_tools/src/move_path_tool.rs index 2c065488cea62a73e04c34a659961abc7b94ba54..22dbe9e625468d8c2688b60bdcd94a7da594730e 100644 --- a/crates/assistant_tools/src/move_path_tool.rs +++ b/crates/assistant_tools/src/move_path_tool.rs @@ -108,7 +108,7 @@ impl Tool for MovePathTool { .and_then(|project_path| project.entry_for_path(&project_path, cx)) { Some(entity) => match project.find_project_path(&input.destination_path, cx) { - Some(project_path) => project.rename_entry(entity.id, project_path.path, cx), + Some(project_path) => project.rename_entry(entity.id, project_path, cx), None => Task::ready(Err(anyhow!( "Destination path {} was outside the project.", input.destination_path diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index 930846ab8ff37272f9b0fc0652319318c676f3f7..0d44a82c577a841b988808c72bffcff01583b2c2 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -24,7 +24,7 @@ use postage::{sink::Sink, stream::Stream, watch}; use project::Project; use settings::Settings as _; use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration}; -use util::{ResultExt, TryFutureExt, post_inc}; +use util::{ResultExt, TryFutureExt, paths::PathStyle, post_inc}; pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30); @@ -1163,6 +1163,7 @@ impl Room { room_id: self.id(), worktrees: project.read(cx).worktree_metadata_protos(cx), is_ssh_project: project.read(cx).is_via_remote_server(), + windows_paths: Some(project.read(cx).path_style(cx) == PathStyle::Windows), }); cx.spawn(async move |this, cx| { diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 7ab289a0ecdbab0909b90f2ef289af3c5d4a61b8..5d43b4543c2e28b07953567f3eb54d19e8b218b5 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -405,7 +405,7 @@ impl Telemetry { let mut project_types: HashSet<&str> = HashSet::new(); for (path, _, _) in updated_entries_set.iter() { - let Some(file_name) = path.file_name().and_then(|f| f.to_str()) else { + let Some(file_name) = path.file_name() else { continue; }; @@ -601,6 +601,7 @@ mod tests { use http_client::FakeHttpClient; use std::collections::HashMap; use telemetry_events::FlexibleEvent; + use util::rel_path::RelPath; use worktree::{PathChange, ProjectEntryId, WorktreeId}; #[gpui::test] @@ -855,12 +856,12 @@ mod tests { let entries: Vec<_> = file_paths .into_iter() .enumerate() - .map(|(i, path)| { - ( - Arc::from(std::path::Path::new(path)), + .filter_map(|(i, path)| { + Some(( + Arc::from(RelPath::new(path).ok()?), ProjectEntryId::from_proto(i as u64 + 1), PathChange::Added, - ) + )) }) .collect(); let updated_entries: UpdatedEntriesSet = Arc::from(entries.as_slice()); diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index b2e25458ef98b295b4d056a7f59521f4fa896f1a..d498ecd50a0b88a3a83c7e35a962136e7da74aa5 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql 
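This migration and the collab changes below persist the project's path style as a single `windows_paths` boolean and map it back to a path style when the project is joined. A small sketch of that round-trip, with an illustrative `PathStyle` enum standing in for the real one:

```rust
// Illustrative only: mirrors the `if project.windows_paths { Windows } else
// { Posix }` mapping added in the patch; the enum here is a stand-in, not
// Zed's util::paths::PathStyle.
#[derive(Clone, Copy, Debug, PartialEq)]
enum PathStyle {
    Posix,
    Windows,
}

fn path_style_from_flag(windows_paths: bool) -> PathStyle {
    if windows_paths {
        PathStyle::Windows
    } else {
        PathStyle::Posix
    }
}

fn flag_from_path_style(style: PathStyle) -> bool {
    style == PathStyle::Windows
}

fn main() {
    for flag in [false, true] {
        let style = path_style_from_flag(flag);
        assert_eq!(flag_from_path_style(style), flag);
        println!("windows_paths = {flag} -> {style:?}");
    }
}
```

Defaulting the column to FALSE keeps existing rows on POSIX-style paths, which appears to match the pre-existing POSIX-only behavior.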
@@ -61,7 +61,8 @@ CREATE TABLE "projects" ( "host_user_id" INTEGER REFERENCES users (id), "host_connection_id" INTEGER, "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, - "unregistered" BOOLEAN NOT NULL DEFAULT FALSE + "unregistered" BOOLEAN NOT NULL DEFAULT FALSE, + "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE ); CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); diff --git a/crates/collab/migrations/20250916173002_add_path_style_to_project.sql b/crates/collab/migrations/20250916173002_add_path_style_to_project.sql new file mode 100644 index 0000000000000000000000000000000000000000..b1244818f14403d38af577be4b14b1a8a765e07b --- /dev/null +++ b/crates/collab/migrations/20250916173002_add_path_style_to_project.sql @@ -0,0 +1 @@ +ALTER TABLE projects ADD COLUMN windows_paths BOOLEAN DEFAULT FALSE; diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 6ec57ce95e1863d973624f57947b28fffec042b1..1152cb97d79ef2c7df437479d79b28a5ca6d2ef7 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -34,6 +34,7 @@ use std::{ }; use time::PrimitiveDateTime; use tokio::sync::{Mutex, OwnedMutexGuard}; +use util::paths::PathStyle; use worktree_settings_file::LocalSettingsKind; #[cfg(test)] @@ -598,6 +599,7 @@ pub struct Project { pub worktrees: BTreeMap, pub repositories: Vec, pub language_servers: Vec, + pub path_style: PathStyle, } pub struct ProjectCollaborator { diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index d83f6de206b414f00ea8f176672aeb41641f289a..8014cd3cab27b472dc5a2fab9e896fb75ff226a2 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -33,6 +33,7 @@ impl Database { connection: ConnectionId, worktrees: &[proto::WorktreeMetadata], is_ssh_project: bool, + windows_paths: bool, ) -> Result> { self.room_transaction(room_id, |tx| async move { let participant = room_participant::Entity::find() @@ -69,6 +70,7 @@ impl Database { connection.owner_id as i32, ))), id: ActiveValue::NotSet, + windows_paths: ActiveValue::set(windows_paths), } .insert(&*tx) .await?; @@ -1046,6 +1048,12 @@ impl Database { .all(tx) .await?; + let path_style = if project.windows_paths { + PathStyle::Windows + } else { + PathStyle::Posix + }; + let project = Project { id: project.id, role, @@ -1073,6 +1081,7 @@ impl Database { capabilities: language_server.capabilities, }) .collect(), + path_style, }; Ok((project, replica_id as ReplicaId)) } diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs index 8a7fea55243b6fdd0352a1fca1cfa4891fbed7fc..11a9b972ebcd7af29d6e6c234096384ce9ff7701 100644 --- a/crates/collab/src/db/tables/project.rs +++ b/crates/collab/src/db/tables/project.rs @@ -12,6 +12,7 @@ pub struct Model { pub host_user_id: Option, pub host_connection_id: Option, pub host_connection_server_id: Option, + pub windows_paths: bool, } impl Model { diff --git a/crates/collab/src/db/tests/db_tests.rs b/crates/collab/src/db/tests/db_tests.rs index 07ab1e62500c8dcddc0ceaeac5cdc9992a6294f8..def0769c373605021653b07a97cbff2ec807d34d 100644 --- a/crates/collab/src/db/tests/db_tests.rs +++ b/crates/collab/src/db/tests/db_tests.rs @@ -558,18 +558,18 @@ async fn test_project_count(db: &Arc) { .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false) + db.share_project(room_id, ConnectionId { 
owner_id, id: 1 }, &[], false, false) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false) + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); // Projects shared by admins aren't counted. - db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false) + db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, false) .await .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 49f97eb11d4b4dd44591ca500668828e30013c03..fa2ca6a890af93979eed759265286d99a5a98bb2 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -36,6 +36,7 @@ use reqwest_client::ReqwestClient; use rpc::proto::split_repository_update; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; use tracing::Span; +use util::paths::PathStyle; use futures::{ FutureExt, SinkExt, StreamExt, TryStreamExt, channel::oneshot, future::BoxFuture, @@ -1879,6 +1880,7 @@ async fn share_project( session.connection_id, &request.worktrees, request.is_ssh_project, + request.windows_paths.unwrap_or(false), ) .await?; response.send(proto::ShareProjectResponse { @@ -2012,6 +2014,7 @@ async fn join_project( language_servers, language_server_capabilities, role: project.role.into(), + windows_paths: project.path_style == PathStyle::Windows, })?; for (worktree_id, worktree) in mem::take(&mut project.worktrees) { diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 0b331ff1e66279f5e2f5e52f9d83f0eaca6cfcdb..a2c7e97c63e6ecd4936b7ff1e839518ad66f936e 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -13,6 +13,7 @@ use gpui::{BackgroundExecutor, Context, Entity, TestAppContext, Window}; use rpc::{RECEIVE_TIMEOUT, proto::PeerId}; use serde_json::json; use std::ops::Range; +use util::rel_path::rel_path; use workspace::CollaboratorId; #[gpui::test] @@ -256,7 +257,13 @@ async fn test_channel_notes_participant_indices( executor.start_waiting(); let editor_a = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id_a, "file.txt"), None, true, window, cx) + workspace.open_path( + (worktree_id_a, rel_path("file.txt")), + None, + true, + window, + cx, + ) }) .await .unwrap() @@ -265,7 +272,13 @@ async fn test_channel_notes_participant_indices( executor.start_waiting(); let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id_a, "file.txt"), None, true, window, cx) + workspace.open_path( + (worktree_id_a, rel_path("file.txt")), + None, + true, + window, + cx, + ) }) .await .unwrap() diff --git a/crates/collab/src/tests/channel_guest_tests.rs b/crates/collab/src/tests/channel_guest_tests.rs index f5051ba876438b6872b3826c1c75258bde990ed8..604dec3194fc8917e66606d4e7b770df337615aa 100644 --- a/crates/collab/src/tests/channel_guest_tests.rs +++ b/crates/collab/src/tests/channel_guest_tests.rs @@ -4,6 +4,7 @@ use chrono::Utc; use editor::Editor; use gpui::{BackgroundExecutor, TestAppContext}; use rpc::proto; +use util::rel_path::rel_path; #[gpui::test] async fn test_channel_guests( @@ -55,7 +56,7 @@ async fn test_channel_guests( project_b .update(cx_b, |project, cx| { let worktree_id = 
project.worktrees(cx).next().unwrap().read(cx).id(); - project.create_entry((worktree_id, "b.txt"), false, cx) + project.create_entry((worktree_id, rel_path("b.txt")), false, cx) }) .await .is_err() diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 2169b7123263305e45f11403796f8d7c395c2cdf..947af01224fdc55284dc3a8166f652a9782d5e5f 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -16,6 +16,7 @@ use editor::{ }; use fs::Fs; use futures::{SinkExt, StreamExt, channel::mpsc, lock::Mutex}; +use git::repository::repo_path; use gpui::{App, Rgba, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext}; use indoc::indoc; use language::FakeLspAdapter; @@ -38,7 +39,7 @@ use std::{ }, }; use text::Point; -use util::{path, uri}; +use util::{path, rel_path::rel_path, uri}; use workspace::{CloseIntent, Workspace}; #[gpui::test(iterations = 10)] @@ -97,7 +98,7 @@ async fn test_host_disconnect( let editor_b = workspace_b .update(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "b.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("b.txt")), None, true, window, cx) }) .unwrap() .await @@ -205,7 +206,9 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor( // Open a buffer as client A let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); let cx_a = cx_a.add_empty_window(); @@ -222,7 +225,9 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor( let cx_b = cx_b.add_empty_window(); // Open a buffer as client B let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); let editor_b = cx_b @@ -334,7 +339,9 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu // Open a file in an editor as the guest. let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) .await .unwrap(); let cx_b = cx_b.add_empty_window(); @@ -408,7 +415,9 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu // Open the buffer on the host. 
let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) .await .unwrap(); cx_a.executor().run_until_parked(); @@ -599,7 +608,7 @@ async fn test_collaborating_with_code_actions( let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -825,7 +834,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "one.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("one.rs")), None, true, window, cx) }) .await .unwrap() @@ -1072,7 +1081,7 @@ async fn test_slow_lsp_server(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "one.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("one.rs")), None, true, window, cx) }) .await .unwrap() @@ -1412,7 +1421,10 @@ async fn test_share_project( project_b .update(cx_b, |project, cx| { let worktree = project.worktrees(cx).next().unwrap(); - let entry = worktree.read(cx).entry_for_path("ignored-dir").unwrap(); + let entry = worktree + .read(cx) + .entry_for_path(rel_path("ignored-dir")) + .unwrap(); project.expand_entry(worktree_id, entry.id, cx).unwrap() }) .await @@ -1435,17 +1447,21 @@ async fn test_share_project( // Open the same file as client B and client A. let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("b.txt")), cx) + }) .await .unwrap(); buffer_b.read_with(cx_b, |buf, _| assert_eq!(buf.text(), "b-contents")); project_a.read_with(cx_a, |project, cx| { - assert!(project.has_open_buffer((worktree_id, "b.txt"), cx)) + assert!(project.has_open_buffer((worktree_id, rel_path("b.txt")), cx)) }); let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("b.txt")), cx) + }) .await .unwrap(); @@ -1553,7 +1569,9 @@ async fn test_on_input_format_from_host_to_guest( // Open a file in an editor as the host. let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) .await .unwrap(); let cx_a = cx_a.add_empty_window(); @@ -1586,7 +1604,9 @@ async fn test_on_input_format_from_host_to_guest( // Open the buffer on the guest and see that the formatting worked let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) .await .unwrap(); @@ -1686,7 +1706,9 @@ async fn test_on_input_format_from_guest_to_host( // Open a file in an editor as the guest. 
let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) .await .unwrap(); let cx_b = cx_b.add_empty_window(); @@ -1732,7 +1754,9 @@ async fn test_on_input_format_from_guest_to_host( // Open the buffer on the host and see that the formatting worked let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("main.rs")), cx) + }) .await .unwrap(); executor.run_until_parked(); @@ -1881,7 +1905,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .unwrap(); let editor_a = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -1931,7 +1955,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -2126,7 +2150,7 @@ async fn test_inlay_hint_refresh_is_forwarded( let editor_a = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -2135,7 +2159,7 @@ async fn test_inlay_hint_refresh_is_forwarded( let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -2313,7 +2337,7 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo .unwrap(); let editor_a = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -2373,7 +2397,7 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -2594,7 +2618,7 @@ async fn test_lsp_pull_diagnostics( .unwrap(); let editor_a_main = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -2953,7 +2977,7 @@ async fn test_lsp_pull_diagnostics( let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b_main = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -3001,7 +3025,7 @@ async fn test_lsp_pull_diagnostics( let editor_b_lib = workspace_b .update_in(cx_b, |workspace, window, cx| { - 
workspace.open_path((worktree_id, "lib.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("lib.rs")), None, true, window, cx) }) .await .unwrap() @@ -3355,7 +3379,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA }; client_a.fs().set_blame_for_repo( Path::new(path!("/my-repo/.git")), - vec![("file.txt".into(), blame)], + vec![(repo_path("file.txt"), blame)], ); let (project_a, worktree_id) = client_a.build_local_project(path!("/my-repo"), cx_a).await; @@ -3368,7 +3392,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); let editor_a = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "file.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("file.txt")), None, true, window, cx) }) .await .unwrap() @@ -3380,7 +3404,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "file.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("file.txt")), None, true, window, cx) }) .await .unwrap() @@ -3558,13 +3582,13 @@ async fn test_collaborating_with_editorconfig( .unwrap(); let main_buffer_a = project_a .update(cx_a, |p, cx| { - p.open_buffer((worktree_id, "src/main.rs"), cx) + p.open_buffer((worktree_id, rel_path("src/main.rs")), cx) }) .await .unwrap(); let other_buffer_a = project_a .update(cx_a, |p, cx| { - p.open_buffer((worktree_id, "src/other_mod/other.rs"), cx) + p.open_buffer((worktree_id, rel_path("src/other_mod/other.rs")), cx) }) .await .unwrap(); @@ -3592,13 +3616,13 @@ async fn test_collaborating_with_editorconfig( let project_b = client_b.join_remote_project(project_id, cx_b).await; let main_buffer_b = project_b .update(cx_b, |p, cx| { - p.open_buffer((worktree_id, "src/main.rs"), cx) + p.open_buffer((worktree_id, rel_path("src/main.rs")), cx) }) .await .unwrap(); let other_buffer_b = project_b .update(cx_b, |p, cx| { - p.open_buffer((worktree_id, "src/other_mod/other.rs"), cx) + p.open_buffer((worktree_id, rel_path("src/other_mod/other.rs")), cx) }) .await .unwrap(); @@ -3717,7 +3741,7 @@ fn main() { let foo = other::foo(); }"}; let editorconfig_buffer_b = project_b .update(cx_b, |p, cx| { - p.open_buffer((worktree_id, "src/other_mod/.editorconfig"), cx) + p.open_buffer((worktree_id, rel_path("src/other_mod/.editorconfig")), cx) }) .await .unwrap(); @@ -3794,7 +3818,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await; let project_path = ProjectPath { worktree_id, - path: Arc::from(Path::new(&"test.txt")), + path: rel_path(&"test.txt").into(), }; let abs_path = project_a.read_with(cx_a, |project, cx| { project @@ -4017,7 +4041,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes let editor_a = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() @@ -4026,7 +4050,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes let editor_b = workspace_b .update_in(cx_b, |workspace, window, cx| { - 
workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .await .unwrap() diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index e6bab12934d9c262cbba378e0d2a94143e0a7602..1bf0a28e341cf1e74dd12d962f875eb18f4474db 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -16,7 +16,7 @@ use rpc::proto::PeerId; use serde_json::json; use settings::SettingsStore; use text::{Point, ToPoint}; -use util::{path, test::sample_text}; +use util::{path, rel_path::rel_path, test::sample_text}; use workspace::{CollaboratorId, SplitDirection, Workspace, item::ItemHandle as _}; use super::TestClient; @@ -86,7 +86,7 @@ async fn test_basic_following( let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone()); let editor_a1 = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap() @@ -94,7 +94,7 @@ async fn test_basic_following( .unwrap(); let editor_a2 = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "2.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("2.txt")), None, true, window, cx) }) .await .unwrap() @@ -104,7 +104,7 @@ async fn test_basic_following( // Client B opens an editor. let editor_b1 = workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap() @@ -146,7 +146,7 @@ async fn test_basic_following( }); assert_eq!( cx_b.read(|cx| editor_b2.project_path(cx)), - Some((worktree_id, "2.txt").into()) + Some((worktree_id, rel_path("2.txt")).into()) ); assert_eq!( editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)), @@ -286,12 +286,12 @@ async fn test_basic_following( let multibuffer_a = cx_a.new(|cx| { let buffer_a1 = project_a.update(cx, |project, cx| { project - .get_open_buffer(&(worktree_id, "1.txt").into(), cx) + .get_open_buffer(&(worktree_id, rel_path("1.txt")).into(), cx) .unwrap() }); let buffer_a2 = project_a.update(cx, |project, cx| { project - .get_open_buffer(&(worktree_id, "2.txt").into(), cx) + .get_open_buffer(&(worktree_id, rel_path("2.txt")).into(), cx) .unwrap() }); let mut result = MultiBuffer::new(Capability::ReadWrite); @@ -618,13 +618,13 @@ async fn test_following_tab_order( //Open 1, 3 in that order on client A workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap(); workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "3.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("3.txt")), None, true, window, cx) }) .await .unwrap(); @@ -632,14 +632,7 @@ async fn test_following_tab_order( let pane_paths = |pane: &Entity, cx: &mut VisualTestContext| { pane.update(cx, |pane, cx| { pane.items() - .map(|item| { - item.project_path(cx) - .unwrap() - .path - .to_str() - .unwrap() - .to_owned() - }) + .map(|item| item.project_path(cx).unwrap().path.as_str().to_owned()) .collect::>() }) }; @@ -656,7 +649,7 @@ async fn 
test_following_tab_order( //Open just 2 on client B workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "2.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("2.txt")), None, true, window, cx) }) .await .unwrap(); @@ -668,7 +661,7 @@ async fn test_following_tab_order( //Open just 1 on client B workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap(); @@ -728,7 +721,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap() @@ -739,7 +732,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "2.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("2.txt")), None, true, window, cx) }) .await .unwrap() @@ -816,14 +809,14 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T // Clients A and B each open a new file. workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "3.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("3.txt")), None, true, window, cx) }) .await .unwrap(); workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "4.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("4.txt")), None, true, window, cx) }) .await .unwrap(); @@ -1259,7 +1252,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont let _editor_a1 = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap() @@ -1359,7 +1352,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont // When client B activates a different item in the original pane, it automatically stops following client A. 
workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id, "2.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("2.txt")), None, true, window, cx) }) .await .unwrap(); @@ -1492,7 +1485,7 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id_a, "w.rs"), None, true, window, cx) + workspace.open_path((worktree_id_a, rel_path("w.rs")), None, true, window, cx) }) .await .unwrap(); @@ -1545,7 +1538,7 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut // b moves to x.rs in a's project, and a follows workspace_b_project_a .update_in(&mut cx_b2, |workspace, window, cx| { - workspace.open_path((worktree_id_a, "x.rs"), None, true, window, cx) + workspace.open_path((worktree_id_a, rel_path("x.rs")), None, true, window, cx) }) .await .unwrap(); @@ -1574,7 +1567,7 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut // b moves to y.rs in b's project, a is still following but can't yet see workspace_b .update_in(cx_b, |workspace, window, cx| { - workspace.open_path((worktree_id_b, "y.rs"), None, true, window, cx) + workspace.open_path((worktree_id_b, rel_path("y.rs")), None, true, window, cx) }) .await .unwrap(); @@ -1759,7 +1752,7 @@ async fn test_following_into_excluded_file( // Client A opens editors for a regular file and an excluded file. let editor_for_regular = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap() @@ -1767,7 +1760,13 @@ async fn test_following_into_excluded_file( .unwrap(); let editor_for_excluded_a = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, window, cx) + workspace.open_path( + (worktree_id, rel_path(".git/COMMIT_EDITMSG")), + None, + true, + window, + cx, + ) }) .await .unwrap() @@ -1805,7 +1804,7 @@ async fn test_following_into_excluded_file( }); assert_eq!( cx_b.read(|cx| editor_for_excluded_b.project_path(cx)), - Some((worktree_id, ".git/COMMIT_EDITMSG").into()) + Some((worktree_id, rel_path(".git/COMMIT_EDITMSG")).into()) ); assert_eq!( editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)), @@ -2051,7 +2050,7 @@ async fn test_following_to_channel_notes_without_a_shared_project( // Client A opens a local buffer in their unshared project. 
let _unshared_editor_a1 = workspace_a .update_in(cx_a, |workspace, window, cx| { - workspace.open_path((worktree_id, "1.txt"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("1.txt")), None, true, window, cx) }) .await .unwrap() diff --git a/crates/collab/src/tests/git_tests.rs b/crates/collab/src/tests/git_tests.rs index 3f78bd11585a1746d1309491a58e2ef49258a8fd..ef9f871a2252845e5154e456e258b4f4ab4bf057 100644 --- a/crates/collab/src/tests/git_tests.rs +++ b/crates/collab/src/tests/git_tests.rs @@ -1,7 +1,4 @@ -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::path::Path; use call::ActiveCall; use git::status::{FileStatus, StatusCode, TrackedStatus}; @@ -9,7 +6,7 @@ use git_ui::project_diff::ProjectDiff; use gpui::{TestAppContext, VisualTestContext}; use project::ProjectPath; use serde_json::json; -use util::path; +use util::{path, rel_path::rel_path}; use workspace::Workspace; // @@ -41,13 +38,13 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) ) .await; - client_a.fs().set_git_content_for_repo( + client_a.fs().set_head_and_index_for_repo( Path::new(path!("/a/.git")), &[ - ("changed.txt".into(), "before\n".to_string(), None), - ("unchanged.txt".into(), "unchanged\n".to_string(), None), - ("deleted.txt".into(), "deleted\n".to_string(), None), - ("secret.pem".into(), "shh\n".to_string(), None), + ("changed.txt", "before\n".to_string()), + ("unchanged.txt", "unchanged\n".to_string()), + ("deleted.txt", "deleted\n".to_string()), + ("secret.pem", "shh\n".to_string()), ], ); let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await; @@ -109,7 +106,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) project_b.update(cx_b, |project, cx| { let project_path = ProjectPath { worktree_id, - path: Arc::from(PathBuf::from("unchanged.txt")), + path: rel_path("unchanged.txt").into(), }; let status = project.project_path_git_status(&project_path, cx); assert_eq!( diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index de255830cd154e4180afd32538f13df200bc36d5..93c3665c11edebcb371dc8f1b819f84c00b2dda8 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -14,7 +14,10 @@ use client::{RECEIVE_TIMEOUT, User}; use collections::{HashMap, HashSet}; use fs::{FakeFs, Fs as _, RemoveOptions}; use futures::{StreamExt as _, channel::mpsc}; -use git::status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode}; +use git::{ + repository::repo_path, + status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode}, +}; use gpui::{ App, BackgroundExecutor, Entity, Modifiers, MouseButton, MouseDownEvent, TestAppContext, UpdateGlobal, px, size, @@ -30,7 +33,7 @@ use parking_lot::Mutex; use pretty_assertions::assert_eq; use project::{ DiagnosticSummary, HoverBlockKind, Project, ProjectPath, - lsp_store::{FormatTrigger, LspFormatTarget}, + lsp_store::{FormatTrigger, LspFormatTarget, SymbolLocation}, search::{SearchQuery, SearchResult}, }; use prompt_store::PromptBuilder; @@ -49,7 +52,7 @@ use std::{ time::Duration, }; use unindent::Unindent as _; -use util::{path, uri}; +use util::{path, rel_path::rel_path, uri}; use workspace::Pane; #[ctor::ctor] @@ -1418,7 +1421,9 @@ async fn test_unshare_project( assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), 
cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); @@ -1454,7 +1459,9 @@ async fn test_unshare_project( assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer())); project_c2 - .update(cx_c, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_c, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); @@ -1584,11 +1591,15 @@ async fn test_project_reconnect( }); let buffer_a1 = project_a1 - .update(cx_a, |p, cx| p.open_buffer((worktree1_id, "a.txt"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree1_id, rel_path("a.txt")), cx) + }) .await .unwrap(); let buffer_b1 = project_b1 - .update(cx_b, |p, cx| p.open_buffer((worktree1_id, "a.txt"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree1_id, rel_path("a.txt")), cx) + }) .await .unwrap(); @@ -1675,20 +1686,15 @@ async fn test_project_reconnect( assert!(project.is_shared()); assert!(worktree_a1.read(cx).has_update_observer()); assert_eq!( - worktree_a1 - .read(cx) - .snapshot() - .paths() - .map(|p| p.to_str().unwrap()) - .collect::>(), + worktree_a1.read(cx).snapshot().paths().collect::>(), vec![ - path!("a.txt"), - path!("b.txt"), - path!("subdir2"), - path!("subdir2/f.txt"), - path!("subdir2/g.txt"), - path!("subdir2/h.txt"), - path!("subdir2/i.txt") + rel_path("a.txt"), + rel_path("b.txt"), + rel_path("subdir2"), + rel_path("subdir2/f.txt"), + rel_path("subdir2/g.txt"), + rel_path("subdir2/h.txt"), + rel_path("subdir2/i.txt") ] ); assert!(worktree_a3.read(cx).has_update_observer()); @@ -1697,7 +1703,7 @@ async fn test_project_reconnect( .read(cx) .snapshot() .paths() - .map(|p| p.to_str().unwrap()) + .map(|p| p.as_str()) .collect::>(), vec!["w.txt", "x.txt", "y.txt"] ); @@ -1712,16 +1718,15 @@ async fn test_project_reconnect( .read(cx) .snapshot() .paths() - .map(|p| p.to_str().unwrap()) .collect::>(), vec![ - path!("a.txt"), - path!("b.txt"), - path!("subdir2"), - path!("subdir2/f.txt"), - path!("subdir2/g.txt"), - path!("subdir2/h.txt"), - path!("subdir2/i.txt") + rel_path("a.txt"), + rel_path("b.txt"), + rel_path("subdir2"), + rel_path("subdir2/f.txt"), + rel_path("subdir2/g.txt"), + rel_path("subdir2/h.txt"), + rel_path("subdir2/i.txt") ] ); assert!(project.worktree_for_id(worktree2_id, cx).is_none()); @@ -1732,7 +1737,7 @@ async fn test_project_reconnect( .read(cx) .snapshot() .paths() - .map(|p| p.to_str().unwrap()) + .map(|p| p.as_str()) .collect::>(), vec!["w.txt", "x.txt", "y.txt"] ); @@ -1809,16 +1814,15 @@ async fn test_project_reconnect( .read(cx) .snapshot() .paths() - .map(|p| p.to_str().unwrap()) .collect::>(), vec![ - path!("a.txt"), - path!("b.txt"), - path!("subdir2"), - path!("subdir2/f.txt"), - path!("subdir2/g.txt"), - path!("subdir2/h.txt"), - path!("subdir2/j.txt") + rel_path("a.txt"), + rel_path("b.txt"), + rel_path("subdir2"), + rel_path("subdir2/f.txt"), + rel_path("subdir2/g.txt"), + rel_path("subdir2/h.txt"), + rel_path("subdir2/j.txt") ] ); assert!(project.worktree_for_id(worktree2_id, cx).is_none()); @@ -1829,7 +1833,7 @@ async fn test_project_reconnect( .read(cx) .snapshot() .paths() - .map(|p| p.to_str().unwrap()) + .map(|p| p.as_str()) .collect::>(), vec!["z.txt"] ); @@ -2370,11 +2374,15 @@ async fn test_propagate_saves_and_fs_changes( // Open and edit a buffer as both guests B and C. 
let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("file1.rs")), cx) + }) .await .unwrap(); let buffer_c = project_c - .update(cx_c, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx)) + .update(cx_c, |p, cx| { + p.open_buffer((worktree_id, rel_path("file1.rs")), cx) + }) .await .unwrap(); @@ -2390,7 +2398,9 @@ async fn test_propagate_saves_and_fs_changes( // Open and edit that buffer as the host. let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("file1.rs")), cx) + }) .await .unwrap(); @@ -2461,27 +2471,21 @@ async fn test_propagate_saves_and_fs_changes( worktree_a.read_with(cx_a, |tree, _| { assert_eq!( - tree.paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + tree.paths().map(|p| p.as_str()).collect::>(), ["file1.js", "file3", "file4"] ) }); worktree_b.read_with(cx_b, |tree, _| { assert_eq!( - tree.paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + tree.paths().map(|p| p.as_str()).collect::>(), ["file1.js", "file3", "file4"] ) }); worktree_c.read_with(cx_c, |tree, _| { assert_eq!( - tree.paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + tree.paths().map(|p| p.as_str()).collect::>(), ["file1.js", "file3", "file4"] ) }); @@ -2489,17 +2493,17 @@ async fn test_propagate_saves_and_fs_changes( // Ensure buffer files are updated as well. buffer_a.read_with(cx_a, |buffer, _| { - assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); + assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js"); assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_b.read_with(cx_b, |buffer, _| { - assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); + assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js"); assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); buffer_c.read_with(cx_c, |buffer, _| { - assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js")); + assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js"); assert_eq!(buffer.language().unwrap().name(), "JavaScript".into()); }); @@ -2524,7 +2528,7 @@ async fn test_propagate_saves_and_fs_changes( project_a .update(cx_a, |project, cx| { let path = ProjectPath { - path: Arc::from(Path::new("file3.rs")), + path: rel_path("file3.rs").into(), worktree_id: worktree_a.read(cx).id(), }; @@ -2538,7 +2542,7 @@ async fn test_propagate_saves_and_fs_changes( new_buffer_b.read_with(cx_b, |buffer_b, _| { assert_eq!( buffer_b.file().unwrap().path().as_ref(), - Path::new("file3.rs") + rel_path("file3.rs") ); new_buffer_a.read_with(cx_a, |buffer_a, _| { @@ -2621,19 +2625,20 @@ async fn test_git_diff_base_change( " .unindent(); - client_a.fs().set_index_for_repo( - Path::new("/dir/.git"), - &[("a.txt".into(), staged_text.clone())], - ); + client_a + .fs() + .set_index_for_repo(Path::new("/dir/.git"), &[("a.txt", staged_text.clone())]); client_a.fs().set_head_for_repo( Path::new("/dir/.git"), - &[("a.txt".into(), committed_text.clone())], + &[("a.txt", committed_text.clone())], "deadbeef", ); // Create the buffer let buffer_local_a = project_local - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); let local_unstaged_diff_a = project_local @@ -2661,7 +2666,9 @@ async fn test_git_diff_base_change( // Create remote 
buffer let remote_buffer_a = project_remote - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); let remote_unstaged_diff_a = project_remote @@ -2717,11 +2724,11 @@ async fn test_git_diff_base_change( // Update the index text of the open buffer client_a.fs().set_index_for_repo( Path::new("/dir/.git"), - &[("a.txt".into(), new_staged_text.clone())], + &[("a.txt", new_staged_text.clone())], ); client_a.fs().set_head_for_repo( Path::new("/dir/.git"), - &[("a.txt".into(), new_committed_text.clone())], + &[("a.txt", new_committed_text.clone())], "deadbeef", ); @@ -2790,12 +2797,14 @@ async fn test_git_diff_base_change( client_a.fs().set_index_for_repo( Path::new("/dir/sub/.git"), - &[("b.txt".into(), staged_text.clone())], + &[("b.txt", staged_text.clone())], ); // Create the buffer let buffer_local_b = project_local - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("sub/b.txt")), cx) + }) .await .unwrap(); let local_unstaged_diff_b = project_local @@ -2823,7 +2832,9 @@ async fn test_git_diff_base_change( // Create remote buffer let remote_buffer_b = project_remote - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("sub/b.txt")), cx) + }) .await .unwrap(); let remote_unstaged_diff_b = project_remote @@ -2851,7 +2862,7 @@ async fn test_git_diff_base_change( // Updatet the staged text client_a.fs().set_index_for_repo( Path::new("/dir/sub/.git"), - &[("b.txt".into(), new_staged_text.clone())], + &[("b.txt", new_staged_text.clone())], ); // Wait for buffer_local_b to receive it @@ -3011,21 +3022,21 @@ async fn test_git_status_sync( // and b.txt is unmerged. client_a.fs().set_head_for_repo( path!("/dir/.git").as_ref(), - &[("b.txt".into(), "B".into()), ("c.txt".into(), "c".into())], + &[("b.txt", "B".into()), ("c.txt", "c".into())], "deadbeef", ); client_a.fs().set_index_for_repo( path!("/dir/.git").as_ref(), &[ - ("a.txt".into(), "".into()), - ("b.txt".into(), "B".into()), - ("c.txt".into(), "c".into()), + ("a.txt", "".into()), + ("b.txt", "B".into()), + ("c.txt", "c".into()), ], ); client_a.fs().set_unmerged_paths_for_repo( path!("/dir/.git").as_ref(), &[( - "b.txt".into(), + repo_path("b.txt"), UnmergedStatus { first_head: UnmergedStatusCode::Updated, second_head: UnmergedStatusCode::Deleted, @@ -3056,13 +3067,8 @@ async fn test_git_status_sync( executor.run_until_parked(); #[track_caller] - fn assert_status( - file: impl AsRef, - status: Option, - project: &Project, - cx: &App, - ) { - let file = file.as_ref(); + fn assert_status(file: &str, status: Option, project: &Project, cx: &App) { + let file = repo_path(file); let repos = project .repositories(cx) .values() @@ -3072,7 +3078,7 @@ async fn test_git_status_sync( let repo = repos.into_iter().next().unwrap(); assert_eq!( repo.read(cx) - .status_for_path(&file.into()) + .status_for_path(&file) .map(|entry| entry.status), status ); @@ -3107,7 +3113,7 @@ async fn test_git_status_sync( // and modify c.txt in the working copy. 
client_a.fs().set_index_for_repo( path!("/dir/.git").as_ref(), - &[("a.txt".into(), "a".into()), ("c.txt".into(), "c".into())], + &[("a.txt", "a".into()), ("c.txt", "c".into())], ); client_a .fs() @@ -3202,7 +3208,7 @@ async fn test_fs_operations( let entry = project_b .update(cx_b, |project, cx| { - project.create_entry((worktree_id, "c.txt"), false, cx) + project.create_entry((worktree_id, rel_path("c.txt")), false, cx) }) .await .unwrap() @@ -3211,27 +3217,21 @@ async fn test_fs_operations( worktree_a.read_with(cx_a, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "c.txt"] ); }); worktree_b.read_with(cx_b, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "c.txt"] ); }); project_b .update(cx_b, |project, cx| { - project.rename_entry(entry.id, Path::new("d.txt"), cx) + project.rename_entry(entry.id, (worktree_id, rel_path("d.txt")).into(), cx) }) .await .unwrap() @@ -3240,27 +3240,21 @@ async fn test_fs_operations( worktree_a.read_with(cx_a, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "d.txt"] ); }); worktree_b.read_with(cx_b, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "d.txt"] ); }); let dir_entry = project_b .update(cx_b, |project, cx| { - project.create_entry((worktree_id, "DIR"), true, cx) + project.create_entry((worktree_id, rel_path("DIR")), true, cx) }) .await .unwrap() @@ -3269,27 +3263,21 @@ async fn test_fs_operations( worktree_a.read_with(cx_a, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["DIR", "a.txt", "b.txt", "d.txt"] ); }); worktree_b.read_with(cx_b, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["DIR", "a.txt", "b.txt", "d.txt"] ); }); project_b .update(cx_b, |project, cx| { - project.create_entry((worktree_id, "DIR/e.txt"), false, cx) + project.create_entry((worktree_id, rel_path("DIR/e.txt")), false, cx) }) .await .unwrap() @@ -3298,7 +3286,7 @@ async fn test_fs_operations( project_b .update(cx_b, |project, cx| { - project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx) + project.create_entry((worktree_id, rel_path("DIR/SUBDIR")), true, cx) }) .await .unwrap() @@ -3307,7 +3295,7 @@ async fn test_fs_operations( project_b .update(cx_b, |project, cx| { - project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx) + project.create_entry((worktree_id, rel_path("DIR/SUBDIR/f.txt")), false, cx) }) .await .unwrap() @@ -3316,43 +3304,41 @@ async fn test_fs_operations( worktree_a.read_with(cx_a, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().collect::>(), [ - path!("DIR"), - path!("DIR/SUBDIR"), - path!("DIR/SUBDIR/f.txt"), - path!("DIR/e.txt"), - path!("a.txt"), - path!("b.txt"), - path!("d.txt") + rel_path("DIR"), + rel_path("DIR/SUBDIR"), + rel_path("DIR/SUBDIR/f.txt"), + rel_path("DIR/e.txt"), + rel_path("a.txt"), + rel_path("b.txt"), + rel_path("d.txt") ] ); }); worktree_b.read_with(cx_b, 
|worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().collect::>(), [ - path!("DIR"), - path!("DIR/SUBDIR"), - path!("DIR/SUBDIR/f.txt"), - path!("DIR/e.txt"), - path!("a.txt"), - path!("b.txt"), - path!("d.txt") + rel_path("DIR"), + rel_path("DIR/SUBDIR"), + rel_path("DIR/SUBDIR/f.txt"), + rel_path("DIR/e.txt"), + rel_path("a.txt"), + rel_path("b.txt"), + rel_path("d.txt") ] ); }); project_b .update(cx_b, |project, cx| { - project.copy_entry(entry.id, None, Path::new("f.txt"), cx) + project.copy_entry( + entry.id, + (worktree_b.read(cx).id(), rel_path("f.txt")).into(), + cx, + ) }) .await .unwrap() @@ -3360,38 +3346,32 @@ async fn test_fs_operations( worktree_a.read_with(cx_a, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().collect::>(), [ - path!("DIR"), - path!("DIR/SUBDIR"), - path!("DIR/SUBDIR/f.txt"), - path!("DIR/e.txt"), - path!("a.txt"), - path!("b.txt"), - path!("d.txt"), - path!("f.txt") + rel_path("DIR"), + rel_path("DIR/SUBDIR"), + rel_path("DIR/SUBDIR/f.txt"), + rel_path("DIR/e.txt"), + rel_path("a.txt"), + rel_path("b.txt"), + rel_path("d.txt"), + rel_path("f.txt") ] ); }); worktree_b.read_with(cx_b, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().collect::>(), [ - path!("DIR"), - path!("DIR/SUBDIR"), - path!("DIR/SUBDIR/f.txt"), - path!("DIR/e.txt"), - path!("a.txt"), - path!("b.txt"), - path!("d.txt"), - path!("f.txt") + rel_path("DIR"), + rel_path("DIR/SUBDIR"), + rel_path("DIR/SUBDIR/f.txt"), + rel_path("DIR/e.txt"), + rel_path("a.txt"), + rel_path("b.txt"), + rel_path("d.txt"), + rel_path("f.txt") ] ); }); @@ -3406,20 +3386,14 @@ async fn test_fs_operations( worktree_a.read_with(cx_a, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "d.txt", "f.txt"] ); }); worktree_b.read_with(cx_b, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "d.txt", "f.txt"] ); }); @@ -3433,20 +3407,14 @@ async fn test_fs_operations( worktree_a.read_with(cx_a, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "f.txt"] ); }); worktree_b.read_with(cx_b, |worktree, _| { assert_eq!( - worktree - .paths() - .map(|p| p.to_string_lossy()) - .collect::>(), + worktree.paths().map(|p| p.as_str()).collect::>(), ["a.txt", "b.txt", "f.txt"] ); }); @@ -3511,8 +3479,8 @@ async fn test_local_settings( )) .collect::>(), &[ - (Path::new("").into(), Some(2)), - (Path::new("a").into(), Some(8)), + (rel_path("").into(), Some(2)), + (rel_path("a").into(), Some(8)), ] ) }); @@ -3533,10 +3501,7 @@ async fn test_local_settings( content.all_languages.defaults.tab_size.map(Into::into) )) .collect::>(), - &[ - (Path::new("").into(), None), - (Path::new("a").into(), Some(8)), - ] + &[(rel_path("").into(), None), (rel_path("a").into(), Some(8)),] ) }); @@ -3567,8 +3532,8 @@ async fn test_local_settings( )) .collect::>(), &[ - (Path::new("a").into(), Some(8)), - (Path::new("b").into(), Some(4)), + (rel_path("a").into(), Some(8)), + (rel_path("b").into(), Some(4)), ] ) }); @@ -3599,7 +3564,7 @@ async fn test_local_settings( .local_settings(worktree_b.read(cx).id()) 
.map(|(path, content)| (path, content.all_languages.defaults.hard_tabs)) .collect::>(), - &[(Path::new("a").into(), Some(true))], + &[(rel_path("a").into(), Some(true))], ) }); } @@ -3636,7 +3601,9 @@ async fn test_buffer_conflict_after_save( // Open a buffer as client B let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); @@ -3700,7 +3667,9 @@ async fn test_buffer_reloading( // Open a buffer as client B let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); @@ -3758,12 +3727,16 @@ async fn test_editing_while_guest_opens_buffer( // Open a buffer as client A let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); // Start opening the same buffer as client B - let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)); + let open_buffer = project_b.update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }); let buffer_b = cx_b.executor().spawn(open_buffer); // Edit the buffer as client A while client B is still opening it. @@ -3810,7 +3783,9 @@ async fn test_leaving_worktree_while_opening_buffer( project_a.read_with(cx_a, |p, _| assert_eq!(p.collaborators().len(), 1)); // Begin opening a buffer as client B, but leave the project before the open completes. - let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)); + let open_buffer = project_b.update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }); let buffer_b = cx_b.executor().spawn(open_buffer); cx_b.update(|_| drop(project_b)); drop(buffer_b); @@ -3852,7 +3827,9 @@ async fn test_canceling_buffer_opening( let project_b = client_b.join_remote_project(project_id, cx_b).await; let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.txt")), cx) + }) .await .unwrap(); @@ -3928,7 +3905,7 @@ async fn test_leaving_project( let buffer_b1 = project_b1 .update(cx_b, |project, cx| { let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); - project.open_buffer((worktree_id, "a.txt"), cx) + project.open_buffer((worktree_id, rel_path("a.txt")), cx) }) .await .unwrap(); @@ -3966,7 +3943,7 @@ async fn test_leaving_project( let buffer_b2 = project_b2 .update(cx_b, |project, cx| { let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); - project.open_buffer((worktree_id, "a.txt"), cx) + project.open_buffer((worktree_id, rel_path("a.txt")), cx) }) .await .unwrap(); @@ -4131,7 +4108,7 @@ async fn test_collaborating_with_diagnostics( &[( ProjectPath { worktree_id, - path: Arc::from(Path::new("a.rs")), + path: rel_path("a.rs").into(), }, LanguageServerId(0), DiagnosticSummary { @@ -4167,7 +4144,7 @@ async fn test_collaborating_with_diagnostics( &[( ProjectPath { worktree_id, - path: Arc::from(Path::new("a.rs")), + path: rel_path("a.rs").into(), }, LanguageServerId(0), DiagnosticSummary { @@ -4208,7 +4185,7 @@ async fn test_collaborating_with_diagnostics( [( ProjectPath { worktree_id, - path: Arc::from(Path::new("a.rs")), + path: rel_path("a.rs").into(), }, LanguageServerId(0), DiagnosticSummary { @@ 
-4225,7 +4202,7 @@ async fn test_collaborating_with_diagnostics( [( ProjectPath { worktree_id, - path: Arc::from(Path::new("a.rs")), + path: rel_path("a.rs").into(), }, LanguageServerId(0), DiagnosticSummary { @@ -4237,7 +4214,9 @@ async fn test_collaborating_with_diagnostics( }); // Open the file with the errors on client B. They should be present. - let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); + let open_buffer = project_b.update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.rs")), cx) + }); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); buffer_b.read_with(cx_b, |buffer, _| { @@ -4356,7 +4335,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering( let project_b = client_b.join_remote_project(project_id, cx_b).await; let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| { project_b.update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, file_name), cx) + p.open_buffer_with_lsp((worktree_id, rel_path(file_name)), cx) }) })) .await @@ -4454,7 +4433,9 @@ async fn test_reloading_buffer_manually( .await; let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await; let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.rs")), cx) + }) .await .unwrap(); let project_id = active_call_a @@ -4464,7 +4445,9 @@ async fn test_reloading_buffer_manually( let project_b = client_b.join_remote_project(project_id, cx_b).await; - let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)); + let open_buffer = project_b.update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.rs")), cx) + }); let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap(); buffer_b.update(cx_b, |buffer, cx| { buffer.edit([(4..7, "six")], None, cx); @@ -4562,7 +4545,9 @@ async fn test_formatting_buffer( let project_b = client_b.join_remote_project(project_id, cx_b).await; let buffer_b = project_b - .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx)) + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.rs")), cx) + }) .await .unwrap(); @@ -4688,7 +4673,9 @@ async fn test_prettier_formatting_buffer( .await; let (project_a, worktree_id) = client_a.build_local_project(&directory, cx_a).await; let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX; - let open_buffer = project_a.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)); + let open_buffer = project_a.update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.ts")), cx) + }); let buffer_a = cx_a.executor().spawn(open_buffer).await.unwrap(); let project_id = active_call_a @@ -4698,7 +4685,7 @@ async fn test_prettier_formatting_buffer( let project_b = client_b.join_remote_project(project_id, cx_b).await; let (buffer_b, _) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "a.ts"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("a.ts")), cx) }) .await .unwrap(); @@ -4838,7 +4825,7 @@ async fn test_definition( // Open the file on client B. let (buffer_b, _handle) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "a.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("a.rs")), cx) }) .await .unwrap(); @@ -5016,7 +5003,7 @@ async fn test_references( // Open the file on client B. 
let (buffer_b, _handle) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "one.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("one.rs")), cx) }) .await .unwrap(); @@ -5088,7 +5075,7 @@ async fn test_references( let three_buffer = references[2].buffer.read(cx); assert_eq!( two_buffer.file().unwrap().path().as_ref(), - Path::new("two.rs") + rel_path("two.rs") ); assert_eq!(references[1].buffer, references[0].buffer); assert_eq!( @@ -5288,7 +5275,7 @@ async fn test_document_highlights( // Open the file on client B. let (buffer_b, _handle) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "main.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); @@ -5431,7 +5418,7 @@ async fn test_lsp_hover( // Open the file as the guest let (buffer_b, _handle) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "main.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); @@ -5623,7 +5610,7 @@ async fn test_project_symbols( // Cause the language server to start. let _buffer = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "one.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("one.rs")), cx) }) .await .unwrap(); @@ -5673,7 +5660,10 @@ async fn test_project_symbols( // Attempt to craft a symbol and violate host's privacy by opening an arbitrary file. let mut fake_symbol = symbols[0].clone(); - fake_symbol.path.path = Path::new(path!("/code/secrets")).into(); + fake_symbol.path = SymbolLocation::OutsideProject { + abs_path: Path::new(path!("/code/secrets")).into(), + signature: [0x17; 32], + }; let error = project_b .update(cx_b, |project, cx| { project.open_buffer_for_symbol(&fake_symbol, cx) @@ -5738,7 +5728,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( let (buffer_b1, _lsp) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "a.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("a.rs")), cx) }) .await .unwrap(); @@ -5763,14 +5753,14 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx)); (buffer_b2, _) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "b.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("b.rs")), cx) }) .await .unwrap(); } else { (buffer_b2, _) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "b.rs"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("b.rs")), cx) }) .await .unwrap(); @@ -6587,15 +6577,15 @@ async fn test_preview_tabs(cx: &mut TestAppContext) { let path_1 = ProjectPath { worktree_id, - path: Path::new("1.txt").into(), + path: rel_path("1.txt").into(), }; let path_2 = ProjectPath { worktree_id, - path: Path::new("2.js").into(), + path: rel_path("2.js").into(), }; let path_3 = ProjectPath { worktree_id, - path: Path::new("3.rs").into(), + path: rel_path("3.rs").into(), }; let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 326f64cb244b88a64728f4347e3cfc31a8c252bf..0cc4a4eadea213dc4f40d14be2cdf379915686c7 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -27,7 +27,11 @@ use std::{ rc::Rc, sync::Arc, }; 
-use util::{ResultExt, path}; +use util::{ + ResultExt, path, + paths::PathStyle, + rel_path::{RelPath, RelPathBuf, rel_path}, +}; #[gpui::test( iterations = 100, @@ -66,7 +70,7 @@ enum ClientOperation { OpenBuffer { project_root_name: String, is_local: bool, - full_path: PathBuf, + full_path: RelPathBuf, }, SearchProject { project_root_name: String, @@ -77,24 +81,24 @@ enum ClientOperation { EditBuffer { project_root_name: String, is_local: bool, - full_path: PathBuf, + full_path: RelPathBuf, edits: Vec<(Range, Arc)>, }, CloseBuffer { project_root_name: String, is_local: bool, - full_path: PathBuf, + full_path: RelPathBuf, }, SaveBuffer { project_root_name: String, is_local: bool, - full_path: PathBuf, + full_path: RelPathBuf, detach: bool, }, RequestLspDataInBuffer { project_root_name: String, is_local: bool, - full_path: PathBuf, + full_path: RelPathBuf, offset: usize, kind: LspRequestKind, detach: bool, @@ -102,7 +106,7 @@ enum ClientOperation { CreateWorktreeEntry { project_root_name: String, is_local: bool, - full_path: PathBuf, + full_path: RelPathBuf, is_dir: bool, }, WriteFsEntry { @@ -119,7 +123,7 @@ enum ClientOperation { enum GitOperation { WriteGitIndex { repo_path: PathBuf, - contents: Vec<(PathBuf, String)>, + contents: Vec<(RelPathBuf, String)>, }, WriteGitBranch { repo_path: PathBuf, @@ -127,7 +131,7 @@ enum GitOperation { }, WriteGitStatuses { repo_path: PathBuf, - statuses: Vec<(PathBuf, FileStatus)>, + statuses: Vec<(RelPathBuf, FileStatus)>, }, } @@ -311,8 +315,8 @@ impl RandomizedTest for ProjectCollaborationTest { let Some(worktree) = worktree else { continue }; let is_dir = rng.random::(); let mut full_path = - worktree.read_with(cx, |w, _| PathBuf::from(w.root_name())); - full_path.push(gen_file_name(rng)); + worktree.read_with(cx, |w, _| w.root_name().to_rel_path_buf()); + full_path.push(rel_path(&gen_file_name(rng))); if !is_dir { full_path.set_extension("rs"); } @@ -346,8 +350,18 @@ impl RandomizedTest for ProjectCollaborationTest { continue; }; - let full_path = buffer - .read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx)); + let full_path = buffer.read_with(cx, |buffer, cx| { + let file = buffer.file().unwrap(); + let worktree = project + .read(cx) + .worktree_for_id(file.worktree_id(cx), cx) + .unwrap(); + worktree + .read(cx) + .root_name() + .join(file.path()) + .to_rel_path_buf() + }); match rng.random_range(0..100_u32) { // Close the buffer @@ -436,16 +450,16 @@ impl RandomizedTest for ProjectCollaborationTest { .filter(|e| e.is_file()) .choose(rng) .unwrap(); - if entry.path.as_ref() == Path::new("") { - Path::new(worktree.root_name()).into() + if entry.path.as_ref().is_empty() { + worktree.root_name().into() } else { - Path::new(worktree.root_name()).join(&entry.path) + worktree.root_name().join(&entry.path) } }); break ClientOperation::OpenBuffer { project_root_name, is_local, - full_path, + full_path: full_path.to_rel_path_buf(), }; } } @@ -940,7 +954,11 @@ impl RandomizedTest for ProjectCollaborationTest { } for (path, _) in contents.iter() { - if !client.fs().files().contains(&repo_path.join(path)) { + if !client + .fs() + .files() + .contains(&repo_path.join(path.as_std_path())) + { return Err(TestError::Inapplicable); } } @@ -954,8 +972,8 @@ impl RandomizedTest for ProjectCollaborationTest { let dot_git_dir = repo_path.join(".git"); let contents = contents - .into_iter() - .map(|(path, contents)| (path.into(), contents)) + .iter() + .map(|(path, contents)| (path.as_str(), contents.clone())) .collect::>(); if 
client.fs().metadata(&dot_git_dir).await?.is_none() { client.fs().create_dir(&dot_git_dir).await?; @@ -993,7 +1011,11 @@ impl RandomizedTest for ProjectCollaborationTest { return Err(TestError::Inapplicable); } for (path, _) in statuses.iter() { - if !client.fs().files().contains(&repo_path.join(path)) { + if !client + .fs() + .files() + .contains(&repo_path.join(path.as_std_path())) + { return Err(TestError::Inapplicable); } } @@ -1009,7 +1031,7 @@ impl RandomizedTest for ProjectCollaborationTest { let statuses = statuses .iter() - .map(|(path, val)| (path.as_path(), *val)) + .map(|(path, val)| (path.as_str(), *val)) .collect::>(); if client.fs().metadata(&dot_git_dir).await?.is_none() { @@ -1426,7 +1448,7 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation repo_path: &Path, rng: &mut StdRng, client: &TestClient, - ) -> Vec { + ) -> Vec { let mut paths = client .fs() .files() @@ -1440,7 +1462,11 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation paths .iter() - .map(|path| path.strip_prefix(repo_path).unwrap().to_path_buf()) + .map(|path| { + RelPath::from_std_path(path.strip_prefix(repo_path).unwrap(), PathStyle::local()) + .unwrap() + .to_rel_path_buf() + }) .collect::>() } @@ -1487,7 +1513,7 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation fn buffer_for_full_path( client: &TestClient, project: &Entity, - full_path: &PathBuf, + full_path: &RelPath, cx: &TestAppContext, ) -> Option> { client @@ -1495,7 +1521,12 @@ fn buffer_for_full_path( .iter() .find(|buffer| { buffer.read_with(cx, |buffer, cx| { - buffer.file().unwrap().full_path(cx) == *full_path + let file = buffer.file().unwrap(); + let Some(worktree) = project.read(cx).worktree_for_id(file.worktree_id(cx), cx) + else { + return false; + }; + worktree.read(cx).root_name().join(&file.path()).as_ref() == full_path }) }) .cloned() @@ -1536,23 +1567,23 @@ fn root_name_for_project(project: &Entity, cx: &TestAppContext) -> Stri .next() .unwrap() .read(cx) - .root_name() + .root_name_str() .to_string() }) } fn project_path_for_full_path( project: &Entity, - full_path: &Path, + full_path: &RelPath, cx: &TestAppContext, ) -> Option { let mut components = full_path.components(); - let root_name = components.next().unwrap().as_os_str().to_str().unwrap(); - let path = components.as_path().into(); + let root_name = components.next().unwrap(); + let path = components.rest().into(); let worktree_id = project.read_with(cx, |project, cx| { project.worktrees(cx).find_map(|worktree| { let worktree = worktree.read(cx); - if worktree.root_name() == root_name { + if worktree.root_name_str() == root_name { Some(worktree.id()) } else { None diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index e2e6d7b724386afafad27e72f867be70671263bc..84ee9a33906b976a68da5da7b81c1e89c96190b1 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -33,7 +33,7 @@ use std::{ sync::{Arc, atomic::AtomicUsize}, }; use task::TcpArgumentsTemplate; -use util::path; +use util::{path, rel_path::rel_path}; #[gpui::test(iterations = 10)] async fn test_sharing_an_ssh_remote_project( @@ -124,26 +124,26 @@ async fn test_sharing_an_ssh_remote_project( worktree_a.update(cx_a, |worktree, _cx| { assert_eq!( - worktree.paths().map(Arc::as_ref).collect::>(), + worktree.paths().collect::>(), vec![ - Path::new(".zed"), 
- Path::new(".zed/settings.json"), - Path::new("README.md"), - Path::new("src"), - Path::new("src/lib.rs"), + rel_path(".zed"), + rel_path(".zed/settings.json"), + rel_path("README.md"), + rel_path("src"), + rel_path("src/lib.rs"), ] ); }); worktree_b.update(cx_b, |worktree, _cx| { assert_eq!( - worktree.paths().map(Arc::as_ref).collect::>(), + worktree.paths().collect::>(), vec![ - Path::new(".zed"), - Path::new(".zed/settings.json"), - Path::new("README.md"), - Path::new("src"), - Path::new("src/lib.rs"), + rel_path(".zed"), + rel_path(".zed/settings.json"), + rel_path("README.md"), + rel_path("src"), + rel_path("src/lib.rs"), ] ); }); @@ -151,7 +151,7 @@ async fn test_sharing_an_ssh_remote_project( // User B can open buffers in the remote project. let buffer_b = project_b .update(cx_b, |project, cx| { - project.open_buffer((worktree_id, "src/lib.rs"), cx) + project.open_buffer((worktree_id, rel_path("src/lib.rs")), cx) }) .await .unwrap(); @@ -177,7 +177,7 @@ async fn test_sharing_an_ssh_remote_project( buffer_b.clone(), ProjectPath { worktree_id: worktree_id.to_owned(), - path: Arc::from(Path::new("src/renamed.rs")), + path: rel_path("src/renamed.rs").into(), }, cx, ) @@ -194,14 +194,8 @@ async fn test_sharing_an_ssh_remote_project( cx_b.run_until_parked(); cx_b.update(|cx| { assert_eq!( - buffer_b - .read(cx) - .file() - .unwrap() - .path() - .to_string_lossy() - .to_string(), - path!("src/renamed.rs").to_string() + buffer_b.read(cx).file().unwrap().path().as_ref(), + rel_path("src/renamed.rs") ); }); } @@ -489,7 +483,7 @@ async fn test_ssh_collaboration_formatting_with_prettier( // Opens the buffer and formats it let (buffer_b, _handle) = project_b .update(cx_b, |p, cx| { - p.open_buffer_with_lsp((worktree_id, "a.ts"), cx) + p.open_buffer_with_lsp((worktree_id, rel_path("a.ts")), cx) }) .await .expect("user B opens buffer for formatting"); @@ -547,7 +541,9 @@ async fn test_ssh_collaboration_formatting_with_prettier( // User A opens and formats the same buffer too let buffer_a = project_a - .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx)) + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("a.ts")), cx) + }) .await .expect("user A opens buffer for formatting"); diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 61b7a4e18e4e679c29e26185735352737983c4d1..49753c3a6d9460b86d50e395d394b6af9a819693 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -40,6 +40,7 @@ use std::{ sync::Arc, }; use sum_tree::Dimensions; +use util::rel_path::RelPath; use util::{ResultExt, fs::remove_matching}; use workspace::Workspace; @@ -963,8 +964,7 @@ impl Copilot { let hard_tabs = settings.hard_tabs; let relative_path = buffer .file() - .map(|file| file.path().to_path_buf()) - .unwrap_or_default(); + .map_or(RelPath::empty().into(), |file| file.path().clone()); cx.background_spawn(async move { let (version, snapshot) = snapshot.await?; @@ -975,7 +975,7 @@ impl Copilot { tab_size: tab_size.into(), indent_size: 1, insert_spaces: !hard_tabs, - relative_path: relative_path.to_string_lossy().into(), + relative_path: relative_path.to_proto(), position: point_to_lsp(position), version: version.try_into().unwrap(), }, @@ -1194,7 +1194,7 @@ async fn get_copilot_lsp(fs: Arc, node_runtime: NodeRuntime) -> anyhow:: mod tests { use super::*; use gpui::TestAppContext; - use util::path; + use util::{path, paths::PathStyle, rel_path::rel_path}; #[gpui::test(iterations = 10)] async fn test_buffer_management(cx: &mut TestAppContext) { @@ 
-1258,7 +1258,7 @@ mod tests { buffer.file_updated( Arc::new(File { abs_path: path!("/root/child/buffer-1").into(), - path: Path::new("child/buffer-1").into(), + path: rel_path("child/buffer-1").into(), }), cx, ) @@ -1355,7 +1355,7 @@ mod tests { struct File { abs_path: PathBuf, - path: Arc, + path: Arc, } impl language::File for File { @@ -1369,15 +1369,19 @@ mod tests { } } - fn path(&self) -> &Arc { + fn path(&self) -> &Arc { &self.path } + fn path_style(&self, _: &App) -> PathStyle { + PathStyle::local() + } + fn full_path(&self, _: &App) -> PathBuf { unimplemented!() } - fn file_name<'a>(&'a self, _: &'a App) -> &'a std::ffi::OsStr { + fn file_name<'a>(&'a self, _: &'a App) -> &'a str { unimplemented!() } diff --git a/crates/dap/src/adapters.rs b/crates/dap/src/adapters.rs index 2cef266677c1314f6fd253b9caf77914050ceb96..b952a603c786a240feb09264ba834f62df261386 100644 --- a/crates/dap/src/adapters.rs +++ b/crates/dap/src/adapters.rs @@ -24,7 +24,7 @@ use std::{ sync::Arc, }; use task::{DebugScenario, TcpArgumentsTemplate, ZedDebugConfig}; -use util::archive::extract_zip; +use util::{archive::extract_zip, rel_path::RelPath}; #[derive(Clone, Debug, PartialEq, Eq)] pub enum DapStatus { @@ -44,7 +44,7 @@ pub trait DapDelegate: Send + Sync + 'static { fn fs(&self) -> Arc; fn output_to_console(&self, msg: String); async fn which(&self, command: &OsStr) -> Option; - async fn read_text_file(&self, path: PathBuf) -> Result; + async fn read_text_file(&self, path: &RelPath) -> Result; async fn shell_env(&self) -> collections::HashMap; } diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index 6781e5cbd62d1abc9abfa58223b0771f26cc0c88..468edf5664f1a56c45ea5308747be2b8bcd0a468 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -20,7 +20,7 @@ use std::{ ffi::OsStr, path::{Path, PathBuf}, }; -use util::{ResultExt, maybe}; +use util::{ResultExt, maybe, paths::PathStyle, rel_path::RelPath}; #[derive(Default)] pub(crate) struct PythonDebugAdapter { @@ -726,13 +726,16 @@ impl DebugAdapter for PythonDebugAdapter { .config .get("cwd") .and_then(|cwd| { - cwd.as_str() - .map(Path::new)? - .strip_prefix(delegate.worktree_root_path()) - .ok() + RelPath::from_std_path( + cwd.as_str() + .map(Path::new)? 
+ .strip_prefix(delegate.worktree_root_path()) + .ok()?, + PathStyle::local(), + ) + .ok() }) - .unwrap_or_else(|| "".as_ref()) - .into(); + .unwrap_or_else(|| RelPath::empty().into()); let toolchain = delegate .toolchain_store() .active_toolchain( diff --git a/crates/debug_adapter_extension/src/extension_dap_adapter.rs b/crates/debug_adapter_extension/src/extension_dap_adapter.rs index b656bed9bc2ec972528c4b4c237e8ae0fceedc5a..be225a0c44682f41f7c071641cd4df07798370c8 100644 --- a/crates/debug_adapter_extension/src/extension_dap_adapter.rs +++ b/crates/debug_adapter_extension/src/extension_dap_adapter.rs @@ -15,6 +15,7 @@ use dap::{ use extension::{Extension, WorktreeDelegate}; use gpui::AsyncApp; use task::{DebugScenario, ZedDebugConfig}; +use util::rel_path::RelPath; pub(crate) struct ExtensionDapAdapter { extension: Arc, @@ -57,7 +58,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter { self.0.worktree_root_path().to_string_lossy().to_string() } - async fn read_text_file(&self, path: PathBuf) -> Result { + async fn read_text_file(&self, path: &RelPath) -> Result { self.0.read_text_file(path).await } diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 787bca01acb204a4a50b18a34f3567137f92aa0e..57f17e577e82bc8f97c6d9a82544324182e3d2f9 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -33,6 +33,7 @@ use std::sync::{Arc, LazyLock}; use task::{DebugScenario, TaskContext}; use tree_sitter::{Query, StreamingIterator as _}; use ui::{ContextMenu, Divider, PopoverMenuHandle, Tab, Tooltip, prelude::*}; +use util::rel_path::RelPath; use util::{ResultExt, debug_panic, maybe}; use workspace::SplitDirection; use workspace::item::SaveOptions; @@ -1061,14 +1062,14 @@ impl DebugPanel { directory_in_worktree: dir, .. 
} => { - let relative_path = if dir.ends_with(".vscode") { - dir.join("launch.json") + let relative_path = if dir.ends_with(RelPath::new(".vscode").unwrap()) { + dir.join(RelPath::new("launch.json").unwrap()) } else { - dir.join("debug.json") + dir.join(RelPath::new("debug.json").unwrap()) }; ProjectPath { worktree_id: id, - path: Arc::from(relative_path), + path: relative_path, } } _ => return self.save_scenario(scenario, worktree_id, window, cx), @@ -1129,7 +1130,7 @@ impl DebugPanel { let fs = workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?; - path.push(paths::local_settings_folder_relative_path()); + path.push(paths::local_settings_folder_name()); if !fs.is_dir(path.as_path()).await { fs.create_dir(path.as_path()).await?; } diff --git a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index a25c02c1b5f72f1e85f532fcee244f0165a8a48e..2ee8d13732f09c517015437691aae659d7449f5c 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -32,7 +32,7 @@ use ui::{ SharedString, Styled, StyledExt, ToggleButton, ToggleState, Toggleable, Tooltip, Window, div, h_flex, relative, rems, v_flex, }; -use util::ResultExt; +use util::{ResultExt, rel_path::RelPath}; use workspace::{ModalView, Workspace, notifications::DetachAndPromptErr, pane}; use crate::{attach_modal::AttachModal, debugger_panel::DebugPanel}; @@ -1026,29 +1026,27 @@ impl DebugDelegate { let mut path = if worktrees.len() > 1 && let Some(worktree) = project.worktree_for_id(*worktree_id, cx) { - let worktree_path = worktree.read(cx).abs_path(); - let full_path = worktree_path.join(directory_in_worktree); - full_path + worktree + .read(cx) + .root_name() + .join(directory_in_worktree) + .to_rel_path_buf() } else { - directory_in_worktree.clone() + directory_in_worktree.to_rel_path_buf() }; - match path - .components() - .next_back() - .and_then(|component| component.as_os_str().to_str()) - { + match path.components().next_back() { Some(".zed") => { - path.push("debug.json"); + path.push(RelPath::new("debug.json").unwrap()); } Some(".vscode") => { - path.push("launch.json"); + path.push(RelPath::new("launch.json").unwrap()); } _ => {} } - Some(path.display().to_string()) + path.display(project.path_style(cx)).to_string() }) - .unwrap_or_else(|_| Some(directory_in_worktree.display().to_string())), + .ok(), Some(TaskSourceKind::AbsPath { abs_path, .. 
}) => { Some(abs_path.to_string_lossy().into_owned()) } @@ -1135,7 +1133,7 @@ impl DebugDelegate { id: _, directory_in_worktree: dir, id_base: _, - } => dir.ends_with(".zed"), + } => dir.ends_with(RelPath::new(".zed").unwrap()), _ => false, }); @@ -1154,7 +1152,10 @@ impl DebugDelegate { id: _, directory_in_worktree: dir, id_base: _, - } => !(hide_vscode && dir.ends_with(".vscode")), + } => { + !(hide_vscode + && dir.ends_with(RelPath::new(".vscode").unwrap())) + } _ => true, }) .filter(|(_, scenario)| valid_adapters.contains(&scenario.adapter)) diff --git a/crates/debugger_ui/src/session/running/breakpoint_list.rs b/crates/debugger_ui/src/session/running/breakpoint_list.rs index 0ede5879aeb6f406191eb6ee1fb83cb6ea67a3f2..f0f86b124a711922a452afc1a74cd4cab9fe28fd 100644 --- a/crates/debugger_ui/src/session/running/breakpoint_list.rs +++ b/crates/debugger_ui/src/session/running/breakpoint_list.rs @@ -26,6 +26,7 @@ use ui::{ Divider, DividerColor, FluentBuilder as _, Indicator, IntoElement, ListItem, Render, StatefulInteractiveElement, Tooltip, WithScrollbar, prelude::*, }; +use util::rel_path::RelPath; use workspace::Workspace; use zed_actions::{ToggleEnableBreakpoint, UnsetBreakpoint}; @@ -663,6 +664,7 @@ impl Render for BreakpointList { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl ui::IntoElement { let breakpoints = self.breakpoint_store.read(cx).all_source_breakpoints(cx); self.breakpoints.clear(); + let path_style = self.worktree_store.read(cx).path_style(); let weak = cx.weak_entity(); let breakpoints = breakpoints.into_iter().flat_map(|(path, mut breakpoints)| { let relative_worktree_path = self @@ -673,7 +675,7 @@ impl Render for BreakpointList { worktree .read(cx) .is_visible() - .then(|| Path::new(worktree.read(cx).root_name()).join(relative_path)) + .then(|| worktree.read(cx).root_name().join(&relative_path)) }); breakpoints.sort_by_key(|breakpoint| breakpoint.row); let weak = weak.clone(); @@ -683,14 +685,9 @@ impl Render for BreakpointList { let dir = relative_worktree_path .clone() - .unwrap_or_else(|| PathBuf::from(&*breakpoint.path)) + .or_else(|| RelPath::from_std_path(&breakpoint.path, path_style).ok())? 
.parent() - .and_then(|parent| { - parent - .to_str() - .map(ToOwned::to_owned) - .map(SharedString::from) - }); + .map(|parent| SharedString::from(parent.display(path_style).to_string())); let name = file_name .to_str() .map(ToOwned::to_owned) diff --git a/crates/debugger_ui/src/session/running/module_list.rs b/crates/debugger_ui/src/session/running/module_list.rs index 4ea763c92cff18f571f27033174ee0b1163b94f9..545d8392745c636b805cfc1e0743170635ef8abe 100644 --- a/crates/debugger_ui/src/session/running/module_list.rs +++ b/crates/debugger_ui/src/session/running/module_list.rs @@ -87,7 +87,7 @@ impl ModuleList { this.open_buffer( ProjectPath { worktree_id, - path: relative_path.into(), + path: relative_path, }, cx, ) diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index 4271bdcbb83b6a34f3c8e15b7572a9712ffd20fd..ee922dd08eff4ba3e56e07d5542595f70d2da6cf 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -401,7 +401,7 @@ impl StackFrameList { this.open_buffer( ProjectPath { worktree_id, - path: relative_path.into(), + path: relative_path, }, cx, ) diff --git a/crates/debugger_ui/src/stack_trace_view.rs b/crates/debugger_ui/src/stack_trace_view.rs index aef053df4a1ea930fb09a779e08afecfa08ddde9..7cfb111a2bf0a2ab4c60acbff16825eb9e4cf41d 100644 --- a/crates/debugger_ui/src/stack_trace_view.rs +++ b/crates/debugger_ui/src/stack_trace_view.rs @@ -181,7 +181,7 @@ impl StackTraceView { let project_path = ProjectPath { worktree_id: worktree.read_with(cx, |tree, _| tree.id())?, - path: relative_path.into(), + path: relative_path, }; if let Some(buffer) = this diff --git a/crates/debugger_ui/src/tests/debugger_panel.rs b/crates/debugger_ui/src/tests/debugger_panel.rs index ab6d5cb9605d5d774187f836130fdae66a8d3404..05de4a47a4ec3c0762a8ebe1318b37f8a812a6d2 100644 --- a/crates/debugger_ui/src/tests/debugger_panel.rs +++ b/crates/debugger_ui/src/tests/debugger_panel.rs @@ -32,7 +32,7 @@ use std::{ }; use terminal_view::terminal_panel::TerminalPanel; use tests::{active_debug_session_panel, init_test, init_test_workspace}; -use util::path; +use util::{path, rel_path::rel_path}; use workspace::item::SaveOptions; use workspace::{Item, dock::Panel}; @@ -1114,7 +1114,7 @@ async fn test_send_breakpoints_when_editor_has_been_saved( let buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); @@ -1276,14 +1276,14 @@ async fn test_unsetting_breakpoints_on_clear_breakpoint_action( let first = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); let second = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "second.rs"), cx) + project.open_buffer((worktree_id, rel_path("second.rs")), cx) }) .await .unwrap(); @@ -1499,14 +1499,14 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T let main_buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); let second_buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "second.rs"), cx) + project.open_buffer((worktree_id, rel_path("second.rs")), cx) }) .await .unwrap(); 
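// Editor's sketch (not part of the patch): the test hunks above replace bare `&str`
// worktree-relative paths with `util::rel_path::RelPath`, built via the `rel_path`
// helper in fixtures and `RelPath::new` elsewhere. A minimal, hedged illustration of
// that pattern, using only calls that appear in this diff (`rel_path`, `RelPath::new`,
// `join`, `ends_with`, `file_name`, `as_str`); exact return types are assumptions
// inferred from the surrounding hunks rather than verified against the `util` crate.
#[cfg(test)]
mod rel_path_sketch {
    use util::rel_path::{RelPath, rel_path};

    #[test]
    fn builds_worktree_relative_paths() {
        // Test helper: panics on invalid input, which is acceptable for fixtures.
        let main = rel_path("main.rs");
        assert_eq!(main.file_name(), Some("main.rs"));

        // Fallible constructor used by the non-test hunks in this patch.
        let dir = RelPath::new(".vscode").unwrap();
        assert!(dir.ends_with(RelPath::new(".vscode").unwrap()));

        // Joined paths stay "/"-separated internally; user-facing rendering goes
        // through `display(path_style)` elsewhere in the patch.
        let launch = dir.join(RelPath::new("launch.json").unwrap());
        assert_eq!(launch.as_str(), ".vscode/launch.json");
    }
}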
diff --git a/crates/debugger_ui/src/tests/inline_values.rs b/crates/debugger_ui/src/tests/inline_values.rs index 9f921ec969debc5247d531469c5132e8485c163b..8ca3061f5763de089c84a9e73850dd9ea858e6bb 100644 --- a/crates/debugger_ui/src/tests/inline_values.rs +++ b/crates/debugger_ui/src/tests/inline_values.rs @@ -7,7 +7,7 @@ use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_python, tr use project::{FakeFs, Project}; use serde_json::json; use unindent::Unindent as _; -use util::path; +use util::{path, rel_path::rel_path}; use crate::{ debugger_panel::DebugPanel, @@ -215,7 +215,7 @@ fn main() { let buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); @@ -1584,7 +1584,7 @@ def process_data(untyped_param, typed_param: int, another_typed: str): let buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.py"), cx) + project.open_buffer((worktree_id, rel_path("main.py")), cx) }) .await .unwrap(); @@ -2082,7 +2082,7 @@ async fn test_inline_values_util( let buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); diff --git a/crates/debugger_ui/src/tests/stack_frame_list.rs b/crates/debugger_ui/src/tests/stack_frame_list.rs index a61a31d270c9d599f30185d7da3c825c51bb7898..9caef5ba56d4f915bf25aebb5ab7e806f3edf171 100644 --- a/crates/debugger_ui/src/tests/stack_frame_list.rs +++ b/crates/debugger_ui/src/tests/stack_frame_list.rs @@ -13,7 +13,7 @@ use project::{FakeFs, Project}; use serde_json::json; use std::sync::Arc; use unindent::Unindent as _; -use util::path; +use util::{path, rel_path::rel_path}; #[gpui::test] async fn test_fetch_initial_stack_frames_and_go_to_stack_frame( @@ -331,12 +331,7 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC let project_path = editors[0] .update(cx, |editor, cx| editor.project_path(cx)) .unwrap(); - let expected = if cfg!(target_os = "windows") { - "src\\test.js" - } else { - "src/test.js" - }; - assert_eq!(expected, project_path.path.to_string_lossy()); + assert_eq!(rel_path("src/test.js"), project_path.path.as_ref()); assert_eq!(test_file_content, editors[0].read(cx).text(cx)); assert_eq!( vec![2..3], @@ -399,12 +394,7 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC let project_path = editors[0] .update(cx, |editor, cx| editor.project_path(cx)) .unwrap(); - let expected = if cfg!(target_os = "windows") { - "src\\module.js" - } else { - "src/module.js" - }; - assert_eq!(expected, project_path.path.to_string_lossy()); + assert_eq!(rel_path("src/module.js"), project_path.path.as_ref()); assert_eq!(module_file_content, editors[0].read(cx).text(cx)); assert_eq!( vec![0..1], diff --git a/crates/diagnostics/src/buffer_diagnostics.rs b/crates/diagnostics/src/buffer_diagnostics.rs index 3a245163822fb19c43d11a93bc48c3d276e4d502..0bdc5c4eb72a35b0a55598e059d28a91ad1cc834 100644 --- a/crates/diagnostics/src/buffer_diagnostics.rs +++ b/crates/diagnostics/src/buffer_diagnostics.rs @@ -28,7 +28,6 @@ use std::{ }; use text::{Anchor, BufferSnapshot, OffsetRangeExt}; use ui::{Button, ButtonStyle, Icon, IconName, Label, Tooltip, h_flex, prelude::*}; -use util::paths::PathExt; use workspace::{ ItemHandle, ItemNavHistory, ToolbarItemLocation, Workspace, item::{BreadcrumbText, Item, ItemEvent, TabContentParams}, @@ -783,15 
+782,16 @@ impl Item for BufferDiagnosticsEditor { } // Builds the content to be displayed in the tab. - fn tab_content(&self, params: TabContentParams, _window: &Window, _cx: &App) -> AnyElement { + fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement { + let path_style = self.project.read(cx).path_style(cx); let error_count = self.summary.error_count; let warning_count = self.summary.warning_count; let label = Label::new( self.project_path .path .file_name() - .map(|f| f.to_sanitized_string()) - .unwrap_or_else(|| self.project_path.path.to_sanitized_string()), + .map(|s| s.to_string()) + .unwrap_or_else(|| self.project_path.path.display(path_style).to_string()), ); h_flex() @@ -827,11 +827,12 @@ impl Item for BufferDiagnosticsEditor { "Buffer Diagnostics".into() } - fn tab_tooltip_text(&self, _: &App) -> Option { + fn tab_tooltip_text(&self, cx: &App) -> Option { + let path_style = self.project.read(cx).path_style(cx); Some( format!( "Buffer Diagnostics - {}", - self.project_path.path.to_sanitized_string() + self.project_path.path.display(path_style) ) .into(), ) @@ -848,7 +849,8 @@ impl Item for BufferDiagnosticsEditor { impl Render for BufferDiagnosticsEditor { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let filename = self.project_path.path.to_sanitized_string(); + let path_style = self.project.read(cx).path_style(cx); + let filename = self.project_path.path.display(path_style).to_string(); let error_count = self.summary.error_count; let warning_count = match self.include_warnings { true => self.summary.warning_count, diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index a50e20f579e67010819de0fdb7273d4c9912b8b8..7fe88c8243a12629d6ececfa4d5d38901ed2fd42 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -27,7 +27,7 @@ use std::{ str::FromStr, }; use unindent::Unindent as _; -use util::{RandomCharIter, path, post_inc}; +use util::{RandomCharIter, path, post_inc, rel_path::rel_path}; #[ctor::ctor] fn init_logger() { @@ -1609,7 +1609,7 @@ async fn test_buffer_diagnostics(cx: &mut TestAppContext) { worktree_id: project.read_with(cx, |project, cx| { project.worktrees(cx).next().unwrap().read(cx).id() }), - path: Arc::from(Path::new("main.rs")), + path: rel_path("main.rs").into(), }; let buffer = project .update(cx, |project, cx| { @@ -1763,7 +1763,7 @@ async fn test_buffer_diagnostics_without_warnings(cx: &mut TestAppContext) { worktree_id: project.read_with(cx, |project, cx| { project.worktrees(cx).next().unwrap().read(cx).id() }), - path: Arc::from(Path::new("main.rs")), + path: rel_path("main.rs").into(), }; let buffer = project .update(cx, |project, cx| { @@ -1892,7 +1892,7 @@ async fn test_buffer_diagnostics_multiple_servers(cx: &mut TestAppContext) { worktree_id: project.read_with(cx, |project, cx| { project.worktrees(cx).next().unwrap().read(cx).id() }), - path: Arc::from(Path::new("main.rs")), + path: rel_path("main.rs").into(), }; let buffer = project .update(cx, |project, cx| { diff --git a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index c8c3dc54b76085707c0491eab683ff954a483bf9..67ac5fe7983af7369bd86d41c4d45542c06f2007 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -9,7 +9,6 @@ use std::collections::{HashMap, HashSet}; use std::io::{self, Read}; use std::process; use std::sync::{LazyLock, OnceLock}; -use 
util::paths::PathExt; static KEYMAP_MACOS: LazyLock = LazyLock::new(|| { load_keymap("keymaps/default-macos.json").expect("Failed to load MacOS keymap") @@ -345,7 +344,7 @@ fn handle_postprocessing() -> Result<()> { let mut queue = Vec::with_capacity(64); queue.push(root_dir.clone()); while let Some(dir) = queue.pop() { - for entry in std::fs::read_dir(&dir).context(dir.to_sanitized_string())? { + for entry in std::fs::read_dir(&dir).context("failed to read docs dir")? { let Ok(entry) = entry else { continue; }; diff --git a/crates/edit_prediction_context/src/syntax_index.rs b/crates/edit_prediction_context/src/syntax_index.rs index 1b5e4268ccec74b9eea52c1001c7854dd746c5cf..4e890bd4230c05679b56f56d6a089fd0e5ebb0a4 100644 --- a/crates/edit_prediction_context/src/syntax_index.rs +++ b/crates/edit_prediction_context/src/syntax_index.rs @@ -324,7 +324,7 @@ impl SyntaxIndex { cx.spawn(async move |_this, cx| { let loaded_file = load_task.await?; let language = language_registry - .language_for_file_path(&project_path.path) + .language_for_file_path(&project_path.path.as_std_path()) .await .ok(); @@ -549,7 +549,7 @@ impl SyntaxIndexState { #[cfg(test)] mod tests { use super::*; - use std::{path::Path, sync::Arc}; + use std::sync::Arc; use gpui::TestAppContext; use indoc::indoc; @@ -558,7 +558,7 @@ mod tests { use serde_json::json; use settings::SettingsStore; use text::OffsetRangeExt as _; - use util::path; + use util::{path, rel_path::rel_path}; use crate::syntax_index::SyntaxIndex; @@ -739,7 +739,7 @@ mod tests { .read(cx) .path_for_entry(*project_entry_id, cx) .unwrap(); - assert_eq!(project_path.path.as_ref(), Path::new(path),); + assert_eq!(project_path.path.as_ref(), rel_path(path),); declaration } else { panic!("Expected a buffer declaration, found {:?}", declaration); @@ -764,7 +764,7 @@ mod tests { .unwrap() .path .as_ref(), - Path::new(path), + rel_path(path), ); declaration } else { diff --git a/crates/editor/src/clangd_ext.rs b/crates/editor/src/clangd_ext.rs index c78d4c83c01c49e6b1ff947d3cd53bc887424a16..276464d7ccafac55d7abfbf8eeb3d1f36dfa77d6 100644 --- a/crates/editor/src/clangd_ext.rs +++ b/crates/editor/src/clangd_ext.rs @@ -4,6 +4,7 @@ use language::Language; use project::lsp_store::lsp_ext_command::SwitchSourceHeaderResult; use rpc::proto; use url::Url; +use util::paths::PathStyle; use workspace::{OpenOptions, OpenVisible}; use crate::lsp_ext::find_specific_language_server_in_selection; @@ -38,7 +39,11 @@ pub fn switch_source_header( let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client(); cx.spawn_in(window, async move |_editor, cx| { let source_file = buffer.read_with(cx, |buffer, _| { - buffer.file().map(|file| file.path()).map(|path| path.to_string_lossy().to_string()).unwrap_or_else(|| "Unknown".to_string()) + buffer + .file() + .map(|file| file.path()) + .map(|path| path.display(PathStyle::local()).to_string()) + .unwrap_or_else(|| "Unknown".to_string()) })?; let switch_source_header = if let Some((client, project_id)) = upstream_client { @@ -53,18 +58,22 @@ pub fn switch_source_header( .context("lsp ext switch source header proto request")?; SwitchSourceHeaderResult(response.target_file) } else { - project.update(cx, |project, cx| { - project.request_lsp( - buffer, - project::LanguageServerToQuery::Other(server_to_query), - project::lsp_store::lsp_ext_command::SwitchSourceHeader, - cx, - ) - })?.await.with_context(|| format!("Switch source/header LSP request for path \"{source_file}\" failed"))? 
+ project + .update(cx, |project, cx| { + project.request_lsp( + buffer, + project::LanguageServerToQuery::Other(server_to_query), + project::lsp_store::lsp_ext_command::SwitchSourceHeader, + cx, + ) + })? + .await + .with_context(|| { + format!("Switch source/header LSP request for path \"{source_file}\" failed") + })? }; if switch_source_header.0.is_empty() { - log::info!("Clangd returned an empty string when requesting to switch source/header from \"{source_file}\"" ); return Ok(()); } @@ -75,18 +84,24 @@ pub fn switch_source_header( ) })?; - let path = goto.to_file_path().map_err(|()| { - anyhow::anyhow!("URL conversion to file path failed for \"{goto}\"") - })?; + let path = goto + .to_file_path() + .map_err(|()| anyhow::anyhow!("URL conversion to file path failed for \"{goto}\""))?; workspace .update_in(cx, |workspace, window, cx| { - workspace.open_abs_path(path, OpenOptions { visible: Some(OpenVisible::None), ..Default::default() }, window, cx) + workspace.open_abs_path( + path, + OpenOptions { + visible: Some(OpenVisible::None), + ..Default::default() + }, + window, + cx, + ) }) .with_context(|| { - format!( - "Switch source/header could not open \"{goto}\" in workspace" - ) + format!("Switch source/header could not open \"{goto}\" in workspace") })? .await .map(|_| ()) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index eec6e367dc597eb61a37ad33e116fd6688ba7c66..e3e11a483a76696b1e257049f6792e705dfad5ad 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2494,7 +2494,7 @@ impl Editor { if let Some(extension) = singleton_buffer .read(cx) .file() - .and_then(|file| file.path().extension()?.to_str()) + .and_then(|file| file.path().extension()) { key_context.set("extension", extension.to_string()); } @@ -7603,7 +7603,7 @@ impl Editor { let extension = buffer .read(cx) .file() - .and_then(|file| Some(file.path().extension()?.to_string_lossy().to_string())); + .and_then(|file| Some(file.path().extension()?.to_string())); let event_type = match accepted { true => "Edit Prediction Accepted", @@ -19263,10 +19263,6 @@ impl Editor { { return Some(dir.to_owned()); } - - if let Some(project_path) = buffer.read(cx).project_path(cx) { - return Some(project_path.path.to_path_buf()); - } } None @@ -19294,16 +19290,6 @@ impl Editor { }) } - fn target_file_path(&self, cx: &mut Context) -> Option { - self.active_excerpt(cx).and_then(|(_, buffer, _)| { - let project_path = buffer.read(cx).project_path(cx)?; - let project = self.project()?.read(cx); - let entry = project.entry_for_path(&project_path, cx)?; - let path = entry.path.to_path_buf(); - Some(path) - }) - } - pub fn reveal_in_finder( &mut self, _: &RevealInFileManager, @@ -19336,9 +19322,12 @@ impl Editor { _window: &mut Window, cx: &mut Context, ) { - if let Some(path) = self.target_file_path(cx) - && let Some(path) = path.to_str() - { + if let Some(path) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + let project = self.project()?.read(cx); + let path = buffer.read(cx).file()?.path(); + let path = path.display(project.path_style(cx)); + Some(path) + }) { cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); } else { cx.propagate(); @@ -19414,16 +19403,14 @@ impl Editor { ) { if let Some(file) = self.target_file(cx) && let Some(file_stem) = file.path().file_stem() - && let Some(name) = file_stem.to_str() { - cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); + cx.write_to_clipboard(ClipboardItem::new_string(file_stem.to_string())); } } pub fn 
copy_file_name(&mut self, _: &CopyFileName, _: &mut Window, cx: &mut Context) { if let Some(file) = self.target_file(cx) - && let Some(file_name) = file.path().file_name() - && let Some(name) = file_name.to_str() + && let Some(name) = file.path().file_name() { cx.write_to_clipboard(ClipboardItem::new_string(name.to_string())); } @@ -19691,9 +19678,8 @@ impl Editor { cx: &mut Context, ) { let selection = self.selections.newest::(cx).start.row + 1; - if let Some(file) = self.target_file(cx) - && let Some(path) = file.path().to_str() - { + if let Some(file) = self.target_file(cx) { + let path = file.path().display(file.path_style(cx)); cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}"))); } } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 7aa441b4c7083aa4e39b006bfd03aea47fec23f0..7a1a6f49830d657d2c3aa0f1e2eeabe205031075 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -56,6 +56,7 @@ use text::ToPoint as _; use unindent::Unindent; use util::{ assert_set_eq, path, + rel_path::rel_path, test::{TextRangeMarker, marked_text_ranges, marked_text_ranges_by, sample_text}, uri, }; @@ -11142,19 +11143,19 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { let buffer_1 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); let buffer_2 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "other.rs"), cx) + project.open_buffer((worktree_id, rel_path("other.rs")), cx) }) .await .unwrap(); let buffer_3 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "lib.rs"), cx) + project.open_buffer((worktree_id, rel_path("lib.rs")), cx) }) .await .unwrap(); @@ -11329,19 +11330,19 @@ async fn test_autosave_with_dirty_buffers(cx: &mut TestAppContext) { // Open three buffers let buffer_1 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "file1.rs"), cx) + project.open_buffer((worktree_id, rel_path("file1.rs")), cx) }) .await .unwrap(); let buffer_2 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "file2.rs"), cx) + project.open_buffer((worktree_id, rel_path("file2.rs")), cx) }) .await .unwrap(); let buffer_3 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "file3.rs"), cx) + project.open_buffer((worktree_id, rel_path("file3.rs")), cx) }) .await .unwrap(); @@ -14677,7 +14678,7 @@ async fn test_multiline_completion(cx: &mut TestAppContext) { .unwrap(); let editor = workspace .update(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.ts"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.ts")), None, true, window, cx) }) .unwrap() .await @@ -16394,7 +16395,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) { leader.update(cx, |leader, cx| { leader.buffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( - PathKey::namespaced(1, Arc::from(Path::new("b.txt"))), + PathKey::namespaced(1, "b.txt".into()), buffer_1.clone(), vec![ Point::row_range(0..3), @@ -16405,7 +16406,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) { cx, ); multibuffer.set_excerpts_for_path( - PathKey::namespaced(1, Arc::from(Path::new("a.txt"))), + PathKey::namespaced(1, "a.txt".into()), buffer_2.clone(), vec![Point::row_range(0..6), Point::row_range(8..12)], 0, @@ -16897,7 
+16898,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut TestAppContext) { .unwrap(); let editor_handle = workspace .update(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "main.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) }) .unwrap() .await @@ -20878,9 +20879,9 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) { fs.set_head_for_repo( path!("/test/.git").as_ref(), &[ - ("file-1".into(), "one\n".into()), - ("file-2".into(), "two\n".into()), - ("file-3".into(), "three\n".into()), + ("file-1", "one\n".into()), + ("file-2", "two\n".into()), + ("file-3", "three\n".into()), ], "deadbeef", ); @@ -20904,7 +20905,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) { for buffer in &buffers { let snapshot = buffer.read(cx).snapshot(); multibuffer.set_excerpts_for_path( - PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().clone()), + PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().as_str().into()), buffer.clone(), vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)], 2, @@ -21657,19 +21658,19 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { let buffer_1 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "first.rs"), cx) + project.open_buffer((worktree_id, rel_path("first.rs")), cx) }) .await .unwrap(); let buffer_2 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "second.rs"), cx) + project.open_buffer((worktree_id, rel_path("second.rs")), cx) }) .await .unwrap(); let buffer_3 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "third.rs"), cx) + project.open_buffer((worktree_id, rel_path("third.rs")), cx) }) .await .unwrap(); @@ -21825,19 +21826,19 @@ async fn test_folding_buffers_with_one_excerpt(cx: &mut TestAppContext) { let buffer_1 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "first.rs"), cx) + project.open_buffer((worktree_id, rel_path("first.rs")), cx) }) .await .unwrap(); let buffer_2 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "second.rs"), cx) + project.open_buffer((worktree_id, rel_path("second.rs")), cx) }) .await .unwrap(); let buffer_3 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "third.rs"), cx) + project.open_buffer((worktree_id, rel_path("third.rs")), cx) }) .await .unwrap(); @@ -21960,7 +21961,7 @@ async fn test_folding_buffer_when_multibuffer_has_only_one_excerpt(cx: &mut Test let buffer_1 = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); @@ -22499,7 +22500,7 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) { let buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); @@ -22613,7 +22614,7 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) { let buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), cx) }) .await .unwrap(); @@ -22783,7 +22784,7 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) { let buffer = project .update(cx, |project, cx| { - project.open_buffer((worktree_id, "main.rs"), cx) + project.open_buffer((worktree_id, rel_path("main.rs")), 
cx) }) .await .unwrap(); @@ -23371,7 +23372,7 @@ println!("5"); let editor_1 = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "main.rs"), + (worktree_id, rel_path("main.rs")), Some(pane_1.downgrade()), true, window, @@ -23414,7 +23415,7 @@ println!("5"); let editor_2 = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "main.rs"), + (worktree_id, rel_path("main.rs")), Some(pane_2.downgrade()), true, window, @@ -23453,7 +23454,7 @@ println!("5"); let _other_editor_1 = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "lib.rs"), + (worktree_id, rel_path("lib.rs")), Some(pane_1.downgrade()), true, window, @@ -23489,7 +23490,7 @@ println!("5"); let _other_editor_2 = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "lib.rs"), + (worktree_id, rel_path("lib.rs")), Some(pane_2.downgrade()), true, window, @@ -23526,7 +23527,7 @@ println!("5"); let _editor_1_reopened = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "main.rs"), + (worktree_id, rel_path("main.rs")), Some(pane_1.downgrade()), true, window, @@ -23540,7 +23541,7 @@ println!("5"); let _editor_2_reopened = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "main.rs"), + (worktree_id, rel_path("main.rs")), Some(pane_2.downgrade()), true, window, @@ -23634,7 +23635,7 @@ println!("5"); let editor = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "main.rs"), + (worktree_id, rel_path("main.rs")), Some(pane.downgrade()), true, window, @@ -23693,7 +23694,7 @@ println!("5"); let _editor_reopened = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "main.rs"), + (worktree_id, rel_path("main.rs")), Some(pane.downgrade()), true, window, @@ -23860,7 +23861,7 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) { .unwrap(); let editor = workspace .update(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "file.html"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("file.html")), None, true, window, cx) }) .unwrap() .await @@ -24054,7 +24055,7 @@ async fn test_invisible_worktree_servers(cx: &mut TestAppContext) { let main_editor = workspace .update_in(cx, |workspace, window, cx| { workspace.open_path( - (worktree_id, "main.rs"), + (worktree_id, rel_path("main.rs")), Some(pane.downgrade()), true, window, @@ -25636,7 +25637,7 @@ async fn test_document_colors(cx: &mut TestAppContext) { .path(); assert_eq!( editor_file.as_ref(), - Path::new("first.rs"), + rel_path("first.rs"), "Both editors should be opened for the same file" ) } @@ -25816,7 +25817,7 @@ async fn test_non_utf_8_opens(cx: &mut TestAppContext) { let handle = workspace .update_in(cx, |workspace, window, cx| { - let project_path = (worktree_id, "one.pdf"); + let project_path = (worktree_id, rel_path("one.pdf")); workspace.open_path(project_path, None, true, window, cx) }) .await diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 65da1d76b78b9099073a9abd655a7f54608bd848..90dd2a599a16705e0eb49319ec562d505ca58399 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3779,13 +3779,15 @@ impl EditorElement { .as_ref() .map(|project| project.read(cx).visible_worktrees(cx).count() > 1) .unwrap_or_default(); - let can_open_excerpts = 
Editor::can_open_excerpts_in_file(for_excerpt.buffer.file()); + let file = for_excerpt.buffer.file(); + let can_open_excerpts = Editor::can_open_excerpts_in_file(file); + let path_style = file.map(|file| file.path_style(cx)); let relative_path = for_excerpt.buffer.resolve_file_path(cx, include_root); let filename = relative_path .as_ref() .and_then(|path| Some(path.file_name()?.to_string_lossy().to_string())); let parent_path = relative_path.as_ref().and_then(|path| { - Some(path.parent()?.to_string_lossy().to_string() + std::path::MAIN_SEPARATOR_STR) + Some(path.parent()?.to_string_lossy().to_string() + path_style?.separator()) }); let focus_handle = editor.focus_handle(cx); let colors = cx.theme().colors(); @@ -3990,12 +3992,13 @@ impl EditorElement { && let Some(worktree) = project.read(cx).worktree_for_id(file.worktree_id(cx), cx) { + let path_style = file.path_style(cx); let worktree = worktree.read(cx); let relative_path = file.path(); let entry_for_path = worktree.entry_for_path(relative_path); let abs_path = entry_for_path.map(|e| { e.canonical_path.as_deref().map_or_else( - || worktree.abs_path().join(relative_path), + || worktree.absolutize(relative_path), Path::to_path_buf, ) }); @@ -4031,7 +4034,7 @@ impl EditorElement { Some(Box::new(zed_actions::workspace::CopyRelativePath)), window.handler_for(&editor, move |_, _, cx| { cx.write_to_clipboard(ClipboardItem::new_string( - relative_path.to_string_lossy().to_string(), + relative_path.display(path_style).to_string(), )); }), ) diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 48a6da74467ea91630b4954fe9af38d34b8a7e96..35cab36a5568023db9ff77fd692074d43df56b91 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -698,6 +698,7 @@ async fn parse_commit_messages( #[cfg(test)] mod tests { use super::*; + use git::repository::repo_path; use gpui::Context; use language::{Point, Rope}; use project::FakeFs; @@ -850,7 +851,7 @@ mod tests { fs.set_blame_for_repo( Path::new("/my-repo/.git"), vec![( - "file.txt".into(), + repo_path("file.txt"), Blame { entries: vec![ blame_entry("1b1b1b", 0..1), @@ -967,7 +968,7 @@ mod tests { fs.set_blame_for_repo( Path::new(path!("/my-repo/.git")), vec![( - "file.txt".into(), + repo_path("file.txt"), Blame { entries: vec![blame_entry("1b1b1b", 0..4)], ..Default::default() @@ -1135,7 +1136,7 @@ mod tests { fs.set_blame_for_repo( Path::new(path!("/my-repo/.git")), vec![( - "file.txt".into(), + repo_path("file.txt"), Blame { entries: blame_entries, ..Default::default() @@ -1178,7 +1179,7 @@ mod tests { fs.set_blame_for_repo( Path::new(path!("/my-repo/.git")), vec![( - "file.txt".into(), + repo_path("file.txt"), Blame { entries: blame_entries, ..Default::default() diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index b5ae47bbdf0fc13a87b6bdac63f9f2a85594aca0..3e592e4bdcbca2a0e9679f9e79e7d3f65e95895a 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -651,7 +651,7 @@ impl Item for Editor { fn tab_content_text(&self, detail: usize, cx: &App) -> SharedString { if let Some(path) = path_for_buffer(&self.buffer, detail, true, cx) { - path.to_string_lossy().to_string().into() + path.to_string().into() } else { // Use the same logic as the displayed title for consistency self.buffer.read(cx).title(cx).to_string().into() @@ -667,7 +667,7 @@ impl Item for Editor { .file_icons .then(|| { path_for_buffer(&self.buffer, 0, true, cx) - .and_then(|path| FileIcons::get_icon(path.as_ref(), cx)) + .and_then(|path| 
FileIcons::get_icon(Path::new(&*path), cx)) }) .flatten() .map(Icon::from_path) @@ -703,8 +703,7 @@ impl Item for Editor { let description = params.detail.and_then(|detail| { let path = path_for_buffer(&self.buffer, detail, false, cx)?; - let description = path.to_string_lossy(); - let description = description.trim(); + let description = path.trim(); if description.is_empty() { return None; @@ -898,10 +897,7 @@ impl Item for Editor { .as_singleton() .expect("cannot call save_as on an excerpt list"); - let file_extension = path - .path - .extension() - .map(|a| a.to_string_lossy().to_string()); + let file_extension = path.path.extension().map(|a| a.to_string()); self.report_editor_event( ReportEditorEvent::Saved { auto_saved: false }, file_extension, @@ -1167,7 +1163,7 @@ impl SerializableItem for Editor { let (worktree, path) = project.find_worktree(&abs_path, cx)?; let project_path = ProjectPath { worktree_id: worktree.read(cx).id(), - path: path.into(), + path: path, }; Some(project.open_path(project_path, cx)) }); @@ -1288,7 +1284,7 @@ impl SerializableItem for Editor { project .read(cx) .worktree_for_id(worktree_id, cx) - .and_then(|worktree| worktree.read(cx).absolutize(file.path()).ok()) + .map(|worktree| worktree.read(cx).absolutize(file.path())) .or_else(|| { let full_path = file.full_path(cx); let project_path = project.read(cx).find_project_path(&full_path, cx)?; @@ -1882,7 +1878,7 @@ fn path_for_buffer<'a>( height: usize, include_filename: bool, cx: &'a App, -) -> Option> { +) -> Option> { let file = buffer.read(cx).as_singleton()?.read(cx).file()?; path_for_file(file.as_ref(), height, include_filename, cx) } @@ -1892,7 +1888,7 @@ fn path_for_file<'a>( mut height: usize, include_filename: bool, cx: &'a App, -) -> Option> { +) -> Option> { // Ensure we always render at least the filename. height += 1; @@ -1906,22 +1902,21 @@ fn path_for_file<'a>( } } - // Here we could have just always used `full_path`, but that is very - // allocation-heavy and so we try to use a `Cow` if we haven't - // traversed all the way up to the worktree's root. + // The full_path method allocates, so avoid calling it if height is zero. 
if height > 0 { - let full_path = file.full_path(cx); - if include_filename { - Some(full_path.into()) - } else { - Some(full_path.parent()?.to_path_buf().into()) + let mut full_path = file.full_path(cx); + if !include_filename { + if !full_path.pop() { + return None; + } } + Some(full_path.to_string_lossy().to_string().into()) } else { let mut path = file.path().strip_prefix(prefix).ok()?; if !include_filename { path = path.parent()?; } - Some(path.into()) + Some(path.display(file.path_style(cx))) } } @@ -1936,12 +1931,12 @@ mod tests { use language::{LanguageMatcher, TestFile}; use project::FakeFs; use std::path::{Path, PathBuf}; - use util::path; + use util::{path, rel_path::RelPath}; #[gpui::test] fn test_path_for_file(cx: &mut App) { let file = TestFile { - path: Path::new("").into(), + path: RelPath::empty().into(), root_name: String::new(), local_root: None, }; diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 03e99b9fff9a89fcac28605fe6bf7a08b23f8f02..be4a5575b7761e5c81acb215d13344d8779ddfdf 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -217,15 +217,7 @@ pub fn editor_content_with_blocks(editor: &Entity, cx: &mut VisualTestCo height, } => { lines[row.0 as usize].push_str(&cx.update(|_, cx| { - format!( - "§ {}", - first_excerpt - .buffer - .file() - .unwrap() - .file_name(cx) - .to_string_lossy() - ) + format!("§ {}", first_excerpt.buffer.file().unwrap().file_name(cx)) })); for row in row.0 + 1..row.0 + height { lines[row as usize].push_str("§ -----"); @@ -237,17 +229,11 @@ pub fn editor_content_with_blocks(editor: &Entity, cx: &mut VisualTestCo } } Block::BufferHeader { excerpt, height } => { - lines[row.0 as usize].push_str(&cx.update(|_, cx| { - format!( - "§ {}", - excerpt - .buffer - .file() - .unwrap() - .file_name(cx) - .to_string_lossy() - ) - })); + lines[row.0 as usize].push_str( + &cx.update(|_, cx| { + format!("§ {}", excerpt.buffer.file().unwrap().file_name(cx)) + }), + ); for row in row.0 + 1..row.0 + height { lines[row as usize].push_str("§ -----"); } diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index fbf7a312fe56600ad78e13c278c85e29b8ca5aa5..06fd01c85f8d2371a9ff181b6720e25353a396c7 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -296,7 +296,7 @@ impl EditorTestContext { let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); fs.set_head_for_repo( &Self::root_path().join(".git"), - &[(path.into(), diff_base.to_string())], + &[(path.as_str(), diff_base.to_string())], "deadbeef", ); self.cx.run_until_parked(); @@ -317,7 +317,7 @@ impl EditorTestContext { let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); fs.set_index_for_repo( &Self::root_path().join(".git"), - &[(path.into(), diff_base.to_string())], + &[(path.as_str(), diff_base.to_string())], ); self.cx.run_until_parked(); } @@ -329,7 +329,7 @@ impl EditorTestContext { let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); let mut found = None; fs.with_git_state(&Self::root_path().join(".git"), false, |git_state| { - found = git_state.index_contents.get(path.as_ref()).cloned(); + found = git_state.index_contents.get(&path.into()).cloned(); }) .unwrap(); assert_eq!(expected, found.as_deref()); diff --git a/crates/eval/src/example.rs b/crates/eval/src/example.rs index 457b62e98ca4cabf83fb379cbaa70f07957ac6b7..c0f0900a6cfa5dd942bd27eed852ee4a52896c2c 100644 --- 
a/crates/eval/src/example.rs +++ b/crates/eval/src/example.rs @@ -1,7 +1,6 @@ use std::{ error::Error, fmt::{self, Debug}, - path::Path, sync::{Arc, Mutex}, time::Duration, }; @@ -20,6 +19,7 @@ use collections::HashMap; use futures::{FutureExt as _, StreamExt, channel::mpsc, select_biased}; use gpui::{App, AppContext, AsyncApp, Entity}; use language_model::{LanguageModel, Role, StopReason}; +use util::rel_path::RelPath; pub const THREAD_EVENT_TIMEOUT: Duration = Duration::from_secs(60 * 2); @@ -354,7 +354,7 @@ impl ExampleContext { Ok(response) } - pub fn edits(&self) -> HashMap, FileEdits> { + pub fn edits(&self) -> HashMap, FileEdits> { self.agent_thread .read_with(&self.app, |thread, cx| { let action_log = thread.action_log().read(cx); diff --git a/crates/eval/src/examples/add_arg_to_trait_method.rs b/crates/eval/src/examples/add_arg_to_trait_method.rs index 084f12bc6263da030d313c362cc3d051dfdb8ea8..0626be5a4e2d620337e1bd8896e25f519de86811 100644 --- a/crates/eval/src/examples/add_arg_to_trait_method.rs +++ b/crates/eval/src/examples/add_arg_to_trait_method.rs @@ -1,8 +1,7 @@ -use std::path::Path; - use agent_settings::AgentProfileId; use anyhow::Result; use async_trait::async_trait; +use util::rel_path::RelPath; use crate::example::{Example, ExampleContext, ExampleMetadata, JudgeAssertion, LanguageServer}; @@ -68,7 +67,7 @@ impl Example for AddArgToTraitMethod { for tool_name in add_ignored_window_paths { let path_str = format!("crates/assistant_tools/src/{}.rs", tool_name); - let edits = edits.get(Path::new(&path_str)); + let edits = edits.get(RelPath::new(&path_str).unwrap()); let ignored = edits.is_some_and(|edits| { edits.has_added_line(" _window: Option,\n") @@ -86,7 +85,8 @@ impl Example for AddArgToTraitMethod { // Adds unignored argument to `batch_tool` - let batch_tool_edits = edits.get(Path::new("crates/assistant_tools/src/batch_tool.rs")); + let batch_tool_edits = + edits.get(RelPath::new("crates/assistant_tools/src/batch_tool.rs").unwrap()); cx.assert( batch_tool_edits.is_some_and(|edits| { diff --git a/crates/eval/src/examples/code_block_citations.rs b/crates/eval/src/examples/code_block_citations.rs index 2239ccdfddcc023fdae6f56bd91fd73c1f851ac6..8150d68ac3e54772e35fe52f086fb942d8923ffb 100644 --- a/crates/eval/src/examples/code_block_citations.rs +++ b/crates/eval/src/examples/code_block_citations.rs @@ -65,7 +65,7 @@ impl Example for CodeBlockCitations { thread .project() .read(cx) - .find_project_path(path_range.path, cx) + .find_project_path(path_range.path.as_ref(), cx) }) .ok() .flatten(); diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index c6e4e0b6ec683b63b90920861f3cd023069666e6..d0fd98d0280c2f539a67de009a0335b5ae479027 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -250,7 +250,7 @@ impl ExampleInstance { worktree .files(false, 0) .find_map(|e| { - if e.path.clone().extension().and_then(|ext| ext.to_str()) + if e.path.clone().extension() == Some(&language_server.file_extension) { Some(ProjectPath { diff --git a/crates/extension/src/extension.rs b/crates/extension/src/extension.rs index 6af793253bce2d122a5361f6b83f33cb39d45253..bd2b37c337dcaca448e2175472ea46c126d2f9a3 100644 --- a/crates/extension/src/extension.rs +++ b/crates/extension/src/extension.rs @@ -16,6 +16,7 @@ use gpui::{App, Task}; use language::LanguageName; use semantic_version::SemanticVersion; use task::{SpawnInTerminal, ZedDebugConfig}; +use util::rel_path::RelPath; pub use crate::capabilities::*; pub use crate::extension_events::*; @@ -33,7 
+34,7 @@ pub fn init(cx: &mut App) { pub trait WorktreeDelegate: Send + Sync + 'static { fn id(&self) -> u64; fn root_path(&self) -> String; - async fn read_text_file(&self, path: PathBuf) -> Result; + async fn read_text_file(&self, path: &RelPath) -> Result; async fn which(&self, binary_name: String) -> Option; async fn shell_env(&self) -> Vec<(String, String)>; } diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 2d5839e96a1cf5c7550300e762bb97d357864fc6..cd1eaa9855351630e13185962738b1e0d5fb9489 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -1752,7 +1752,14 @@ impl ExtensionStore { })? .await?; let dest_dir = RemotePathBuf::new( - PathBuf::from(&response.tmp_dir).join(missing_extension.clone().id), + path_style + .join(&response.tmp_dir, &missing_extension.id) + .with_context(|| { + format!( + "failed to construct destination path: {:?}, {:?}", + response.tmp_dir, missing_extension.id, + ) + })?, path_style, ); log::info!("Uploading extension {}", missing_extension.clone().id); diff --git a/crates/extension_host/src/headless_host.rs b/crates/extension_host/src/headless_host.rs index a6305118cd3355f69a42914ec86bb5edcfc74810..f14bb811a6742a60899ac4301cfac096bb41a07f 100644 --- a/crates/extension_host/src/headless_host.rs +++ b/crates/extension_host/src/headless_host.rs @@ -1,10 +1,7 @@ use std::{path::PathBuf, sync::Arc}; use anyhow::{Context as _, Result}; -use client::{ - TypedEnvelope, - proto::{self, FromProto}, -}; +use client::{TypedEnvelope, proto}; use collections::{HashMap, HashSet}; use extension::{ Extension, ExtensionDebugAdapterProviderProxy, ExtensionHostProxy, ExtensionLanguageProxy, @@ -342,7 +339,7 @@ impl HeadlessExtensionStore { version: extension.version, dev: extension.dev, }, - PathBuf::from_proto(envelope.payload.tmp_dir), + PathBuf::from(envelope.payload.tmp_dir), cx, ) })? 
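// Editor's sketch (not part of the patch): the extension hunks above change
// `WorktreeDelegate::read_text_file` to take `&RelPath` instead of `PathBuf`, and the
// WIT host implementations below convert the raw string they receive with the fallible
// `RelPath::new` before calling the delegate. A hedged illustration of such a call
// site; the generic bound, the error conversion, and the assumption that
// `read_text_file` returns `Result<String>` are inferred from context, not verified.
async fn read_worktree_file<D: extension::WorktreeDelegate>(
    delegate: &D,
    raw_path: &str,
) -> anyhow::Result<String> {
    use util::rel_path::RelPath;

    // Reject absolute or otherwise non-worktree-relative input instead of passing it on.
    let path = RelPath::new(raw_path)?;
    delegate.read_text_file(path).await
}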
diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs index 9c726ebd1c45d868d0794f26044cbc53d87eb00f..16e695f04fc52b307a08ffce48bfcca77ba816c0 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs @@ -16,6 +16,7 @@ use std::{ path::{Path, PathBuf}, sync::{Arc, OnceLock}, }; +use util::rel_path::RelPath; use util::{archive::extract_zip, fs::make_file_executable, maybe}; use wasmtime::component::{Linker, Resource}; @@ -421,12 +422,12 @@ impl ExtensionImports for WasmState { ) -> wasmtime::Result> { self.on_main_thread(|cx| { async move { - let location = location - .as_ref() - .map(|location| ::settings::SettingsLocation { + let location = location.as_ref().and_then(|location| { + Some(::settings::SettingsLocation { worktree_id: WorktreeId::from_proto(location.worktree_id), - path: Path::new(&location.path), - }); + path: RelPath::new(&location.path).ok()?, + }) + }); cx.update(|cx| match category.as_str() { "language" => { diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index 790a75e896dc0a440bc27d8972c09b879020e9c2..9e608b9e8e68ee93cfcb47c39708a8a0c2499d71 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -31,7 +31,7 @@ use std::{ }; use task::{SpawnInTerminal, ZedDebugConfig}; use url::Url; -use util::{archive::extract_zip, fs::make_file_executable, maybe}; +use util::{archive::extract_zip, fs::make_file_executable, maybe, rel_path::RelPath}; use wasmtime::component::{Linker, Resource}; pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 6, 0); @@ -564,7 +564,7 @@ impl HostWorktree for WasmState { ) -> wasmtime::Result> { let delegate = self.table.get(&delegate)?; Ok(delegate - .read_text_file(path.into()) + .read_text_file(RelPath::new(&path)?) .await .map_err(|error| error.to_string())) } @@ -914,12 +914,12 @@ impl ExtensionImports for WasmState { ) -> wasmtime::Result> { self.on_main_thread(|cx| { async move { - let location = location - .as_ref() - .map(|location| ::settings::SettingsLocation { + let location = location.as_ref().and_then(|location| { + Some(::settings::SettingsLocation { worktree_id: WorktreeId::from_proto(location.worktree_id), - path: Path::new(&location.path), - }); + path: RelPath::new(&location.path).ok()?, + }) + }); cx.update(|cx| match category.as_str() { "language" => { diff --git a/crates/extensions_ui/src/extension_suggest.rs b/crates/extensions_ui/src/extension_suggest.rs index 65572eb0241be52dc0fb2ebf9891d3a5858caa83..5dcd1e210527ee89a35a3b89008a901cf1f9f036 100644 --- a/crates/extensions_ui/src/extension_suggest.rs +++ b/crates/extensions_ui/src/extension_suggest.rs @@ -1,5 +1,4 @@ use std::collections::HashMap; -use std::path::Path; use std::sync::{Arc, OnceLock}; use db::kvp::KEY_VALUE_STORE; @@ -8,6 +7,7 @@ use extension_host::ExtensionStore; use gpui::{AppContext as _, Context, Entity, SharedString, Window}; use language::Buffer; use ui::prelude::*; +use util::rel_path::RelPath; use workspace::notifications::simple_message_notification::MessageNotification; use workspace::{Workspace, notifications::NotificationId}; @@ -100,15 +100,9 @@ struct SuggestedExtension { } /// Returns the suggested extension for the given [`Path`]. 
-fn suggested_extension(path: impl AsRef) -> Option { - let path = path.as_ref(); - - let file_extension: Option> = path - .extension() - .and_then(|extension| Some(extension.to_str()?.into())); - let file_name: Option> = path - .file_name() - .and_then(|file_name| Some(file_name.to_str()?.into())); +fn suggested_extension(path: &RelPath) -> Option { + let file_extension: Option> = path.extension().map(|extension| extension.into()); + let file_name: Option> = path.file_name().map(|name| name.into()); let (file_name_or_extension, extension_id) = None // We suggest against file names first, as these suggestions will be more @@ -210,39 +204,40 @@ pub(crate) fn suggest(buffer: Entity, window: &mut Window, cx: &mut Cont #[cfg(test)] mod tests { use super::*; + use util::rel_path::rel_path; #[test] pub fn test_suggested_extension() { assert_eq!( - suggested_extension("Cargo.toml"), + suggested_extension(rel_path("Cargo.toml")), Some(SuggestedExtension { extension_id: "toml".into(), file_name_or_extension: "toml".into() }) ); assert_eq!( - suggested_extension("Cargo.lock"), + suggested_extension(rel_path("Cargo.lock")), Some(SuggestedExtension { extension_id: "toml".into(), file_name_or_extension: "Cargo.lock".into() }) ); assert_eq!( - suggested_extension("Dockerfile"), + suggested_extension(rel_path("Dockerfile")), Some(SuggestedExtension { extension_id: "dockerfile".into(), file_name_or_extension: "Dockerfile".into() }) ); assert_eq!( - suggested_extension("a/b/c/d/.gitignore"), + suggested_extension(rel_path("a/b/c/d/.gitignore")), Some(SuggestedExtension { extension_id: "git-firefly".into(), file_name_or_extension: ".gitignore".into() }) ); assert_eq!( - suggested_extension("a/b/c/d/test.gleam"), + suggested_extension(rel_path("a/b/c/d/test.gleam")), Some(SuggestedExtension { extension_id: "gleam".into(), file_name_or_extension: "gleam".into() diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index dadd3ea299304e845bbc0f412c3962d14e2006e4..4126d37a3fb60893474b5e090a6d2a83fab1b50e 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -39,7 +39,12 @@ use ui::{ ButtonLike, ContextMenu, HighlightedLabel, Indicator, KeyBinding, ListItem, ListItemSpacing, PopoverMenu, PopoverMenuHandle, TintColor, Tooltip, prelude::*, }; -use util::{ResultExt, maybe, paths::PathWithPosition, post_inc}; +use util::{ + ResultExt, maybe, + paths::{PathStyle, PathWithPosition}, + post_inc, + rel_path::RelPath, +}; use workspace::{ ModalView, OpenOptions, OpenVisible, SplitDirection, Workspace, item::PreviewTabsSettings, notifications::NotifyResultExt, pane, @@ -126,38 +131,34 @@ impl FileFinder { let project = workspace.project().read(cx); let fs = project.fs(); - let currently_opened_path = workspace - .active_item(cx) - .and_then(|item| item.project_path(cx)) - .map(|project_path| { - let abs_path = project - .worktree_for_id(project_path.worktree_id, cx) - .map(|worktree| worktree.read(cx).abs_path().join(&project_path.path)); - FoundPath::new(project_path, abs_path) - }); + let currently_opened_path = workspace.active_item(cx).and_then(|item| { + let project_path = item.project_path(cx)?; + let abs_path = project + .worktree_for_id(project_path.worktree_id, cx)? 
+ .read(cx) + .absolutize(&project_path.path); + Some(FoundPath::new(project_path, abs_path)) + }); let history_items = workspace .recent_navigation_history(Some(MAX_RECENT_SELECTIONS), cx) .into_iter() .filter_map(|(project_path, abs_path)| { if project.entry_for_path(&project_path, cx).is_some() { - return Some(Task::ready(Some(FoundPath::new(project_path, abs_path)))); + return Some(Task::ready(Some(FoundPath::new(project_path, abs_path?)))); } let abs_path = abs_path?; if project.is_local() { let fs = fs.clone(); Some(cx.background_spawn(async move { if fs.is_file(&abs_path).await { - Some(FoundPath::new(project_path, Some(abs_path))) + Some(FoundPath::new(project_path, abs_path)) } else { None } })) } else { - Some(Task::ready(Some(FoundPath::new( - project_path, - Some(abs_path), - )))) + Some(Task::ready(Some(FoundPath::new(project_path, abs_path)))) } }) .collect::>(); @@ -465,7 +466,7 @@ enum Match { } impl Match { - fn relative_path(&self) -> Option<&Arc> { + fn relative_path(&self) -> Option<&Arc> { match self { Match::History { path, .. } => Some(&path.project.path), Match::Search(panel_match) => Some(&panel_match.0.path), @@ -475,20 +476,14 @@ impl Match { fn abs_path(&self, project: &Entity, cx: &App) -> Option { match self { - Match::History { path, .. } => path.absolute.clone().or_else(|| { + Match::History { path, .. } => Some(path.absolute.clone()), + Match::Search(ProjectPanelOrdMatch(path_match)) => Some( project .read(cx) - .worktree_for_id(path.project.worktree_id, cx)? + .worktree_for_id(WorktreeId::from_usize(path_match.worktree_id), cx)? .read(cx) - .absolutize(&path.project.path) - .ok() - }), - Match::Search(ProjectPanelOrdMatch(path_match)) => project - .read(cx) - .worktree_for_id(WorktreeId::from_usize(path_match.worktree_id), cx)? - .read(cx) - .absolutize(&path_match.path) - .ok(), + .absolutize(&path_match.path), + ), Match::CreateNew(_) => None, } } @@ -671,10 +666,9 @@ impl Matches { } if let Some(filename) = panel_match.0.path.file_name() { - let path_str = panel_match.0.path.to_string_lossy(); - let filename_str = filename.to_string_lossy(); + let path_str = panel_match.0.path.as_str(); - if let Some(filename_pos) = path_str.rfind(&*filename_str) + if let Some(filename_pos) = path_str.rfind(filename) && panel_match.0.positions[0] >= filename_pos { let mut prev_position = panel_match.0.positions[0]; @@ -696,7 +690,7 @@ fn matching_history_items<'a>( history_items: impl IntoIterator, currently_opened: Option<&'a FoundPath>, query: &FileSearchQuery, -) -> HashMap, Match> { +) -> HashMap, Match> { let mut candidates_paths = HashMap::default(); let history_items_by_worktrees = history_items @@ -714,7 +708,7 @@ fn matching_history_items<'a>( .project .path .file_name()? 
- .to_string_lossy() + .to_string() .to_lowercase() .chars(), ), @@ -768,11 +762,11 @@ fn matching_history_items<'a>( #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] struct FoundPath { project: ProjectPath, - absolute: Option, + absolute: PathBuf, } impl FoundPath { - fn new(project: ProjectPath, absolute: Option) -> Self { + fn new(project: ProjectPath, absolute: PathBuf) -> Self { Self { project, absolute } } } @@ -944,47 +938,44 @@ impl FileFinderDelegate { extend_old_matches, ); - let filename = &query.raw_query; - let mut query_path = Path::new(filename); - // add option of creating new file only if path is relative - let available_worktree = self - .project - .read(cx) - .visible_worktrees(cx) - .filter(|worktree| !worktree.read(cx).is_single_file()) - .collect::>(); - let worktree_count = available_worktree.len(); - let mut expect_worktree = available_worktree.first().cloned(); - for worktree in available_worktree { - let worktree_root = worktree + let path_style = self.project.read(cx).path_style(cx); + let query_path = query.raw_query.as_str(); + if let Ok(mut query_path) = RelPath::from_std_path(Path::new(query_path), path_style) { + let available_worktree = self + .project .read(cx) - .abs_path() - .file_name() - .map_or(String::new(), |f| f.to_string_lossy().to_string()); - if worktree_count > 1 && query_path.starts_with(&worktree_root) { - query_path = query_path - .strip_prefix(&worktree_root) - .unwrap_or(query_path); - expect_worktree = Some(worktree); - break; + .visible_worktrees(cx) + .filter(|worktree| !worktree.read(cx).is_single_file()) + .collect::>(); + let worktree_count = available_worktree.len(); + let mut expect_worktree = available_worktree.first().cloned(); + for worktree in available_worktree { + let worktree_root = worktree.read(cx).root_name(); + if worktree_count > 1 { + if let Ok(suffix) = query_path.strip_prefix(worktree_root) { + query_path = suffix.into(); + expect_worktree = Some(worktree); + break; + } + } } - } - if let Some(FoundPath { ref project, .. }) = self.currently_opened_path { - let worktree_id = project.worktree_id; - expect_worktree = self.project.read(cx).worktree_for_id(worktree_id, cx); - } + if let Some(FoundPath { ref project, .. 
}) = self.currently_opened_path { + let worktree_id = project.worktree_id; + expect_worktree = self.project.read(cx).worktree_for_id(worktree_id, cx); + } - if let Some(worktree) = expect_worktree { - let worktree = worktree.read(cx); - if query_path.is_relative() - && worktree.entry_for_path(&query_path).is_none() - && !filename.ends_with("/") - { - self.matches.matches.push(Match::CreateNew(ProjectPath { - worktree_id: worktree.id(), - path: Arc::from(query_path), - })); + if let Some(worktree) = expect_worktree { + let worktree = worktree.read(cx); + if worktree.entry_for_path(&query_path).is_none() + && !query.raw_query.ends_with("/") + && !(path_style.is_windows() && query.raw_query.ends_with("\\")) + { + self.matches.matches.push(Match::CreateNew(ProjectPath { + worktree_id: worktree.id(), + path: query_path, + })); + } } } @@ -1009,8 +1000,8 @@ impl FileFinderDelegate { path_match: &Match, window: &mut Window, cx: &App, - ix: usize, ) -> (HighlightedLabel, HighlightedLabel) { + let path_style = self.project.read(cx).path_style(cx); let (file_name, file_name_positions, mut full_path, mut full_path_positions) = match &path_match { Match::History { @@ -1018,68 +1009,52 @@ impl FileFinderDelegate { panel_match, } => { let worktree_id = entry_path.project.worktree_id; - let project_relative_path = &entry_path.project.path; - let has_worktree = self + let worktree = self .project .read(cx) .worktree_for_id(worktree_id, cx) - .is_some(); - - if let Some(absolute_path) = - entry_path.absolute.as_ref().filter(|_| !has_worktree) - { + .filter(|worktree| worktree.read(cx).is_visible()); + + if let Some(panel_match) = panel_match { + self.labels_for_path_match(&panel_match.0, path_style) + } else if let Some(worktree) = worktree { + let full_path = + worktree.read(cx).root_name().join(&entry_path.project.path); + let mut components = full_path.components(); + let filename = components.next_back().unwrap_or(""); + let prefix = components.rest(); ( - absolute_path - .file_name() - .map_or_else( - || project_relative_path.to_string_lossy(), - |file_name| file_name.to_string_lossy(), - ) - .to_string(), + filename.to_string(), Vec::new(), - absolute_path.to_string_lossy().to_string(), + prefix.display(path_style).to_string() + path_style.separator(), Vec::new(), ) } else { - let mut path = Arc::clone(project_relative_path); - if project_relative_path.as_ref() == Path::new("") - && let Some(absolute_path) = &entry_path.absolute - { - path = Arc::from(absolute_path.as_path()); - } - - let mut path_match = PathMatch { - score: ix as f64, - positions: Vec::new(), - worktree_id: worktree_id.to_usize(), - path, - is_dir: false, // File finder doesn't support directories - path_prefix: "".into(), - distance_to_relative_ancestor: usize::MAX, - }; - if let Some(found_path_match) = &panel_match { - path_match - .positions - .extend(found_path_match.0.positions.iter()) - } - - self.labels_for_path_match(&path_match) + ( + entry_path + .absolute + .file_name() + .map_or(String::new(), |f| f.to_string_lossy().into_owned()), + Vec::new(), + entry_path.absolute.parent().map_or(String::new(), |path| { + path.to_string_lossy().into_owned() + path_style.separator() + }), + Vec::new(), + ) } } - Match::Search(path_match) => self.labels_for_path_match(&path_match.0), + Match::Search(path_match) => self.labels_for_path_match(&path_match.0, path_style), Match::CreateNew(project_path) => ( - format!("Create file: {}", project_path.path.display()), + format!("Create file: {}", project_path.path.display(path_style)), 
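// The label hunks above route every user-visible path through a project-level path
// style instead of the host OS separator, while the relative paths themselves stay
// '/'-delimited. A minimal standalone sketch of that storage/display split follows;
// `PathStyle` and `display_rel_path` here are illustrative stand-ins, not Zed's types.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum PathStyle {
    Posix,
    Windows,
}

impl PathStyle {
    fn separator(self) -> &'static str {
        match self {
            PathStyle::Posix => "/",
            PathStyle::Windows => "\\",
        }
    }
}

// Relative paths are stored '/'-delimited; only display converts separators.
fn display_rel_path(rel_path: &str, style: PathStyle) -> String {
    match style {
        PathStyle::Posix => rel_path.to_string(),
        PathStyle::Windows => rel_path.replace('/', "\\"),
    }
}

fn main() {
    let label = format!(
        "Create file: {}",
        display_rel_path("the-parent-dirb/filec", PathStyle::Windows)
    );
    assert_eq!(label, "Create file: the-parent-dirb\\filec");

    // A directory prefix rendered for a label gets the style's separator appended,
    // mirroring `prefix.display(path_style).to_string() + path_style.separator()`.
    let prefix = format!(
        "{}{}",
        display_rel_path("test", PathStyle::Posix),
        PathStyle::Posix.separator()
    );
    assert_eq!(prefix, "test/");
}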
vec![], String::from(""), vec![], ), }; - if file_name_positions.is_empty() - && let Some(user_home_path) = std::env::var("HOME").ok() - { - let user_home_path = user_home_path.trim(); - if !user_home_path.is_empty() && full_path.starts_with(user_home_path) { + if file_name_positions.is_empty() { + let user_home_path = util::paths::home_dir().to_string_lossy(); + if !user_home_path.is_empty() && full_path.starts_with(&*user_home_path) { full_path.replace_range(0..user_home_path.len(), "~"); full_path_positions.retain_mut(|pos| { if *pos >= user_home_path.len() { @@ -1147,17 +1122,13 @@ impl FileFinderDelegate { fn labels_for_path_match( &self, path_match: &PathMatch, + path_style: PathStyle, ) -> (String, Vec, String, Vec) { - let path = &path_match.path; - let path_string = path.to_string_lossy(); - let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join(""); + let full_path = path_match.path_prefix.join(&path_match.path); let mut path_positions = path_match.positions.clone(); - let file_name = path.file_name().map_or_else( - || path_match.path_prefix.to_string(), - |file_name| file_name.to_string_lossy().to_string(), - ); - let file_name_start = path_match.path_prefix.len() + path_string.len() - file_name.len(); + let file_name = full_path.file_name().unwrap_or(""); + let file_name_start = full_path.as_str().len() - file_name.len(); let file_name_positions = path_positions .iter() .filter_map(|pos| { @@ -1167,12 +1138,33 @@ impl FileFinderDelegate { None } }) - .collect(); + .collect::>(); - let full_path = full_path.trim_end_matches(&file_name).to_string(); + let full_path = full_path + .display(path_style) + .trim_end_matches(&file_name) + .to_string(); path_positions.retain(|idx| *idx < full_path.len()); - (file_name, file_name_positions, full_path, path_positions) + debug_assert!( + file_name_positions + .iter() + .all(|ix| file_name[*ix..].chars().next().is_some()), + "invalid file name positions {file_name:?} {file_name_positions:?}" + ); + debug_assert!( + path_positions + .iter() + .all(|ix| full_path[*ix..].chars().next().is_some()), + "invalid path positions {full_path:?} {path_positions:?}" + ); + + ( + file_name.to_string(), + file_name_positions, + full_path, + path_positions, + ) } fn lookup_absolute_path( @@ -1210,8 +1202,8 @@ impl FileFinderDelegate { score: 1.0, positions: Vec::new(), worktree_id: worktree.read(cx).id().to_usize(), - path: Arc::from(relative_path), - path_prefix: "".into(), + path: relative_path, + path_prefix: RelPath::empty().into(), is_dir: false, // File finder doesn't support directories distance_to_relative_ancestor: usize::MAX, })); @@ -1333,7 +1325,7 @@ impl PickerDelegate for FileFinderDelegate { .all(|worktree| { worktree .read(cx) - .entry_for_path(Path::new("a")) + .entry_for_path(RelPath::new("a").unwrap()) .is_none_or(|entry| !entry.is_dir()) }) { @@ -1351,7 +1343,7 @@ impl PickerDelegate for FileFinderDelegate { .all(|worktree| { worktree .read(cx) - .entry_for_path(Path::new("b")) + .entry_for_path(RelPath::new("b").unwrap()) .is_none_or(|entry| !entry.is_dir()) }) { @@ -1381,8 +1373,8 @@ impl PickerDelegate for FileFinderDelegate { project .worktree_for_id(history_item.project.worktree_id, cx) .is_some() - || ((project.is_local() || project.is_via_remote_server()) - && history_item.absolute.is_some()) + || project.is_local() + || project.is_via_remote_server() }), self.currently_opened_path.as_ref(), None, @@ -1397,13 +1389,7 @@ impl PickerDelegate for FileFinderDelegate { Task::ready(()) } else { let path_position = 
PathWithPosition::parse_str(raw_query); - - #[cfg(windows)] - let raw_query = raw_query.trim().to_owned().replace("/", "\\"); - #[cfg(not(windows))] - let raw_query = raw_query.trim(); - - let raw_query = raw_query.trim_end_matches(':').to_owned(); + let raw_query = raw_query.trim().trim_end_matches(':').to_owned(); let path = path_position.path.to_str(); let path_trimmed = path.unwrap_or(&raw_query).trim_end_matches(':'); let file_query_end = if path_trimmed == raw_query { @@ -1505,38 +1491,18 @@ impl PickerDelegate for FileFinderDelegate { window, cx, ) + } else if secondary { + workspace.split_abs_path(path.absolute.clone(), false, window, cx) } else { - match path.absolute.as_ref() { - Some(abs_path) => { - if secondary { - workspace.split_abs_path( - abs_path.to_path_buf(), - false, - window, - cx, - ) - } else { - workspace.open_abs_path( - abs_path.to_path_buf(), - OpenOptions { - visible: Some(OpenVisible::None), - ..Default::default() - }, - window, - cx, - ) - } - } - None => split_or_open( - workspace, - ProjectPath { - worktree_id, - path: Arc::clone(&path.project.path), - }, - window, - cx, - ), - } + workspace.open_abs_path( + path.absolute.clone(), + OpenOptions { + visible: Some(OpenVisible::None), + ..Default::default() + }, + window, + cx, + ) } } Match::Search(m) => split_or_open( @@ -1615,7 +1581,7 @@ impl PickerDelegate for FileFinderDelegate { .size(IconSize::Small) .into_any_element(), }; - let (file_name_label, full_path_label) = self.labels_for_match(path_match, window, cx, ix); + let (file_name_label, full_path_label) = self.labels_for_match(path_match, window, cx); let file_icon = maybe!({ if !settings.file_icons { diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index cd0f203d6a300b4039df74a646bf0a9d56818347..75b2101101bcdddf4112f6ea1f3d864f71924aa2 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -4,10 +4,10 @@ use super::*; use editor::Editor; use gpui::{Entity, TestAppContext, VisualTestContext}; use menu::{Confirm, SelectNext, SelectPrevious}; -use pretty_assertions::assert_eq; +use pretty_assertions::{assert_eq, assert_matches}; use project::{FS_WATCH_LATENCY, RemoveOptions}; use serde_json::json; -use util::path; +use util::{path, rel_path::rel_path}; use workspace::{AppState, CloseActiveItem, OpenOptions, ToggleFileFinder, Workspace}; #[ctor::ctor] @@ -77,8 +77,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 0.5, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("b0.5")), - path_prefix: Arc::default(), + path: rel_path("b0.5").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -86,8 +86,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 1.0, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("c1.0")), - path_prefix: Arc::default(), + path: rel_path("c1.0").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -95,8 +95,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 1.0, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("a1.0")), - path_prefix: Arc::default(), + path: rel_path("a1.0").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -104,8 +104,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 0.5, positions: Vec::new(), worktree_id: 0, - path: 
Arc::from(Path::new("a0.5")), - path_prefix: Arc::default(), + path: rel_path("a0.5").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -113,8 +113,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 1.0, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("b1.0")), - path_prefix: Arc::default(), + path: rel_path("b1.0").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -128,8 +128,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 1.0, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("a1.0")), - path_prefix: Arc::default(), + path: rel_path("a1.0").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -137,8 +137,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 1.0, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("b1.0")), - path_prefix: Arc::default(), + path: rel_path("b1.0").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -146,8 +146,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 1.0, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("c1.0")), - path_prefix: Arc::default(), + path: rel_path("c1.0").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -155,8 +155,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 0.5, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("a0.5")), - path_prefix: Arc::default(), + path: rel_path("a0.5").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -164,8 +164,8 @@ fn test_custom_project_search_ordering_in_file_finder() { score: 0.5, positions: Vec::new(), worktree_id: 0, - path: Arc::from(Path::new("b0.5")), - path_prefix: Arc::default(), + path: rel_path("b0.5").into(), + path_prefix: rel_path("").into(), distance_to_relative_ancestor: 0, is_dir: false, }), @@ -366,7 +366,7 @@ async fn test_absolute_paths(cx: &mut TestAppContext) { picker.update(cx, |picker, _| { assert_eq!( collect_search_matches(picker).search_paths_only(), - vec![PathBuf::from("a/b/file2.txt")], + vec![rel_path("a/b/file2.txt").into()], "Matching abs path should be the only match" ) }); @@ -388,7 +388,7 @@ async fn test_absolute_paths(cx: &mut TestAppContext) { picker.update(cx, |picker, _| { assert_eq!( collect_search_matches(picker).search_paths_only(), - Vec::::new(), + Vec::new(), "Mismatching abs path should produce no matches" ) }); @@ -421,7 +421,7 @@ async fn test_complex_path(cx: &mut TestAppContext) { assert_eq!(picker.delegate.matches.len(), 2); assert_eq!( collect_search_matches(picker).search_paths_only(), - vec![PathBuf::from("其他/S数据表格/task.xlsx")], + vec![rel_path("其他/S数据表格/task.xlsx").into()], ) }); cx.dispatch_action(Confirm); @@ -713,13 +713,13 @@ async fn test_ignored_root(cx: &mut TestAppContext) { assert_eq!( matches.search, vec![ - PathBuf::from("ignored-root/hi"), - PathBuf::from("tracked-root/hi"), - PathBuf::from("ignored-root/hiccup"), - PathBuf::from("tracked-root/hiccup"), - PathBuf::from("ignored-root/height"), - PathBuf::from("ignored-root/happiness"), - PathBuf::from("tracked-root/happiness"), + rel_path("ignored-root/hi").into(), + rel_path("tracked-root/hi").into(), + rel_path("ignored-root/hiccup").into(), + rel_path("tracked-root/hiccup").into(), + 
rel_path("ignored-root/height").into(), + rel_path("ignored-root/happiness").into(), + rel_path("tracked-root/happiness").into(), ], "All ignored files that were indexed are found for default ignored mode" ); @@ -738,14 +738,14 @@ async fn test_ignored_root(cx: &mut TestAppContext) { assert_eq!( matches.search, vec![ - PathBuf::from("ignored-root/hi"), - PathBuf::from("tracked-root/hi"), - PathBuf::from("ignored-root/hiccup"), - PathBuf::from("tracked-root/hiccup"), - PathBuf::from("ignored-root/height"), - PathBuf::from("tracked-root/height"), - PathBuf::from("ignored-root/happiness"), - PathBuf::from("tracked-root/happiness"), + rel_path("ignored-root/hi").into(), + rel_path("tracked-root/hi").into(), + rel_path("ignored-root/hiccup").into(), + rel_path("tracked-root/hiccup").into(), + rel_path("ignored-root/height").into(), + rel_path("tracked-root/height").into(), + rel_path("ignored-root/happiness").into(), + rel_path("tracked-root/happiness").into(), ], "All ignored files should be found, for the toggled on ignored mode" ); @@ -765,9 +765,9 @@ async fn test_ignored_root(cx: &mut TestAppContext) { assert_eq!( matches.search, vec![ - PathBuf::from("tracked-root/hi"), - PathBuf::from("tracked-root/hiccup"), - PathBuf::from("tracked-root/happiness"), + rel_path("tracked-root/hi").into(), + rel_path("tracked-root/hiccup").into(), + rel_path("tracked-root/happiness").into(), ], "Only non-ignored files should be found for the turned off ignored mode" ); @@ -812,13 +812,13 @@ async fn test_ignored_root(cx: &mut TestAppContext) { assert_eq!( matches.search, vec![ - PathBuf::from("ignored-root/hi"), - PathBuf::from("tracked-root/hi"), - PathBuf::from("ignored-root/hiccup"), - PathBuf::from("tracked-root/hiccup"), - PathBuf::from("ignored-root/height"), - PathBuf::from("ignored-root/happiness"), - PathBuf::from("tracked-root/happiness"), + rel_path("ignored-root/hi").into(), + rel_path("tracked-root/hi").into(), + rel_path("ignored-root/hiccup").into(), + rel_path("tracked-root/hiccup").into(), + rel_path("ignored-root/height").into(), + rel_path("ignored-root/happiness").into(), + rel_path("tracked-root/happiness").into(), ], "Only for the worktree with the ignored root, all indexed ignored files are found in the auto ignored mode" ); @@ -838,16 +838,16 @@ async fn test_ignored_root(cx: &mut TestAppContext) { assert_eq!( matches.search, vec![ - PathBuf::from("ignored-root/hi"), - PathBuf::from("tracked-root/hi"), - PathBuf::from("ignored-root/hiccup"), - PathBuf::from("tracked-root/hiccup"), - PathBuf::from("ignored-root/height"), - PathBuf::from("tracked-root/height"), - PathBuf::from("tracked-root/heights/height_1"), - PathBuf::from("tracked-root/heights/height_2"), - PathBuf::from("ignored-root/happiness"), - PathBuf::from("tracked-root/happiness"), + rel_path("ignored-root/hi").into(), + rel_path("tracked-root/hi").into(), + rel_path("ignored-root/hiccup").into(), + rel_path("tracked-root/hiccup").into(), + rel_path("ignored-root/height").into(), + rel_path("tracked-root/height").into(), + rel_path("tracked-root/heights/height_1").into(), + rel_path("tracked-root/heights/height_2").into(), + rel_path("ignored-root/happiness").into(), + rel_path("tracked-root/happiness").into(), ], "All ignored files that were indexed are found in the turned on ignored mode" ); @@ -867,9 +867,9 @@ async fn test_ignored_root(cx: &mut TestAppContext) { assert_eq!( matches.search, vec![ - PathBuf::from("tracked-root/hi"), - PathBuf::from("tracked-root/hiccup"), - PathBuf::from("tracked-root/happiness"), + 
rel_path("tracked-root/hi").into(), + rel_path("tracked-root/hiccup").into(), + rel_path("tracked-root/happiness").into(), ], "Only non-ignored files should be found for the turned off ignored mode" ); @@ -910,7 +910,7 @@ async fn test_single_file_worktrees(cx: &mut TestAppContext) { assert_eq!(matches.len(), 1); let (file_name, file_name_positions, full_path, full_path_positions) = - delegate.labels_for_path_match(&matches[0]); + delegate.labels_for_path_match(&matches[0], PathStyle::local()); assert_eq!(file_name, "the-file"); assert_eq!(file_name_positions, &[0, 1, 4]); assert_eq!(full_path, ""); @@ -968,7 +968,7 @@ async fn test_create_file_for_multiple_worktrees(cx: &mut TestAppContext) { let b_path = ProjectPath { worktree_id: worktree_id2, - path: Arc::from(Path::new(path!("the-parent-dirb/fileb"))), + path: rel_path("the-parent-dirb/fileb").into(), }; workspace .update_in(cx, |workspace, window, cx| { @@ -1001,7 +1001,7 @@ async fn test_create_file_for_multiple_worktrees(cx: &mut TestAppContext) { project_path, Some(ProjectPath { worktree_id: worktree_id2, - path: Arc::from(Path::new(path!("the-parent-dirb/filec"))) + path: rel_path("the-parent-dirb/filec").into() }) ); }); @@ -1038,10 +1038,7 @@ async fn test_create_file_no_focused_with_multiple_worktrees(cx: &mut TestAppCon let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); let (_worktree_id1, worktree_id2) = cx.read(|cx| { let worktrees = workspace.read(cx).worktrees(cx).collect::>(); - ( - WorktreeId::from_usize(worktrees[0].entity_id().as_u64() as usize), - WorktreeId::from_usize(worktrees[1].entity_id().as_u64() as usize), - ) + (worktrees[0].read(cx).id(), worktrees[1].read(cx).id()) }); let finder = open_file_picker(&workspace, cx); @@ -1065,7 +1062,7 @@ async fn test_create_file_no_focused_with_multiple_worktrees(cx: &mut TestAppCon project_path, Some(ProjectPath { worktree_id: worktree_id2, - path: Arc::from(Path::new("filec")) + path: rel_path("filec").into() }) ); }); @@ -1103,7 +1100,7 @@ async fn test_path_distance_ordering(cx: &mut TestAppContext) { // so that one should be sorted earlier let b_path = ProjectPath { worktree_id, - path: Arc::from(Path::new("dir2/b.txt")), + path: rel_path("dir2/b.txt").into(), }; workspace .update_in(cx, |workspace, window, cx| { @@ -1121,8 +1118,8 @@ async fn test_path_distance_ordering(cx: &mut TestAppContext) { finder.update(cx, |picker, _| { let matches = collect_search_matches(picker).search_paths_only(); - assert_eq!(matches[0].as_path(), Path::new("dir2/a.txt")); - assert_eq!(matches[1].as_path(), Path::new("dir1/a.txt")); + assert_eq!(matches[0].as_ref(), rel_path("dir2/a.txt")); + assert_eq!(matches[1].as_ref(), rel_path("dir1/a.txt")); }); } @@ -1207,9 +1204,9 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { vec![FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/first.rs")), + path: rel_path("test/first.rs").into(), }, - Some(PathBuf::from(path!("/src/test/first.rs"))) + PathBuf::from(path!("/src/test/first.rs")) )], "Should show 1st opened item in the history when opening the 2nd item" ); @@ -1222,16 +1219,16 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/second.rs")), + path: rel_path("test/second.rs").into(), }, - Some(PathBuf::from(path!("/src/test/second.rs"))) + PathBuf::from(path!("/src/test/second.rs")) ), FoundPath::new( ProjectPath { worktree_id, - path: 
Arc::from(Path::new("test/first.rs")), + path: rel_path("test/first.rs").into(), }, - Some(PathBuf::from(path!("/src/test/first.rs"))) + PathBuf::from(path!("/src/test/first.rs")) ), ], "Should show 1st and 2nd opened items in the history when opening the 3rd item. \ @@ -1246,23 +1243,23 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/third.rs")), + path: rel_path("test/third.rs").into(), }, - Some(PathBuf::from(path!("/src/test/third.rs"))) + PathBuf::from(path!("/src/test/third.rs")) ), FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/second.rs")), + path: rel_path("test/second.rs").into(), }, - Some(PathBuf::from(path!("/src/test/second.rs"))) + PathBuf::from(path!("/src/test/second.rs")) ), FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/first.rs")), + path: rel_path("test/first.rs").into(), }, - Some(PathBuf::from(path!("/src/test/first.rs"))) + PathBuf::from(path!("/src/test/first.rs")) ), ], "Should show 1st, 2nd and 3rd opened items in the history when opening the 2nd item again. \ @@ -1277,23 +1274,23 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/second.rs")), + path: rel_path("test/second.rs").into(), }, - Some(PathBuf::from(path!("/src/test/second.rs"))) + PathBuf::from(path!("/src/test/second.rs")) ), FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/third.rs")), + path: rel_path("test/third.rs").into(), }, - Some(PathBuf::from(path!("/src/test/third.rs"))) + PathBuf::from(path!("/src/test/third.rs")) ), FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/first.rs")), + path: rel_path("test/first.rs").into(), }, - Some(PathBuf::from(path!("/src/test/first.rs"))) + PathBuf::from(path!("/src/test/first.rs")) ), ], "Should show 1st, 2nd and 3rd opened items in the history when opening the 3rd item again. \ @@ -1301,6 +1298,62 @@ async fn test_query_history(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_history_match_positions(cx: &mut gpui::TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/src"), + json!({ + "test": { + "first.rs": "// First Rust file", + "second.rs": "// Second Rust file", + "third.rs": "// Third Rust file", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; + let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + workspace.update_in(cx, |_workspace, window, cx| window.focused(cx)); + + open_close_queried_buffer("efir", 1, "first.rs", &workspace, cx).await; + let history = open_close_queried_buffer("second", 1, "second.rs", &workspace, cx).await; + assert_eq!(history.len(), 1); + + let picker = open_file_picker(&workspace, cx); + cx.simulate_input("fir"); + picker.update_in(cx, |finder, window, cx| { + let matches = &finder.delegate.matches.matches; + assert_matches!( + matches.as_slice(), + [Match::History { .. }, Match::CreateNew { .. 
}] + ); + assert_eq!( + matches[0].panel_match().unwrap().0.path.as_ref(), + rel_path("test/first.rs") + ); + assert_eq!(matches[0].panel_match().unwrap().0.positions, &[5, 6, 7]); + + let (file_label, path_label) = + finder + .delegate + .labels_for_match(&finder.delegate.matches.matches[0], window, cx); + assert_eq!(file_label.text(), "first.rs"); + assert_eq!(file_label.highlight_indices(), &[0, 1, 2]); + assert_eq!( + path_label.text(), + format!("test{}", PathStyle::local().separator()) + ); + assert_eq!(path_label.highlight_indices(), &[] as &[usize]); + }); +} + #[gpui::test] async fn test_external_files_history(cx: &mut gpui::TestAppContext) { let app_state = init_test(cx); @@ -1392,9 +1445,9 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { vec![FoundPath::new( ProjectPath { worktree_id: external_worktree_id, - path: Arc::from(Path::new("")), + path: rel_path("").into(), }, - Some(PathBuf::from(path!("/external-src/test/third.rs"))) + PathBuf::from(path!("/external-src/test/third.rs")) )], "Should show external file with its full path in the history after it was open" ); @@ -1407,16 +1460,16 @@ async fn test_external_files_history(cx: &mut gpui::TestAppContext) { FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/second.rs")), + path: rel_path("test/second.rs").into(), }, - Some(PathBuf::from(path!("/src/test/second.rs"))) + PathBuf::from(path!("/src/test/second.rs")) ), FoundPath::new( ProjectPath { worktree_id: external_worktree_id, - path: Arc::from(Path::new("")), + path: rel_path("").into(), }, - Some(PathBuf::from(path!("/external-src/test/third.rs"))) + PathBuf::from(path!("/external-src/test/third.rs")) ), ], "Should keep external file with history updates", @@ -1529,12 +1582,12 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { assert_eq!(history_match, &FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/first.rs")), + path: rel_path("test/first.rs").into(), }, - Some(PathBuf::from(path!("/src/test/first.rs"))) + PathBuf::from(path!("/src/test/first.rs")), )); assert_eq!(matches.search.len(), 1, "Only one non-history item contains {first_query}, it should be present"); - assert_eq!(matches.search.first().unwrap(), Path::new("test/fourth.rs")); + assert_eq!(matches.search.first().unwrap().as_ref(), rel_path("test/fourth.rs")); }); let second_query = "fsdasdsa"; @@ -1572,12 +1625,12 @@ async fn test_search_preserves_history_items(cx: &mut gpui::TestAppContext) { assert_eq!(history_match, &FoundPath::new( ProjectPath { worktree_id, - path: Arc::from(Path::new("test/first.rs")), + path: rel_path("test/first.rs").into(), }, - Some(PathBuf::from(path!("/src/test/first.rs"))) + PathBuf::from(path!("/src/test/first.rs")) )); assert_eq!(matches.search.len(), 1, "Only one non-history item contains {first_query_again}, it should be present, even after non-matching query"); - assert_eq!(matches.search.first().unwrap(), Path::new("test/fourth.rs")); + assert_eq!(matches.search.first().unwrap().as_ref(), rel_path("test/fourth.rs")); }); } @@ -1626,13 +1679,16 @@ async fn test_search_sorts_history_items(cx: &mut gpui::TestAppContext) { let search_matches = collect_search_matches(finder); assert_eq!( search_matches.history, - vec![PathBuf::from("test/1_qw"), PathBuf::from("test/6_qwqwqw"),], + vec![ + rel_path("test/1_qw").into(), + rel_path("test/6_qwqwqw").into() + ], ); assert_eq!( search_matches.search, vec![ - PathBuf::from("test/5_qwqwqw"), - PathBuf::from("test/7_qwqwqw"), + 
rel_path("test/5_qwqwqw").into(), + rel_path("test/7_qwqwqw").into() ], ); }); @@ -2083,10 +2139,10 @@ async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppCo assert_eq!( search_entries, vec![ - PathBuf::from("collab_ui/collab_ui.rs"), - PathBuf::from("collab_ui/first.rs"), - PathBuf::from("collab_ui/third.rs"), - PathBuf::from("collab_ui/second.rs"), + rel_path("collab_ui/collab_ui.rs").into(), + rel_path("collab_ui/first.rs").into(), + rel_path("collab_ui/third.rs").into(), + rel_path("collab_ui/second.rs").into(), ], "Despite all search results having the same directory name, the most matching one should be on top" ); @@ -2135,8 +2191,8 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext) assert_eq!( collect_search_matches(picker).history, vec![ - PathBuf::from("test/first.rs"), - PathBuf::from("test/third.rs"), + rel_path("test/first.rs").into(), + rel_path("test/third.rs").into(), ], "Should have all opened files in the history, except the ones that do not exist on disk" ); @@ -2766,15 +2822,15 @@ fn active_file_picker( #[derive(Debug, Default)] struct SearchEntries { - history: Vec, + history: Vec>, history_found_paths: Vec, - search: Vec, + search: Vec>, search_matches: Vec, } impl SearchEntries { #[track_caller] - fn search_paths_only(self) -> Vec { + fn search_paths_only(self) -> Vec> { assert!( self.history.is_empty(), "Should have no history matches, but got: {:?}", @@ -2802,20 +2858,15 @@ fn collect_search_matches(picker: &Picker) -> SearchEntries path: history_path, panel_match: path_match, } => { - search_entries.history.push( - path_match - .as_ref() - .map(|path_match| { - Path::new(path_match.0.path_prefix.as_ref()).join(&path_match.0.path) - }) - .unwrap_or_else(|| { - history_path - .absolute - .as_deref() - .unwrap_or_else(|| &history_path.project.path) - .to_path_buf() - }), - ); + if let Some(path_match) = path_match.as_ref() { + search_entries + .history + .push(path_match.0.path_prefix.join(&path_match.0.path)); + } else { + // This occurs when the query is empty and we show history matches + // that are outside the project. + panic!("currently not exercised in tests"); + } search_entries .history_found_paths .push(history_path.clone()); @@ -2823,7 +2874,7 @@ fn collect_search_matches(picker: &Picker) -> SearchEntries Match::Search(path_match) => { search_entries .search - .push(Path::new(path_match.0.path_prefix.as_ref()).join(&path_match.0.path)); + .push(path_match.0.path_prefix.join(&path_match.0.path)); search_entries.search_matches.push(path_match.0.clone()); } Match::CreateNew(_) => {} @@ -2858,12 +2909,11 @@ fn assert_match_at_position( .get(match_index) .unwrap_or_else(|| panic!("Finder has no match for index {match_index}")); let match_file_name = match &match_item { - Match::History { path, .. } => path.absolute.as_deref().unwrap().file_name(), + Match::History { path, .. 
} => path.absolute.file_name().and_then(|s| s.to_str()), Match::Search(path_match) => path_match.0.path.file_name(), Match::CreateNew(project_path) => project_path.path.file_name(), } - .unwrap() - .to_string_lossy(); + .unwrap(); assert_eq!(match_file_name, expected_file_name); } @@ -2901,11 +2951,11 @@ async fn test_filename_precedence(cx: &mut TestAppContext) { assert_eq!( search_matches, vec![ - PathBuf::from("routes/+layout.svelte"), - PathBuf::from("layout/app.css"), - PathBuf::from("layout/app.d.ts"), - PathBuf::from("layout/app.html"), - PathBuf::from("layout/+page.svelte"), + rel_path("routes/+layout.svelte").into(), + rel_path("layout/app.css").into(), + rel_path("layout/app.d.ts").into(), + rel_path("layout/app.html").into(), + rel_path("layout/+page.svelte").into(), ], "File with 'layout' in filename should be prioritized over files in 'layout' directory" ); diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 63f2f37ab18cbd1ab210685eeda01ac535a8118f..b0417b1d13fc4f82c8b16b0ac87249405b6f4129 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -7,7 +7,7 @@ use picker::{Picker, PickerDelegate}; use project::{DirectoryItem, DirectoryLister}; use settings::Settings; use std::{ - path::{self, MAIN_SEPARATOR_STR, Path, PathBuf}, + path::{self, Path, PathBuf}, sync::{ Arc, atomic::{self, AtomicBool}, @@ -217,7 +217,7 @@ impl OpenPathPrompt { ) { workspace.toggle_modal(window, cx, |window, cx| { let delegate = - OpenPathDelegate::new(tx, lister.clone(), creating_path, PathStyle::current()); + OpenPathDelegate::new(tx, lister.clone(), creating_path, PathStyle::local()); let picker = Picker::uniform_list(delegate, window, cx).width(rems(34.)); let query = lister.default_query(cx); picker.set_query(query, window, cx); @@ -822,7 +822,7 @@ impl PickerDelegate for OpenPathDelegate { } fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { - Arc::from(format!("[directory{MAIN_SEPARATOR_STR}]filename.ext")) + Arc::from(format!("[directory{}]filename.ext", self.path_style.separator()).as_str()) } fn separators_after_indices(&self) -> Vec { diff --git a/crates/file_finder/src/open_path_prompt_tests.rs b/crates/file_finder/src/open_path_prompt_tests.rs index fd7cc1c6c612d28b1cc8f2352f6dbb0a254e7e98..5e8874cd01e06bb05f4ff6918bc02ea6883ea064 100644 --- a/crates/file_finder/src/open_path_prompt_tests.rs +++ b/crates/file_finder/src/open_path_prompt_tests.rs @@ -37,7 +37,7 @@ async fn test_open_path_prompt(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, false, PathStyle::current(), cx); + let (picker, cx) = build_open_path_prompt(project, false, PathStyle::local(), cx); insert_query(path!("sadjaoislkdjasldj"), &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), Vec::::new()); @@ -119,7 +119,7 @@ async fn test_open_path_prompt_completion(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, false, PathStyle::current(), cx); + let (picker, cx) = build_open_path_prompt(project, false, PathStyle::local(), cx); // Confirm completion for the query "/root", since it's a directory, it should add a trailing slash. 
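// The open_path_prompt changes above thread a `PathStyle` through the delegate
// (`PathStyle::local()` instead of `PathStyle::current()`) and build the placeholder
// from `path_style.separator()` rather than the compile-time MAIN_SEPARATOR_STR,
// presumably so a prompt can show the separator of the project it targets. A small
// sketch of that placeholder shape; the helper below is hypothetical, not Zed's API.
fn placeholder_text(separator: &str) -> String {
    format!("[directory{separator}]filename.ext")
}

fn main() {
    assert_eq!(placeholder_text("/"), "[directory/]filename.ext");
    assert_eq!(placeholder_text("\\"), "[directory\\]filename.ext");
}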
let query = path!("/root"); @@ -227,7 +227,7 @@ async fn test_open_path_prompt_on_windows(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, false, PathStyle::current(), cx); + let (picker, cx) = build_open_path_prompt(project, false, PathStyle::local(), cx); // Support both forward and backward slashes. let query = "C:/root/"; @@ -372,7 +372,7 @@ async fn test_new_path_prompt(cx: &mut TestAppContext) { let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; - let (picker, cx) = build_open_path_prompt(project, true, PathStyle::current(), cx); + let (picker, cx) = build_open_path_prompt(project, true, PathStyle::local(), cx); insert_query(path!("/root"), &picker, cx).await; assert_eq!(collect_match_candidates(&picker, cx), vec!["root"]); diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index b608d0fec65a80057445fb3598102297f445ad4f..940210a7105a38baf472eafc4638f955b4acc6ae 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -17,6 +17,7 @@ use parking_lot::Mutex; use rope::Rope; use smol::future::FutureExt as _; use std::{path::PathBuf, sync::Arc}; +use util::{paths::PathStyle, rel_path::RelPath}; #[derive(Clone)] pub struct FakeGitRepository { @@ -82,7 +83,7 @@ impl GitRepository for FakeGitRepository { self.with_state_async(false, move |state| { state .index_contents - .get(path.as_ref()) + .get(&path) .context("not present in index") .cloned() }) @@ -97,7 +98,7 @@ impl GitRepository for FakeGitRepository { self.with_state_async(false, move |state| { state .head_contents - .get(path.as_ref()) + .get(&path) .context("not present in HEAD") .cloned() }) @@ -225,6 +226,7 @@ impl GitRepository for FakeGitRepository { .read_file_sync(path) .ok() .map(|content| String::from_utf8(content).unwrap())?; + let repo_path = RelPath::from_std_path(repo_path, PathStyle::local()).ok()?; Some((repo_path.into(), (content, is_ignored))) }) .collect(); @@ -386,7 +388,11 @@ impl GitRepository for FakeGitRepository { let contents = paths .into_iter() .map(|path| { - let abs_path = self.dot_git_path.parent().unwrap().join(&path); + let abs_path = self + .dot_git_path + .parent() + .unwrap() + .join(&path.as_std_path()); Box::pin(async move { (path.clone(), self.fs.load(&abs_path).await.ok()) }) }) .collect::>(); diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 198299617619363fa9d486042d1b803c3ede6f88..a1ee23cf5f33ea20d479d534d87596701dac1a16 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -47,7 +47,7 @@ use collections::{BTreeMap, btree_map}; use fake_git_repo::FakeGitRepositoryState; #[cfg(any(test, feature = "test-support"))] use git::{ - repository::RepoPath, + repository::{RepoPath, repo_path}, status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus}, }; #[cfg(any(test, feature = "test-support"))] @@ -1608,13 +1608,13 @@ impl FakeFs { .unwrap(); } - pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(RepoPath, String)]) { + pub fn set_index_for_repo(&self, dot_git: &Path, index_state: &[(&str, String)]) { self.with_git_state(dot_git, true, |state| { state.index_contents.clear(); state.index_contents.extend( index_state .iter() - .map(|(path, content)| (path.clone(), content.clone())), + .map(|(path, content)| (repo_path(path), content.clone())), ); }) .unwrap(); @@ -1623,7 +1623,7 @@ impl FakeFs { pub fn set_head_for_repo( &self, dot_git: &Path, - head_state: 
&[(RepoPath, String)], + head_state: &[(&str, String)], sha: impl Into, ) { self.with_git_state(dot_git, true, |state| { @@ -1631,50 +1631,22 @@ impl FakeFs { state.head_contents.extend( head_state .iter() - .map(|(path, content)| (path.clone(), content.clone())), + .map(|(path, content)| (repo_path(path), content.clone())), ); state.refs.insert("HEAD".into(), sha.into()); }) .unwrap(); } - pub fn set_git_content_for_repo( - &self, - dot_git: &Path, - head_state: &[(RepoPath, String, Option)], - ) { + pub fn set_head_and_index_for_repo(&self, dot_git: &Path, contents_by_path: &[(&str, String)]) { self.with_git_state(dot_git, true, |state| { state.head_contents.clear(); state.head_contents.extend( - head_state + contents_by_path .iter() - .map(|(path, head_content, _)| (path.clone(), head_content.clone())), + .map(|(path, contents)| (repo_path(path), contents.clone())), ); - state.index_contents.clear(); - state.index_contents.extend(head_state.iter().map( - |(path, head_content, index_content)| { - ( - path.clone(), - index_content.as_ref().unwrap_or(head_content).clone(), - ) - }, - )); - }) - .unwrap(); - } - - pub fn set_head_and_index_for_repo( - &self, - dot_git: &Path, - contents_by_path: &[(RepoPath, String)], - ) { - self.with_git_state(dot_git, true, |state| { - state.head_contents.clear(); - state.index_contents.clear(); - state.head_contents.extend(contents_by_path.iter().cloned()); - state - .index_contents - .extend(contents_by_path.iter().cloned()); + state.index_contents = state.head_contents.clone(); }) .unwrap(); } @@ -1689,7 +1661,7 @@ impl FakeFs { /// Put the given git repository into a state with the given status, /// by mutating the head, index, and unmerged state. - pub fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&Path, FileStatus)]) { + pub fn set_status_for_repo(&self, dot_git: &Path, statuses: &[(&str, FileStatus)]) { let workdir_path = dot_git.parent().unwrap(); let workdir_contents = self.files_with_contents(workdir_path); self.with_git_state(dot_git, true, |state| { @@ -1697,10 +1669,12 @@ impl FakeFs { state.head_contents.clear(); state.unmerged_paths.clear(); for (path, content) in workdir_contents { - let repo_path: RepoPath = path.strip_prefix(&workdir_path).unwrap().into(); + use util::{paths::PathStyle, rel_path::RelPath}; + + let repo_path: RepoPath = RelPath::from_std_path(path.strip_prefix(&workdir_path).unwrap(), PathStyle::local()).unwrap().into(); let status = statuses .iter() - .find_map(|(p, status)| (**p == *repo_path.0).then_some(status)); + .find_map(|(p, status)| (*p == repo_path.as_str()).then_some(status)); let mut content = String::from_utf8_lossy(&content).to_string(); let mut index_content = None; diff --git a/crates/fuzzy/Cargo.toml b/crates/fuzzy/Cargo.toml index 534d7d4db5bc2637f7b093f67cead7a3fa52b416..35e134236d619e51467ef96a204df3fc8cc7681c 100644 --- a/crates/fuzzy/Cargo.toml +++ b/crates/fuzzy/Cargo.toml @@ -17,3 +17,6 @@ gpui.workspace = true util.workspace = true log.workspace = true workspace-hack.workspace = true + +[dev-dependencies] +util = {workspace = true, features = ["test-support"]} diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index 88253d4848b4b3866b9380256eccf1826213cfd1..eb844e349821394785bb61a34600f04a6fa985eb 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -1,5 +1,5 @@ use std::{ - borrow::{Borrow, Cow}, + borrow::Borrow, collections::BTreeMap, sync::atomic::{self, AtomicBool}, }; @@ -27,7 +27,7 @@ pub struct Matcher<'a> { pub trait 
MatchCandidate { fn has_chars(&self, bag: CharBag) -> bool; - fn to_string(&self) -> Cow<'_, str>; + fn candidate_chars(&self) -> impl Iterator; } impl<'a> Matcher<'a> { @@ -83,7 +83,7 @@ impl<'a> Matcher<'a> { candidate_chars.clear(); lowercase_candidate_chars.clear(); extra_lowercase_chars.clear(); - for (i, c) in candidate.borrow().to_string().chars().enumerate() { + for (i, c) in candidate.borrow().candidate_chars().enumerate() { candidate_chars.push(c); let mut char_lowercased = c.to_lowercase().collect::>(); if char_lowercased.len() > 1 { @@ -202,8 +202,6 @@ impl<'a> Matcher<'a> { cur_score: f64, extra_lowercase_chars: &BTreeMap, ) -> f64 { - use std::path::MAIN_SEPARATOR; - if query_idx == self.query.len() { return 1.0; } @@ -245,17 +243,11 @@ impl<'a> Matcher<'a> { None => continue, } }; - let is_path_sep = path_char == MAIN_SEPARATOR; + let is_path_sep = path_char == '/'; if query_idx == 0 && is_path_sep { last_slash = j_regular; } - - #[cfg(not(target_os = "windows"))] - let need_to_score = - query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\'); - // `query_char == '\\'` breaks `test_match_path_entries` on Windows, `\` is only used as a path separator on Windows. - #[cfg(target_os = "windows")] let need_to_score = query_char == path_char || (is_path_sep && query_char == '_'); if need_to_score { let curr = match prefix.get(j_regular) { @@ -270,7 +262,7 @@ impl<'a> Matcher<'a> { None => path[j_regular - 1 - prefix.len()], }; - if last == MAIN_SEPARATOR { + if last == '/' { char_score = 0.9; } else if (last == '-' || last == '_' || last == ' ' || last.is_numeric()) || (last.is_lowercase() && curr.is_uppercase()) @@ -291,7 +283,7 @@ impl<'a> Matcher<'a> { // Apply a severe penalty if the case doesn't match. // This will make the exact matches have higher score than the case-insensitive and the // path insensitive matches. - if (self.smart_case || curr == MAIN_SEPARATOR) && self.query[query_idx] != curr { + if (self.smart_case || curr == '/') && self.query[query_idx] != curr { char_score *= 0.001; } @@ -348,13 +340,12 @@ impl<'a> Matcher<'a> { #[cfg(test)] mod tests { + use util::rel_path::{RelPath, rel_path}; + use crate::{PathMatch, PathMatchCandidate}; use super::*; - use std::{ - path::{Path, PathBuf}, - sync::Arc, - }; + use std::sync::Arc; #[test] fn test_get_last_positions() { @@ -376,7 +367,6 @@ mod tests { assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]); } - #[cfg(not(target_os = "windows"))] #[test] fn test_match_path_entries() { let paths = vec![ @@ -388,9 +378,9 @@ mod tests { "alphabravocharlie", "AlphaBravoCharlie", "thisisatestdir", - "/////ThisIsATestDir", - "/this/is/a/test/dir", - "/test/tiatd", + "ThisIsATestDir", + "this/is/a/test/dir", + "test/tiatd", ]; assert_eq!( @@ -404,63 +394,15 @@ mod tests { ); assert_eq!( match_single_path_query("t/i/a/t/d", false, &paths), - vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),] - ); - - assert_eq!( - match_single_path_query("tiatd", false, &paths), - vec![ - ("/test/tiatd", vec![6, 7, 8, 9, 10]), - ("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]), - ("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]), - ("thisisatestdir", vec![0, 2, 6, 7, 11]), - ] - ); - } - - /// todo(windows) - /// Now, on Windows, users can only use the backslash as a path separator. - /// I do want to support both the backslash and the forward slash as path separators on Windows. 
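// With candidate paths now stored as '/'-delimited relative paths, the matcher above
// scores against '/' unconditionally instead of std::path::MAIN_SEPARATOR, and (per
// the match_path_sets hunk later in this diff) backslashes in a query are rewritten to
// '/' when the project uses Windows-style paths. A standalone sketch of that
// normalization, assuming the behavior shown in the hunks rather than the exact code.
fn normalize_query(query: &str, windows_path_style: bool) -> Vec<char> {
    query
        .chars()
        .map(|c| {
            // Backslashes typed on a Windows-style project are treated as separators.
            if windows_path_style && c == '\\' { '/' } else { c }
        })
        .collect()
}

fn main() {
    assert_eq!(
        normalize_query(r"this\is\a\test\dir", true),
        "this/is/a/test/dir".chars().collect::<Vec<_>>()
    );
    // On a Posix-style project the backslash is left untouched.
    assert_eq!(normalize_query(r"a\b", false), vec!['a', '\\', 'b']);
}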
- #[cfg(target_os = "windows")] - #[test] - fn test_match_path_entries() { - let paths = vec![ - "", - "a", - "ab", - "abC", - "abcd", - "alphabravocharlie", - "AlphaBravoCharlie", - "thisisatestdir", - "\\\\\\\\\\ThisIsATestDir", - "\\this\\is\\a\\test\\dir", - "\\test\\tiatd", - ]; - - assert_eq!( - match_single_path_query("abc", false, &paths), - vec![ - ("abC", vec![0, 1, 2]), - ("abcd", vec![0, 1, 2]), - ("AlphaBravoCharlie", vec![0, 5, 10]), - ("alphabravocharlie", vec![4, 5, 10]), - ] - ); - assert_eq!( - match_single_path_query("t\\i\\a\\t\\d", false, &paths), - vec![( - "\\this\\is\\a\\test\\dir", - vec![1, 5, 6, 8, 9, 10, 11, 15, 16] - ),] + vec![("this/is/a/test/dir", vec![0, 4, 5, 7, 8, 9, 10, 14, 15]),] ); assert_eq!( match_single_path_query("tiatd", false, &paths), vec![ - ("\\test\\tiatd", vec![6, 7, 8, 9, 10]), - ("\\this\\is\\a\\test\\dir", vec![1, 6, 9, 11, 16]), - ("\\\\\\\\\\ThisIsATestDir", vec![5, 9, 11, 12, 16]), + ("test/tiatd", vec![5, 6, 7, 8, 9]), + ("ThisIsATestDir", vec![0, 4, 6, 7, 11]), + ("this/is/a/test/dir", vec![0, 5, 8, 10, 15]), ("thisisatestdir", vec![0, 2, 6, 7, 11]), ] ); @@ -491,7 +433,7 @@ mod tests { "aαbβ/cγdδ", "αβγδ/bcde", "c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", - "/d/🆒/h", + "d/🆒/h", ]; assert_eq!("1️⃣".len(), 7); assert_eq!( @@ -602,9 +544,9 @@ mod tests { let query = query.chars().collect::>(); let query_chars = CharBag::from(&lowercase_query[..]); - let path_arcs: Vec> = paths + let path_arcs: Vec> = paths .iter() - .map(|path| Arc::from(PathBuf::from(path))) + .map(|path| Arc::from(rel_path(path))) .collect::>(); let mut path_entries = Vec::new(); for (i, path) in paths.iter().enumerate() { @@ -632,8 +574,8 @@ mod tests { score, worktree_id: 0, positions: positions.clone(), - path: Arc::from(candidate.path), - path_prefix: "".into(), + path: candidate.path.into(), + path_prefix: RelPath::empty().into(), distance_to_relative_ancestor: usize::MAX, is_dir: false, }, @@ -647,7 +589,7 @@ mod tests { paths .iter() .copied() - .find(|p| result.path.as_ref() == Path::new(p)) + .find(|p| result.path.as_ref() == rel_path(p)) .unwrap(), result.positions, ) diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index de6284e957a5320b5eac15ad4ff23a8c4ff5b420..fa6d3f850465e62106d2b84f8f0a9be56c4ca19d 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -1,13 +1,12 @@ use gpui::BackgroundExecutor; use std::{ - borrow::Cow, cmp::{self, Ordering}, - path::Path, sync::{ Arc, atomic::{self, AtomicBool}, }, }; +use util::{paths::PathStyle, rel_path::RelPath}; use crate::{ CharBag, @@ -17,7 +16,7 @@ use crate::{ #[derive(Clone, Debug)] pub struct PathMatchCandidate<'a> { pub is_dir: bool, - pub path: &'a Path, + pub path: &'a RelPath, pub char_bag: CharBag, } @@ -26,8 +25,8 @@ pub struct PathMatch { pub score: f64, pub positions: Vec, pub worktree_id: usize, - pub path: Arc, - pub path_prefix: Arc, + pub path: Arc, + pub path_prefix: Arc, pub is_dir: bool, /// Number of steps removed from a shared parent with the relative path /// Used to order closer paths first in the search list @@ -41,8 +40,10 @@ pub trait PathMatchCandidateSet<'a>: Send + Sync { fn is_empty(&self) -> bool { self.len() == 0 } - fn prefix(&self) -> Arc; + fn root_is_file(&self) -> bool; + fn prefix(&self) -> Arc; fn candidates(&'a self, start: usize) -> Self::Candidates; + fn path_style(&self) -> PathStyle; } impl<'a> MatchCandidate for PathMatchCandidate<'a> { @@ -50,8 +51,8 @@ impl<'a> MatchCandidate for PathMatchCandidate<'a> { 
self.char_bag.is_superset(bag) } - fn to_string(&self) -> Cow<'a, str> { - self.path.to_string_lossy() + fn candidate_chars(&self) -> impl Iterator { + self.path.as_str().chars() } } @@ -109,8 +110,8 @@ pub fn match_fixed_path_set( worktree_id, positions: positions.clone(), is_dir: candidate.is_dir, - path: Arc::from(candidate.path), - path_prefix: Arc::default(), + path: candidate.path.into(), + path_prefix: RelPath::empty().into(), distance_to_relative_ancestor: usize::MAX, }, ); @@ -121,7 +122,7 @@ pub fn match_fixed_path_set( pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( candidate_sets: &'a [Set], query: &str, - relative_to: &Option>, + relative_to: &Option>, smart_case: bool, max_results: usize, cancel_flag: &AtomicBool, @@ -132,12 +133,27 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( return Vec::new(); } - let lowercase_query = query.to_lowercase().chars().collect::>(); - let query = query.chars().collect::>(); + let path_style = candidate_sets[0].path_style(); + + let query = query + .chars() + .map(|char| { + if path_style.is_windows() && char == '\\' { + '/' + } else { + char + } + }) + .collect::>(); + + let lowercase_query = query + .iter() + .map(|query| query.to_ascii_lowercase()) + .collect::>(); - let lowercase_query = &lowercase_query; let query = &query; - let query_char_bag = CharBag::from(&lowercase_query[..]); + let lowercase_query = &lowercase_query; + let query_char_bag = CharBag::from_iter(lowercase_query.iter().copied()); let num_cpus = executor.num_cpus().min(path_count); let segment_size = path_count.div_ceil(num_cpus); @@ -168,7 +184,11 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( let candidates = candidate_set.candidates(start).take(end - start); let worktree_id = candidate_set.id(); - let prefix = candidate_set.prefix().chars().collect::>(); + let mut prefix = + candidate_set.prefix().as_str().chars().collect::>(); + if !candidate_set.root_is_file() && !prefix.is_empty() { + prefix.push('/'); + } let lowercase_prefix = prefix .iter() .map(|c| c.to_ascii_lowercase()) @@ -219,7 +239,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( /// Compute the distance from a given path to some other path /// If there is no shared path, returns usize::MAX -fn distance_between_paths(path: &Path, relative_to: &Path) -> usize { +fn distance_between_paths(path: &RelPath, relative_to: &RelPath) -> usize { let mut path_components = path.components(); let mut relative_components = relative_to.components(); @@ -234,12 +254,12 @@ fn distance_between_paths(path: &Path, relative_to: &Path) -> usize { #[cfg(test)] mod tests { - use std::path::Path; + use util::rel_path::RelPath; use super::distance_between_paths; #[test] fn test_distance_between_paths_empty() { - distance_between_paths(Path::new(""), Path::new("")); + distance_between_paths(RelPath::empty(), RelPath::empty()); } } diff --git a/crates/fuzzy/src/strings.rs b/crates/fuzzy/src/strings.rs index 7c866de05c4566c060fa01a362931e1355cd8c37..54539840cfb0ca251428d9f78d5d134f16afdf4c 100644 --- a/crates/fuzzy/src/strings.rs +++ b/crates/fuzzy/src/strings.rs @@ -4,7 +4,7 @@ use crate::{ }; use gpui::BackgroundExecutor; use std::{ - borrow::{Borrow, Cow}, + borrow::Borrow, cmp::{self, Ordering}, iter, ops::Range, @@ -28,13 +28,13 @@ impl StringMatchCandidate { } } -impl<'a> MatchCandidate for &'a StringMatchCandidate { +impl MatchCandidate for &StringMatchCandidate { fn has_chars(&self, bag: CharBag) -> bool { self.char_bag.is_superset(bag) } - fn 
to_string(&self) -> Cow<'a, str> { - self.string.as_str().into() + fn candidate_chars(&self) -> impl Iterator { + self.string.chars() } } diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 24b2c44218120b1237fb42e04edc9b6784356c57..a06d5081b77672a7578e6fc74c6db56dd9705471 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -1,4 +1,5 @@ use crate::commit::get_messages; +use crate::repository::RepoPath; use crate::{GitRemote, Oid}; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; @@ -33,7 +34,7 @@ impl Blame { pub async fn for_path( git_binary: &Path, working_directory: &Path, - path: &Path, + path: &RepoPath, content: &Rope, remote_url: Option, ) -> Result { @@ -66,7 +67,7 @@ const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; async fn run_git_blame( git_binary: &Path, working_directory: &Path, - path: &Path, + path: &RepoPath, contents: &Rope, ) -> Result { let mut child = util::command::new_smol_command(git_binary) @@ -76,7 +77,7 @@ async fn run_git_blame( .arg("-w") .arg("--contents") .arg("-") - .arg(path.as_os_str()) + .arg(path.as_str()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) diff --git a/crates/git/src/commit.rs b/crates/git/src/commit.rs index 1a2aa7b43331aca97e6d9d26bac3569448aafc4a..50fd0ad2484c77bf227d6138a20481e11ec2e7f9 100644 --- a/crates/git/src/commit.rs +++ b/crates/git/src/commit.rs @@ -39,7 +39,7 @@ pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result impl Iterator { +pub fn parse_git_diff_name_status(content: &str) -> impl Iterator { let mut parts = content.split('\0'); std::iter::from_fn(move || { loop { @@ -51,13 +51,14 @@ pub fn parse_git_diff_name_status(content: &str) -> impl Iterator StatusCode::Deleted, _ => continue, }; - return Some((Path::new(path), status)); + return Some((path, status)); } }) } #[cfg(test)] mod tests { + use super::*; #[test] @@ -78,31 +79,19 @@ mod tests { assert_eq!( output, &[ - (Path::new("Cargo.lock"), StatusCode::Modified), - (Path::new("crates/project/Cargo.toml"), StatusCode::Modified), - ( - Path::new("crates/project/src/buffer_store.rs"), - StatusCode::Modified - ), - (Path::new("crates/project/src/git.rs"), StatusCode::Deleted), - ( - Path::new("crates/project/src/git_store.rs"), - StatusCode::Added - ), + ("Cargo.lock", StatusCode::Modified), + ("crates/project/Cargo.toml", StatusCode::Modified), + ("crates/project/src/buffer_store.rs", StatusCode::Modified), + ("crates/project/src/git.rs", StatusCode::Deleted), + ("crates/project/src/git_store.rs", StatusCode::Added), ( - Path::new("crates/project/src/git_store/git_traversal.rs"), + "crates/project/src/git_store/git_traversal.rs", StatusCode::Added, ), + ("crates/project/src/project.rs", StatusCode::Modified), + ("crates/project/src/worktree_store.rs", StatusCode::Modified), ( - Path::new("crates/project/src/project.rs"), - StatusCode::Modified - ), - ( - Path::new("crates/project/src/worktree_store.rs"), - StatusCode::Modified - ), - ( - Path::new("crates/project_panel/src/project_panel.rs"), + "crates/project_panel/src/project_panel.rs", StatusCode::Modified ), ] diff --git a/crates/git/src/git.rs b/crates/git/src/git.rs index 2028a0f374578d0c0f35bdc8c80ec09462ab0875..354614e32cd43aaf8bd677b0303d08b312045df0 100644 --- a/crates/git/src/git.rs +++ b/crates/git/src/git.rs @@ -12,22 +12,17 @@ use anyhow::{Context as _, Result}; pub use git2 as libgit; use gpui::{Action, actions}; pub use repository::RemoteCommandOutput; -pub use repository::WORK_DIRECTORY_REPO_PATH; 
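// `parse_git_diff_name_status` above now yields borrowed `&str` paths straight from
// the NUL-delimited `--name-status -z` output, which works because (as the repository
// code later in this diff notes) git prints '/'-delimited paths even on Windows. A
// self-contained sketch of the same parsing shape; the local `Status` enum is a
// stand-in for `git::status::StatusCode`.
#[derive(Debug, PartialEq, Eq)]
enum Status {
    Modified,
    Added,
    Deleted,
}

fn parse_name_status(content: &str) -> Vec<(&str, Status)> {
    let mut parts = content.split('\0');
    let mut out = Vec::new();
    // Entries come in (status, path) pairs separated by NUL bytes.
    while let (Some(status), Some(path)) = (parts.next(), parts.next()) {
        let status = match status {
            "M" => Status::Modified,
            "A" => Status::Added,
            "D" => Status::Deleted,
            _ => continue,
        };
        out.push((path, status));
    }
    out
}

fn main() {
    let raw = "M\0Cargo.lock\0A\0crates/project/src/git_store.rs\0D\0crates/project/src/git.rs\0";
    assert_eq!(
        parse_name_status(raw),
        vec![
            ("Cargo.lock", Status::Modified),
            ("crates/project/src/git_store.rs", Status::Added),
            ("crates/project/src/git.rs", Status::Deleted),
        ]
    );
}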
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use std::ffi::OsStr; use std::fmt; use std::str::FromStr; -use std::sync::LazyLock; - -pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git")); -pub static GITIGNORE: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".gitignore")); -pub static FSMONITOR_DAEMON: LazyLock<&'static OsStr> = - LazyLock::new(|| OsStr::new("fsmonitor--daemon")); -pub static LFS_DIR: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("lfs")); -pub static COMMIT_MESSAGE: LazyLock<&'static OsStr> = - LazyLock::new(|| OsStr::new("COMMIT_EDITMSG")); -pub static INDEX_LOCK: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("index.lock")); + +pub const DOT_GIT: &str = ".git"; +pub const GITIGNORE: &str = ".gitignore"; +pub const FSMONITOR_DAEMON: &str = "fsmonitor--daemon"; +pub const LFS_DIR: &str = "lfs"; +pub const COMMIT_MESSAGE: &str = "COMMIT_EDITMSG"; +pub const INDEX_LOCK: &str = "index.lock"; actions!( git, diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 88bfc41dfee4184a733e733eea77817c32f796e0..b455f5b14c7b1bb90fe93ecd94e51c4907a5d36f 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -12,12 +12,9 @@ use parking_lot::Mutex; use rope::Rope; use schemars::JsonSchema; use serde::Deserialize; -use std::borrow::{Borrow, Cow}; use std::ffi::{OsStr, OsString}; use std::io::prelude::*; -use std::path::Component; use std::process::{ExitStatus, Stdio}; -use std::sync::LazyLock; use std::{ cmp::Ordering, future, @@ -28,6 +25,8 @@ use std::{ use sum_tree::MapSeekTarget; use thiserror::Error; use util::command::{new_smol_command, new_std_command}; +use util::paths::PathStyle; +use util::rel_path::RelPath; use util::{ResultExt, paths}; use uuid::Uuid; @@ -719,16 +718,21 @@ impl GitRepository for RealGitRepository { let mut info_line = String::new(); let mut newline = [b'\0']; for (path, status_code) in changes { + // git-show outputs `/`-delimited paths even on Windows. + let Ok(rel_path) = RelPath::new(path) else { + continue; + }; + match status_code { StatusCode::Modified => { - writeln!(&mut stdin, "{commit}:{}", path.display())?; - writeln!(&mut stdin, "{parent_sha}:{}", path.display())?; + writeln!(&mut stdin, "{commit}:{path}")?; + writeln!(&mut stdin, "{parent_sha}:{path}")?; } StatusCode::Added => { - writeln!(&mut stdin, "{commit}:{}", path.display())?; + writeln!(&mut stdin, "{commit}:{path}")?; } StatusCode::Deleted => { - writeln!(&mut stdin, "{parent_sha}:{}", path.display())?; + writeln!(&mut stdin, "{parent_sha}:{path}")?; } _ => continue, } @@ -766,7 +770,7 @@ impl GitRepository for RealGitRepository { } files.push(CommitFile { - path: path.into(), + path: rel_path.into(), old_text, new_text, }) @@ -824,7 +828,7 @@ impl GitRepository for RealGitRepository { .current_dir(&working_directory?) 
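// Because `RepoPath` now wraps a '/'-delimited `RelPath`, the command builders in this
// file pass `path.as_str()` to git directly and the old `to_unix_style()` conversion is
// gone. A rough, self-contained sketch of the resulting shape, using
// `std::process::Command` and plain `&str` stand-ins for `RepoPath`.
use std::process::Command;

fn checkout_paths_command(working_directory: &str, commit: &str, paths: &[&str]) -> Command {
    let mut cmd = Command::new("git");
    cmd.current_dir(working_directory)
        .args(["checkout", commit, "--"])
        // Repo-relative paths are already '/'-delimited, so they are passed as-is.
        .args(paths.iter().copied());
    cmd
}

fn main() {
    // Hypothetical directory and commit, for illustration only.
    let cmd = checkout_paths_command("/tmp/repo", "abc123", &["src/main.rs", "Cargo.toml"]);
    println!("{cmd:?}");
}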
.envs(env.iter()) .args(["checkout", &commit, "--"]) - .args(paths.iter().map(|path| path.as_ref())) + .args(paths.iter().map(|path| path.as_str())) .output() .await?; anyhow::ensure!( @@ -846,13 +850,11 @@ impl GitRepository for RealGitRepository { .spawn(async move { fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { // This check is required because index.get_path() unwraps internally :( - check_path_to_repo_path_errors(path)?; - let mut index = repo.index()?; index.read(false)?; const STAGE_NORMAL: i32 = 0; - let oid = match index.get_path(path, STAGE_NORMAL) { + let oid = match index.get_path(path.as_std_path(), STAGE_NORMAL) { Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, _ => return Ok(None), }; @@ -876,7 +878,7 @@ impl GitRepository for RealGitRepository { .spawn(async move { let repo = repo.lock(); let head = repo.head().ok()?.peel_to_tree().log_err()?; - let entry = head.get_path(&path).ok()?; + let entry = head.get_path(path.as_std_path()).ok()?; if entry.filemode() == i32::from(git2::FileMode::Link) { return None; } @@ -918,7 +920,7 @@ impl GitRepository for RealGitRepository { .current_dir(&working_directory) .envs(env.iter()) .args(["update-index", "--add", "--cacheinfo", "100644", sha]) - .arg(path.to_unix_style()) + .arg(path.as_str()) .output() .await?; @@ -933,7 +935,7 @@ impl GitRepository for RealGitRepository { .current_dir(&working_directory) .envs(env.iter()) .args(["update-index", "--force-remove"]) - .arg(path.to_unix_style()) + .arg(path.as_str()) .output() .await?; anyhow::ensure!( @@ -1251,7 +1253,7 @@ impl GitRepository for RealGitRepository { .current_dir(&working_directory?) .envs(env.iter()) .args(["update-index", "--add", "--remove", "--"]) - .args(paths.iter().map(|p| p.to_unix_style())) + .args(paths.iter().map(|p| p.as_str())) .output() .await?; anyhow::ensure!( @@ -1812,7 +1814,7 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec { OsString::from("-z"), ]; args.extend(path_prefixes.iter().map(|path_prefix| { - if path_prefix.0.as_ref() == Path::new("") { + if path_prefix.is_empty() { Path::new(".").into() } else { path_prefix.as_os_str().into() @@ -2066,99 +2068,65 @@ async fn run_askpass_command( } } -pub static WORK_DIRECTORY_REPO_PATH: LazyLock = - LazyLock::new(|| RepoPath(Path::new("").into())); - #[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)] -pub struct RepoPath(pub Arc); +pub struct RepoPath(pub Arc); impl RepoPath { - pub fn new(path: PathBuf) -> Self { - debug_assert!(path.is_relative(), "Repo paths must be relative"); - - RepoPath(path.into()) + pub fn new + ?Sized>(s: &S) -> Result { + let rel_path = RelPath::new(s)?; + Ok(rel_path.into()) } - pub fn from_str(path: &str) -> Self { - let path = Path::new(path); - debug_assert!(path.is_relative(), "Repo paths must be relative"); - - RepoPath(path.into()) + pub fn from_proto(proto: &str) -> Result { + let rel_path = RelPath::from_proto(proto)?; + Ok(rel_path.into()) } - pub fn to_unix_style(&self) -> Cow<'_, OsStr> { - #[cfg(target_os = "windows")] - { - use std::ffi::OsString; - - let path = self.0.as_os_str().to_string_lossy().replace("\\", "/"); - Cow::Owned(OsString::from(path)) - } - #[cfg(not(target_os = "windows"))] - { - Cow::Borrowed(self.0.as_os_str()) - } + pub fn from_std_path(path: &Path, path_style: PathStyle) -> Result { + let rel_path = RelPath::from_std_path(path, path_style)?; + Ok(rel_path.into()) } } -impl std::fmt::Display for RepoPath { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - 
self.0.to_string_lossy().fmt(f) - } +#[cfg(any(test, feature = "test-support"))] +pub fn repo_path + ?Sized>(s: &S) -> RepoPath { + RepoPath(RelPath::new(s).unwrap().into()) } -impl From<&Path> for RepoPath { - fn from(value: &Path) -> Self { - RepoPath::new(value.into()) +impl From<&RelPath> for RepoPath { + fn from(value: &RelPath) -> Self { + RepoPath(value.into()) } } -impl From> for RepoPath { - fn from(value: Arc) -> Self { +impl From> for RepoPath { + fn from(value: Arc) -> Self { RepoPath(value) } } -impl From for RepoPath { - fn from(value: PathBuf) -> Self { - RepoPath::new(value) - } -} - -impl From<&str> for RepoPath { - fn from(value: &str) -> Self { - Self::from_str(value) - } -} - impl Default for RepoPath { fn default() -> Self { - RepoPath(Path::new("").into()) - } -} - -impl AsRef for RepoPath { - fn as_ref(&self) -> &Path { - self.0.as_ref() + RepoPath(RelPath::empty().into()) } } impl std::ops::Deref for RepoPath { - type Target = Path; + type Target = RelPath; fn deref(&self) -> &Self::Target { &self.0 } } -impl Borrow for RepoPath { - fn borrow(&self) -> &Path { - self.0.as_ref() +impl AsRef for RepoPath { + fn as_ref(&self) -> &Path { + RelPath::as_ref(&self.0) } } #[derive(Debug)] -pub struct RepoPathDescendants<'a>(pub &'a Path); +pub struct RepoPathDescendants<'a>(pub &'a RepoPath); impl MapSeekTarget for RepoPathDescendants<'_> { fn cmp_cursor(&self, key: &RepoPath) -> Ordering { @@ -2244,35 +2212,6 @@ fn parse_upstream_track(upstream_track: &str) -> Result { })) } -fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> { - match relative_file_path.components().next() { - None => anyhow::bail!("repo path should not be empty"), - Some(Component::Prefix(_)) => anyhow::bail!( - "repo path `{}` should be relative, not a windows prefix", - relative_file_path.to_string_lossy() - ), - Some(Component::RootDir) => { - anyhow::bail!( - "repo path `{}` should be relative", - relative_file_path.to_string_lossy() - ) - } - Some(Component::CurDir) => { - anyhow::bail!( - "repo path `{}` should not start with `.`", - relative_file_path.to_string_lossy() - ) - } - Some(Component::ParentDir) => { - anyhow::bail!( - "repo path `{}` should not start with `..`", - relative_file_path.to_string_lossy() - ) - } - _ => Ok(()), - } -} - fn checkpoint_author_envs() -> HashMap { HashMap::from_iter([ ("GIT_AUTHOR_NAME".to_string(), "Zed".to_string()), @@ -2299,12 +2238,9 @@ mod tests { let repo = RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap(); - repo.stage_paths( - vec![RepoPath::from_str("file")], - Arc::new(HashMap::default()), - ) - .await - .unwrap(); + repo.stage_paths(vec![repo_path("file")], Arc::new(HashMap::default())) + .await + .unwrap(); repo.commit( "Initial commit".into(), None, @@ -2328,12 +2264,9 @@ mod tests { smol::fs::write(&file_path, "modified after checkpoint") .await .unwrap(); - repo.stage_paths( - vec![RepoPath::from_str("file")], - Arc::new(HashMap::default()), - ) - .await - .unwrap(); + repo.stage_paths(vec![repo_path("file")], Arc::new(HashMap::default())) + .await + .unwrap(); repo.commit( "Commit after checkpoint".into(), None, @@ -2466,12 +2399,9 @@ mod tests { RealGitRepository::new(&repo_dir.path().join(".git"), None, cx.executor()).unwrap(); // initial commit - repo.stage_paths( - vec![RepoPath::from_str("main.rs")], - Arc::new(HashMap::default()), - ) - .await - .unwrap(); + repo.stage_paths(vec![repo_path("main.rs")], Arc::new(HashMap::default())) + .await + .unwrap(); repo.commit( "Initial 
commit".into(), None, diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index d0399a137aa9af7fc400a13105119b897a9dec1c..2bcd0809dbb1ac4bf41c8b7e861ed906f086d1ad 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -1,8 +1,8 @@ use crate::repository::RepoPath; use anyhow::Result; use serde::{Deserialize, Serialize}; -use std::{path::Path, str::FromStr, sync::Arc}; -use util::ResultExt; +use std::{str::FromStr, sync::Arc}; +use util::{ResultExt, rel_path::RelPath}; #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum FileStatus { @@ -447,7 +447,8 @@ impl FromStr for GitStatus { } let status = entry.as_bytes()[0..2].try_into().unwrap(); let status = FileStatus::from_bytes(status).log_err()?; - let path = RepoPath(Path::new(path).into()); + // git-status outputs `/`-delimited repo paths, even on Windows. + let path = RepoPath(RelPath::new(path).log_err()?.into()); Some((path, status)) }) .collect::>(); diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index ac51cee8e42567a607891dd242c2bf103ae7fc0e..038a5beaac5fcb7b1d96ff5f1e63d307bce6595c 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -14,13 +14,12 @@ use multi_buffer::PathKey; use project::{Project, WorktreeId, git_store::Repository}; use std::{ any::{Any, TypeId}, - ffi::OsStr, fmt::Write as _, - path::{Path, PathBuf}, + path::PathBuf, sync::Arc, }; use ui::{Color, Icon, IconName, Label, LabelCommon as _, SharedString}; -use util::{ResultExt, truncate_and_trailoff}; +use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff}; use workspace::{ Item, ItemHandle as _, ItemNavHistory, ToolbarItemLocation, Workspace, item::{BreadcrumbText, ItemEvent, TabContentParams}, @@ -40,7 +39,7 @@ struct GitBlob { } struct CommitMetadataFile { - title: Arc, + title: Arc, worktree_id: WorktreeId, } @@ -129,7 +128,9 @@ impl CommitView { let mut metadata_buffer_id = None; if let Some(worktree_id) = first_worktree_id { let file = Arc::new(CommitMetadataFile { - title: PathBuf::from(format!("commit {}", commit.sha)).into(), + title: RelPath::new(&format!("commit {}", commit.sha)) + .unwrap() + .into(), worktree_id, }); let buffer = cx.new(|cx| { @@ -144,7 +145,7 @@ impl CommitView { }); multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( - PathKey::namespaced(COMMIT_METADATA_NAMESPACE, file.title.clone()), + PathKey::namespaced(COMMIT_METADATA_NAMESPACE, file.title.as_str().into()), buffer.clone(), vec![Point::zero()..buffer.read(cx).max_point()], 0, @@ -192,7 +193,7 @@ impl CommitView { .collect::>(); let path = snapshot.file().unwrap().path().clone(); let _is_newly_added = multibuffer.set_excerpts_for_path( - PathKey::namespaced(FILE_NAMESPACE, path), + PathKey::namespaced(FILE_NAMESPACE, path.as_str().into()), buffer, diff_hunk_ranges, multibuffer_context_lines(cx), @@ -227,15 +228,19 @@ impl language::File for GitBlob { } } - fn path(&self) -> &Arc { + fn path_style(&self, _: &App) -> PathStyle { + PathStyle::Posix + } + + fn path(&self) -> &Arc { &self.path.0 } fn full_path(&self, _: &App) -> PathBuf { - self.path.to_path_buf() + self.path.as_std_path().to_path_buf() } - fn file_name<'a>(&'a self, _: &'a App) -> &'a OsStr { + fn file_name<'a>(&'a self, _: &'a App) -> &'a str { self.path.file_name().unwrap() } @@ -261,15 +266,19 @@ impl language::File for CommitMetadataFile { DiskState::New } - fn path(&self) -> &Arc { + fn path_style(&self, _: &App) -> PathStyle { + 
PathStyle::Posix + } + + fn path(&self) -> &Arc { &self.title } fn full_path(&self, _: &App) -> PathBuf { - self.title.as_ref().into() + PathBuf::from(self.title.as_str().to_owned()) } - fn file_name<'a>(&'a self, _: &'a App) -> &'a OsStr { + fn file_name<'a>(&'a self, _: &'a App) -> &'a str { self.title.file_name().unwrap() } diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index b2c855f0fa341c19e7f44446e37240ee5a70e5e6..e66da36576429e8a1d768681e35015db034dff39 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -53,7 +53,7 @@ use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore, StatusStyle}; use std::future::Future; use std::ops::Range; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::{collections::HashSet, sync::Arc, time::Duration, usize}; use strum::{IntoEnumIterator, VariantNames}; use time::OffsetDateTime; @@ -61,6 +61,7 @@ use ui::{ Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, IconPosition, Label, LabelSize, PopoverMenu, ScrollAxes, Scrollbars, SplitButton, Tooltip, WithScrollbar, prelude::*, }; +use util::paths::PathStyle; use util::{ResultExt, TryFutureExt, maybe}; use workspace::SERIALIZATION_THROTTLE_TIME; @@ -251,23 +252,22 @@ impl GitListEntry { #[derive(Debug, PartialEq, Eq, Clone)] pub struct GitStatusEntry { pub(crate) repo_path: RepoPath, - pub(crate) abs_path: PathBuf, pub(crate) status: FileStatus, pub(crate) staging: StageStatus, } impl GitStatusEntry { - fn display_name(&self) -> String { + fn display_name(&self, path_style: PathStyle) -> String { self.repo_path .file_name() - .map(|name| name.to_string_lossy().into_owned()) - .unwrap_or_else(|| self.repo_path.to_string_lossy().into_owned()) + .map(|name| name.to_owned()) + .unwrap_or_else(|| self.repo_path.display(path_style).to_string()) } - fn parent_dir(&self) -> Option { + fn parent_dir(&self, path_style: PathStyle) -> Option { self.repo_path .parent() - .map(|parent| parent.to_string_lossy().into_owned()) + .map(|parent| parent.display(path_style).to_string()) } } @@ -826,6 +826,7 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) { + let path_style = self.project.read(cx).path_style(cx); maybe!({ let list_entry = self.entries.get(self.selected_entry?)?.clone(); let entry = list_entry.status_entry()?.to_owned(); @@ -841,8 +842,7 @@ impl GitPanel { entry .repo_path .file_name() - .unwrap_or(entry.repo_path.as_os_str()) - .to_string_lossy() + .unwrap_or(entry.repo_path.display(path_style).as_ref()), ), None, &["Restore", "Cancel"], @@ -885,7 +885,7 @@ impl GitPanel { if entry.status.staging().has_staged() { self.change_file_stage(false, vec![entry.clone()], cx); } - let filename = path.path.file_name()?.to_string_lossy(); + let filename = path.path.file_name()?.to_string(); if !entry.status.is_created() { self.perform_checkout(vec![entry.clone()], window, cx); @@ -1028,7 +1028,7 @@ impl GitPanel { let mut details = entries .iter() .filter_map(|entry| entry.repo_path.0.file_name()) - .map(|filename| filename.to_string_lossy()) + .map(|filename| filename.to_string()) .take(5) .join("\n"); if entries.len() > 5 { @@ -1084,7 +1084,7 @@ impl GitPanel { .repo_path .0 .file_name() - .map(|f| f.to_string_lossy()) + .map(|f| f.to_string()) .unwrap_or_default() }) .take(5) @@ -1721,7 +1721,7 @@ impl GitPanel { .repo_path .file_name() .unwrap_or_default() - .to_string_lossy(); + .to_string(); Some(format!("{} {}", action_text, file_name)) } @@ -1973,11 +1973,7 @@ impl GitPanel { cx.spawn_in(window, async 
move |this, cx| { let mut paths = path.await.ok()?.ok()??; let mut path = paths.pop()?; - let repo_name = repo - .split(std::path::MAIN_SEPARATOR_STR) - .last()? - .strip_suffix(".git")? - .to_owned(); + let repo_name = repo.split("/").last()?.strip_suffix(".git")?.to_owned(); let fs = this.read_with(cx, |this, _| this.fs.clone()).ok()?; @@ -2558,6 +2554,7 @@ impl GitPanel { } fn update_visible_entries(&mut self, window: &mut Window, cx: &mut Context) { + let path_style = self.project.read(cx).path_style(cx); let bulk_staging = self.bulk_staging.take(); let last_staged_path_prev_index = bulk_staging .as_ref() @@ -2609,10 +2606,8 @@ impl GitPanel { continue; } - let abs_path = repo.work_directory_abs_path.join(&entry.repo_path.0); let entry = GitStatusEntry { repo_path: entry.repo_path.clone(), - abs_path, status: entry.status, staging, }; @@ -2623,8 +2618,8 @@ impl GitPanel { } let width_estimate = Self::item_width_estimate( - entry.parent_dir().map(|s| s.len()).unwrap_or(0), - entry.display_name().len(), + entry.parent_dir(path_style).map(|s| s.len()).unwrap_or(0), + entry.display_name(path_style).len(), ); match max_width_item.as_mut() { @@ -3634,7 +3629,7 @@ impl GitPanel { cx: &App, ) -> Option { let repo = self.active_repository.as_ref()?.read(cx); - let project_path = (file.worktree_id(cx), file.path()).into(); + let project_path = (file.worktree_id(cx), file.path().clone()).into(); let repo_path = repo.project_path_to_repo_path(&project_path, cx)?; let ix = self.entry_by_path(&repo_path, cx)?; let entry = self.entries.get(ix)?; @@ -3887,7 +3882,8 @@ impl GitPanel { window: &Window, cx: &Context, ) -> AnyElement { - let display_name = entry.display_name(); + let path_style = self.project.read(cx).path_style(cx); + let display_name = entry.display_name(path_style); let selected = self.selected_entry == Some(ix); let marked = self.marked_entries.contains(&ix); @@ -4060,11 +4056,14 @@ impl GitPanel { .items_center() .flex_1() // .overflow_hidden() - .when_some(entry.parent_dir(), |this, parent| { + .when_some(entry.parent_dir(path_style), |this, parent| { if !parent.is_empty() { this.child( - self.entry_label(format!("{}/", parent), path_color) - .when(status.is_deleted(), |this| this.strikethrough()), + self.entry_label( + format!("{parent}{}", path_style.separator()), + path_color, + ) + .when(status.is_deleted(), |this| this.strikethrough()), ) } else { this @@ -4889,7 +4888,10 @@ impl Component for PanelRepoFooter { #[cfg(test)] mod tests { - use git::status::{StatusCode, UnmergedStatus, UnmergedStatusCode}; + use git::{ + repository::repo_path, + status::{StatusCode, UnmergedStatus, UnmergedStatusCode}, + }; use gpui::{TestAppContext, VisualTestContext}; use project::{FakeFs, WorktreeSettings}; use serde_json::json; @@ -4941,14 +4943,8 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/root/zed/.git")), &[ - ( - Path::new("crates/gpui/gpui.rs"), - StatusCode::Modified.worktree(), - ), - ( - Path::new("crates/util/util.rs"), - StatusCode::Modified.worktree(), - ), + ("crates/gpui/gpui.rs", StatusCode::Modified.worktree()), + ("crates/util/util.rs", StatusCode::Modified.worktree()), ], ); @@ -4989,14 +4985,12 @@ mod tests { header: Section::Tracked }), GitListEntry::Status(GitStatusEntry { - abs_path: path!("/root/zed/crates/gpui/gpui.rs").into(), - repo_path: "crates/gpui/gpui.rs".into(), + repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, }), GitListEntry::Status(GitStatusEntry { - abs_path: 
path!("/root/zed/crates/util/util.rs").into(), - repo_path: "crates/util/util.rs".into(), + repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, },), @@ -5016,14 +5010,12 @@ mod tests { header: Section::Tracked }), GitListEntry::Status(GitStatusEntry { - abs_path: path!("/root/zed/crates/gpui/gpui.rs").into(), - repo_path: "crates/gpui/gpui.rs".into(), + repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, }), GitListEntry::Status(GitStatusEntry { - abs_path: path!("/root/zed/crates/util/util.rs").into(), - repo_path: "crates/util/util.rs".into(), + repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, },), @@ -5061,14 +5053,14 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/root/project/.git")), &[ - (Path::new("src/main.rs"), StatusCode::Modified.worktree()), - (Path::new("src/lib.rs"), StatusCode::Modified.worktree()), - (Path::new("tests/test.rs"), StatusCode::Modified.worktree()), - (Path::new("new_file.txt"), FileStatus::Untracked), - (Path::new("another_new.rs"), FileStatus::Untracked), - (Path::new("src/utils.rs"), FileStatus::Untracked), + ("src/main.rs", StatusCode::Modified.worktree()), + ("src/lib.rs", StatusCode::Modified.worktree()), + ("tests/test.rs", StatusCode::Modified.worktree()), + ("new_file.txt", FileStatus::Untracked), + ("another_new.rs", FileStatus::Untracked), + ("src/utils.rs", FileStatus::Untracked), ( - Path::new("conflict.txt"), + "conflict.txt", UnmergedStatus { first_head: UnmergedStatusCode::Updated, second_head: UnmergedStatusCode::Updated, @@ -5242,7 +5234,7 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/root/project/.git")), - &[(Path::new("src/main.rs"), StatusCode::Modified.worktree())], + &[("src/main.rs", StatusCode::Modified.worktree())], ); let project = Project::test(fs.clone(), [Path::new(path!("/root/project"))], cx).await; diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 9d0a575247427ec5fe674b342d0f2660e40e2299..a226caab34662c44c20a8bccb02d1149102befc7 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -243,7 +243,7 @@ impl ProjectDiff { TRACKED_NAMESPACE }; - let path_key = PathKey::namespaced(namespace, entry.repo_path.0); + let path_key = PathKey::namespaced(namespace, entry.repo_path.as_str().into()); self.move_to_path(path_key, window, cx) } @@ -397,7 +397,7 @@ impl ProjectDiff { } else { TRACKED_NAMESPACE }; - let path_key = PathKey::namespaced(namespace, entry.repo_path.0.clone()); + let path_key = PathKey::namespaced(namespace, entry.repo_path.as_str().into()); previous_paths.remove(&path_key); let load_buffer = self @@ -535,7 +535,7 @@ impl ProjectDiff { self.multibuffer .read(cx) .excerpt_paths() - .map(|key| key.path().to_string_lossy().to_string()) + .map(|key| key.path().to_string()) .collect() } } @@ -1406,12 +1406,12 @@ mod tests { fs.set_head_for_repo( path!("/project/.git").as_ref(), - &[("foo.txt".into(), "foo\n".into())], + &[("foo.txt", "foo\n".into())], "deadbeef", ); fs.set_index_for_repo( path!("/project/.git").as_ref(), - &[("foo.txt".into(), "foo\n".into())], + &[("foo.txt", "foo\n".into())], ); cx.run_until_parked(); @@ -1461,16 +1461,13 @@ mod tests { fs.set_head_and_index_for_repo( path!("/project/.git").as_ref(), - &[ - ("bar".into(), "bar\n".into()), - ("foo".into(), "foo\n".into()), - ], + &[("bar", "bar\n".into()), ("foo", 
"foo\n".into())], ); cx.run_until_parked(); let editor = cx.update_window_entity(&diff, |diff, window, cx| { diff.move_to_path( - PathKey::namespaced(TRACKED_NAMESPACE, Path::new("foo").into()), + PathKey::namespaced(TRACKED_NAMESPACE, "foo".into()), window, cx, ); @@ -1491,7 +1488,7 @@ mod tests { let editor = cx.update_window_entity(&diff, |diff, window, cx| { diff.move_to_path( - PathKey::namespaced(TRACKED_NAMESPACE, Path::new("bar").into()), + PathKey::namespaced(TRACKED_NAMESPACE, "bar".into()), window, cx, ); @@ -1543,7 +1540,7 @@ mod tests { fs.set_head_for_repo( path!("/project/.git").as_ref(), - &[("foo".into(), "original\n".into())], + &[("foo", "original\n".into())], "deadbeef", ); cx.run_until_parked(); @@ -1646,12 +1643,12 @@ mod tests { ) .await; - fs.set_git_content_for_repo( + fs.set_head_and_index_for_repo( Path::new("/a/.git"), &[ - ("b.txt".into(), "before\n".to_string(), None), - ("c.txt".into(), "unchanged\n".to_string(), None), - ("d.txt".into(), "deleted\n".to_string(), None), + ("b.txt", "before\n".to_string()), + ("c.txt", "unchanged\n".to_string()), + ("d.txt", "deleted\n".to_string()), ], ); @@ -1764,9 +1761,9 @@ mod tests { ) .await; - fs.set_git_content_for_repo( + fs.set_head_and_index_for_repo( Path::new("/a/.git"), - &[("main.rs".into(), git_contents.to_owned(), None)], + &[("main.rs", git_contents.to_owned())], ); let project = Project::test(fs, [Path::new("/a")], cx).await; @@ -1816,7 +1813,7 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/project/.git")), &[( - Path::new("foo"), + "foo", UnmergedStatus { first_head: UnmergedStatusCode::Updated, second_head: UnmergedStatusCode::Updated, diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 9b573d7071b64c7470e81079e7be5a5f048fc5eb..321ec09d0d745db6a95c498db166639820688345 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -311,7 +311,7 @@ mod tests { use project::{FakeFs, Project}; use serde_json::json; use std::{num::NonZeroU32, sync::Arc, time::Duration}; - use util::path; + use util::{path, rel_path::rel_path}; use workspace::{AppState, Workspace}; #[gpui::test] @@ -356,7 +356,7 @@ mod tests { .unwrap(); let editor = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "a.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) }) .await .unwrap() @@ -460,7 +460,7 @@ mod tests { .unwrap(); let editor = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "a.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) }) .await .unwrap() @@ -545,7 +545,7 @@ mod tests { .unwrap(); let editor = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "a.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) }) .await .unwrap() @@ -623,7 +623,7 @@ mod tests { .unwrap(); let editor = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "a.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) }) .await .unwrap() diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 2dca57424b86e2221acc271efac19cdf39a3f79f..a9ec858ea3b5e32e093bc5476a4cadc1e64b8501 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ 
b/crates/image_viewer/src/image_viewer.rs @@ -1,8 +1,6 @@ mod image_info; mod image_viewer_settings; -use std::path::PathBuf; - use anyhow::Context as _; use editor::{EditorSettings, items::entry_git_aware_label_color}; use file_icons::FileIcons; @@ -144,7 +142,6 @@ impl Item for ImageView { .read(cx) .file .file_name(cx) - .to_string_lossy() .to_string() .into() } @@ -198,20 +195,14 @@ impl Item for ImageView { } fn breadcrumbs_text_for_image(project: &Project, image: &ImageItem, cx: &App) -> String { - let path = image.file.file_name(cx); - if project.visible_worktrees(cx).count() <= 1 { - return path.to_string_lossy().to_string(); + let mut path = image.file.path().clone(); + if project.visible_worktrees(cx).count() > 1 + && let Some(worktree) = project.worktree_for_id(image.project_path(cx).worktree_id, cx) + { + path = worktree.read(cx).root_name().join(&path); } - project - .worktree_for_id(image.project_path(cx).worktree_id, cx) - .map(|worktree| { - PathBuf::from(worktree.read(cx).root_name()) - .join(path) - .to_string_lossy() - .to_string() - }) - .unwrap_or_else(|| path.to_string_lossy().to_string()) + path.display(project.path_style(cx)).to_string() } impl SerializableItem for ImageView { @@ -242,7 +233,7 @@ impl SerializableItem for ImageView { let project_path = ProjectPath { worktree_id, - path: relative_path.into(), + path: relative_path, }; let image_item = project diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index fa8b76517f0125e7319f035b41996e445451510a..5f0786c885b5636363c6f5f153c7db48d7b6e432 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -24,6 +24,7 @@ use std::path::Path; use std::rc::Rc; use std::sync::LazyLock; use ui::{Label, LabelSize, Tooltip, prelude::*, styled_ext_reflection, v_flex}; +use util::rel_path::RelPath; use util::split_str_with_ranges; /// Path used for unsaved buffer that contains style json. 
To support the json language server, this @@ -466,7 +467,7 @@ impl DivInspector { let project_path = worktree.read_with(cx, |worktree, _cx| ProjectPath { worktree_id: worktree.id(), - path: Path::new("").into(), + path: RelPath::empty().into(), })?; let buffer = project diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs index 52d93ba21a828b076141dd8d21a1b8f88bc20be8..9dc724f1234d79619ea1347e6747ce286aa42ca3 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -94,7 +94,7 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap break; } for directory in worktree.read(cx).directories(true, 1) { - let full_directory_path = worktree_root.join(&directory.path); + let full_directory_path = worktree_root.join(directory.path.as_std_path()); if full_directory_path.ends_with(&journal_dir_clone) { open_new_workspace = false; break 'outer; diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 8e104fc8cb8a2a1cd211cbd59a0caa4d2f79f18e..b76bb7521b403d7c8900d8ac9963f7e908aa6ea3 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -41,13 +41,12 @@ use std::{ cell::Cell, cmp::{self, Ordering, Reverse}, collections::{BTreeMap, BTreeSet}, - ffi::OsStr, future::Future, iter::{self, Iterator, Peekable}, mem, num::NonZeroU32, ops::{Deref, Range}, - path::{Path, PathBuf}, + path::PathBuf, rc, sync::{Arc, LazyLock}, time::{Duration, Instant}, @@ -65,7 +64,7 @@ pub use text::{ use theme::{ActiveTheme as _, SyntaxTheme}; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; -use util::{RangeExt, debug_panic, maybe}; +use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath}; #[cfg(any(test, feature = "test-support"))] pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript}; @@ -349,15 +348,18 @@ pub trait File: Send + Sync + Any { fn disk_state(&self) -> DiskState; /// Returns the path of this file relative to the worktree's root directory. - fn path(&self) -> &Arc; + fn path(&self) -> &Arc; /// Returns the path of this file relative to the worktree's parent directory (this means it /// includes the name of the worktree's root folder). fn full_path(&self, cx: &App) -> PathBuf; + /// Returns the path style of this file. + fn path_style(&self, cx: &App) -> PathStyle; + /// Returns the last component of this handle's absolute path. If this handle refers to the root /// of its worktree, then this method will return the name of the worktree itself. - fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr; + fn file_name<'a>(&'a self, cx: &'a App) -> &'a str; /// Returns the id of the worktree to which this file belongs. /// @@ -4626,13 +4628,12 @@ impl BufferSnapshot { self.file.as_ref() } - /// Resolves the file path (relative to the worktree root) associated with the underlying file. 
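// A sketch of the path-style idea the File trait above introduces: worktree
// paths stay `/`-delimited internally and are converted at display time via
// `path_style.separator()`. `PathStyle` here is a simplified stand-in for the
// type in the `util` crate, which this diff only references.
#[derive(Clone, Copy)]
enum PathStyle {
    Posix,
    Windows,
}

impl PathStyle {
    fn separator(self) -> &'static str {
        match self {
            PathStyle::Posix => "/",
            PathStyle::Windows => "\\",
        }
    }

    // Render a `/`-delimited worktree-relative path using the host's separator.
    fn display(self, rel_path: &str) -> String {
        match self {
            PathStyle::Posix => rel_path.to_string(),
            PathStyle::Windows => rel_path.replace('/', "\\"),
        }
    }
}

fn main() {
    let rel = "crates/gpui/gpui.rs";
    assert_eq!(PathStyle::Posix.display(rel), "crates/gpui/gpui.rs");
    assert_eq!(PathStyle::Windows.display(rel), "crates\\gpui\\gpui.rs");
    assert_eq!(PathStyle::Windows.separator(), "\\");
}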
pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option { if let Some(file) = self.file() { if file.path().file_name().is_none() || include_root { Some(file.full_path(cx)) } else { - Some(file.path().to_path_buf()) + Some(file.path().as_std_path().to_owned()) } } else { None @@ -5117,19 +5118,19 @@ impl IndentSize { #[cfg(any(test, feature = "test-support"))] pub struct TestFile { - pub path: Arc, + pub path: Arc, pub root_name: String, pub local_root: Option, } #[cfg(any(test, feature = "test-support"))] impl File for TestFile { - fn path(&self) -> &Arc { + fn path(&self) -> &Arc { &self.path } fn full_path(&self, _: &gpui::App) -> PathBuf { - PathBuf::from(&self.root_name).join(self.path.as_ref()) + PathBuf::from(self.root_name.clone()).join(self.path.as_std_path()) } fn as_local(&self) -> Option<&dyn LocalFile> { @@ -5144,7 +5145,7 @@ impl File for TestFile { unimplemented!() } - fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr { + fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str { self.path().file_name().unwrap_or(self.root_name.as_ref()) } @@ -5159,6 +5160,10 @@ impl File for TestFile { fn is_private(&self) -> bool { false } + + fn path_style(&self, _cx: &App) -> PathStyle { + PathStyle::local() + } } #[cfg(any(test, feature = "test-support"))] @@ -5166,7 +5171,7 @@ impl LocalFile for TestFile { fn abs_path(&self, _cx: &App) -> PathBuf { PathBuf::from(self.local_root.as_ref().unwrap()) .join(&self.root_name) - .join(self.path.as_ref()) + .join(self.path.as_std_path()) } fn load(&self, _cx: &App) -> Task> { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index b416099e3cd6f887e16f688594c41273be5cd582..6c87ec5b5183bd5b37e9dd52d53c8fa0f8f28db1 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -24,6 +24,7 @@ use text::{BufferId, LineEnding}; use text::{Point, ToPoint}; use theme::ActiveTheme; use unindent::Unindent as _; +use util::rel_path::rel_path; use util::test::marked_text_offsets; use util::{RandomCharIter, assert_set_eq, post_inc, test::marked_text_ranges}; @@ -380,7 +381,7 @@ async fn test_language_for_file_with_custom_file_types(cx: &mut TestAppContext) fn file(path: &str) -> Arc { Arc::new(TestFile { - path: Path::new(path).into(), + path: Arc::from(rel_path(path)), root_name: "zed".into(), local_root: None, }) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 3e9f3bf1bd0cb4719f5442e1b1bd9e357ac9efca..daa39788ee18e41543931d06c9309f1020825b8c 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -70,6 +70,7 @@ pub use toolchain::{ ToolchainMetadata, ToolchainScope, }; use tree_sitter::{self, Query, QueryCursor, WasmStore, wasmtime}; +use util::rel_path::RelPath; use util::serde::default_true; pub use buffer::Operation; @@ -307,7 +308,7 @@ pub trait LspAdapterDelegate: Send + Sync { ) -> Result>; async fn which(&self, command: &OsStr) -> Option; async fn shell_env(&self) -> HashMap; - async fn read_text_file(&self, path: PathBuf) -> Result; + async fn read_text_file(&self, path: &RelPath) -> Result; async fn try_exec(&self, binary: LanguageServerBinary) -> Result<()>; } diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 92efe122aa5a13e0d4b1c196c019d9090ce3aa22..ed186062d825553660ce83a386897e742f75aca5 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -753,7 +753,7 @@ impl 
LanguageRegistry { content: Option<&Rope>, user_file_types: Option<&FxHashMap, GlobSet>>, ) -> Option { - let filename = path.file_name().and_then(|name| name.to_str()); + let filename = path.file_name().and_then(|filename| filename.to_str()); // `Path.extension()` returns None for files with a leading '.' // and no other extension which is not the desired behavior here, // as we want `.zshrc` to result in extension being `Some("zshrc")` diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index bae356614b56f5017415937dbaccb83723313c9d..73904516f982ee31ada88f6bee486d5512853a11 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -390,7 +390,7 @@ impl EditPredictionSettings { file.as_local() .is_some_and(|local| glob.matcher.is_match(local.abs_path(cx))) } else { - glob.matcher.is_match(file.path()) + glob.matcher.is_match(file.path().as_std_path()) } }) } @@ -798,6 +798,7 @@ pub struct JsxTagAutoCloseSettings { mod tests { use super::*; use gpui::TestAppContext; + use util::rel_path::rel_path; #[gpui::test] fn test_edit_predictions_enabled_for_file(cx: &mut TestAppContext) { @@ -839,11 +840,11 @@ mod tests { const WORKTREE_NAME: &str = "project"; let make_test_file = |segments: &[&str]| -> Arc { - let mut path_buf = PathBuf::new(); - path_buf.extend(segments); + let path = segments.join("/"); + let path = rel_path(&path); Arc::new(TestFile { - path: path_buf.as_path().into(), + path: path.into(), root_name: WORKTREE_NAME.to_string(), local_root: Some(PathBuf::from(if cfg!(windows) { "C:\\absolute\\" @@ -896,7 +897,7 @@ mod tests { assert!(!settings.enabled_for_file(&test_file, &cx)); let test_file_root: Arc = Arc::new(TestFile { - path: PathBuf::from("file.rs").as_path().into(), + path: rel_path("file.rs").into(), root_name: WORKTREE_NAME.to_string(), local_root: Some(PathBuf::from("/absolute/")), }); @@ -928,8 +929,12 @@ mod tests { // Test tilde expansion let home = shellexpand::tilde("~").into_owned(); - let home_file = make_test_file(&[&home, "test.rs"]); - let settings = build_settings(&["~/test.rs"]); + let home_file = Arc::new(TestFile { + path: rel_path("test.rs").into(), + root_name: "the-dir".to_string(), + local_root: Some(PathBuf::from(home)), + }) as Arc; + let settings = build_settings(&["~/the-dir/test.rs"]); assert!(!settings.enabled_for_file(&home_file, &cx)); } diff --git a/crates/language/src/manifest.rs b/crates/language/src/manifest.rs index 3ca0ddf71da20f69d5d6440189d4a656bfbe7c9d..82ed164a032cb18d2d011f59938a0cd1410ba60f 100644 --- a/crates/language/src/manifest.rs +++ b/crates/language/src/manifest.rs @@ -1,7 +1,8 @@ -use std::{borrow::Borrow, path::Path, sync::Arc}; +use std::{borrow::Borrow, sync::Arc}; use gpui::SharedString; use settings::WorktreeId; +use util::rel_path::RelPath; #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct ManifestName(SharedString); @@ -42,17 +43,17 @@ impl AsRef for ManifestName { /// For example, given a path like `foo/bar/baz`, a depth of 2 would explore `foo/bar/baz` and `foo/bar`, but not `foo`. pub struct ManifestQuery { /// Path to the file, relative to worktree root. 
- pub path: Arc<Path>, + pub path: Arc<RelPath>, pub depth: usize, pub delegate: Arc<dyn ManifestDelegate>, } pub trait ManifestProvider { fn name(&self) -> ManifestName; - fn search(&self, query: ManifestQuery) -> Option<Arc<Path>>; + fn search(&self, query: ManifestQuery) -> Option<Arc<RelPath>>; } pub trait ManifestDelegate: Send + Sync { fn worktree_id(&self) -> WorktreeId; - fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool; + fn exists(&self, path: &RelPath, is_dir: Option<bool>) -> bool; } diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index 2cc86881fbd515317d4d6f5949e82eb3da63a1bb..d3466307f368e7008eedbc8881aa78ab854bc08b 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -4,10 +4,7 @@ //! which is a set of tools used to interact with the projects written in said language. //! For example, a Python project can have an associated virtual environment; a Rust project can have a toolchain override. -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{path::PathBuf, sync::Arc}; use async_trait::async_trait; use collections::HashMap; @@ -15,6 +12,7 @@ use fs::Fs; use gpui::{AsyncApp, SharedString}; use settings::WorktreeId; use task::ShellKind; +use util::rel_path::RelPath; use crate::{LanguageName, ManifestName}; @@ -23,6 +21,7 @@ use crate::{LanguageName, ManifestName}; pub struct Toolchain { /// User-facing label pub name: SharedString, + /// Absolute path pub path: SharedString, pub language_name: LanguageName, /// Full toolchain data (including language-specific details) @@ -37,7 +36,7 @@ pub struct Toolchain { /// - Only in the subproject they're currently in. #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] pub enum ToolchainScope { - Subproject(WorktreeId, Arc<Path>), + Subproject(WorktreeId, Arc<RelPath>), Project, /// Available in all projects on this box. It wouldn't make sense to show suggestions across machines.
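// A std-only sketch of the ancestor walk that manifest providers perform
// against these worktree-relative paths, as the pyproject provider later in
// this diff does (the Cargo provider keeps walking to the outermost match);
// `find_manifest_dir` and its closure-based `exists` check are hypothetical
// stand-ins for `ManifestDelegate::exists` and `RelPath::ancestors`/`join`.
fn find_manifest_dir(
    rel_path: &str,
    depth: usize,
    manifest_name: &str,
    exists: impl Fn(&str) -> bool,
) -> Option<String> {
    // Walk from the query path up toward the worktree root (""), checking at
    // most `depth` levels, mirroring ManifestQuery::depth above.
    let mut dir = rel_path;
    for _ in 0..depth {
        let candidate = if dir.is_empty() {
            manifest_name.to_string()
        } else {
            format!("{dir}/{manifest_name}")
        };
        if exists(&candidate) {
            return Some(dir.to_string());
        }
        if dir.is_empty() {
            break;
        }
        dir = match dir.rfind('/') {
            Some(idx) => &dir[..idx],
            None => "",
        };
    }
    None
}

fn main() {
    let on_disk = ["crates/gpui/Cargo.toml", "Cargo.toml"];
    let exists = |p: &str| on_disk.contains(&p);
    assert_eq!(
        find_manifest_dir("crates/gpui/src", 4, "Cargo.toml", exists),
        Some("crates/gpui".to_string())
    );
}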
Global, @@ -97,7 +96,7 @@ pub trait ToolchainLister: Send + Sync + 'static { async fn list( &self, worktree_root: PathBuf, - subroot_relative_path: Arc<Path>, + subroot_relative_path: Arc<RelPath>, project_env: Option<HashMap<String, String>>, ) -> ToolchainList; @@ -134,7 +133,7 @@ pub trait LanguageToolchainStore: Send + Sync + 'static { async fn active_toolchain( self: Arc<Self>, worktree_id: WorktreeId, - relative_path: Arc<Path>, + relative_path: Arc<RelPath>, language_name: LanguageName, cx: &mut AsyncApp, ) -> Option<Toolchain>; @@ -144,7 +143,7 @@ pub trait LocalLanguageToolchainStore: Send + Sync + 'static { fn active_toolchain( self: Arc<Self>, worktree_id: WorktreeId, - relative_path: &Arc<Path>, + relative_path: &Arc<RelPath>, language_name: LanguageName, cx: &mut AsyncApp, ) -> Option<Toolchain>; @@ -155,7 +154,7 @@ impl<T: LocalLanguageToolchainStore> LanguageToolchainStore for T { async fn active_toolchain( self: Arc<Self>, worktree_id: WorktreeId, - relative_path: Arc<Path>, + relative_path: Arc<RelPath>, language_name: LanguageName, cx: &mut AsyncApp, ) -> Option<Toolchain> { diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index dccd33ffb6b90381d7d86c38e80ec95effd4daf7..002b6a15b40575d71e7828ed9fa55dd4c5d1b30a 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -19,7 +19,7 @@ use lsp::{ }; use serde::Serialize; use serde_json::Value; -use util::{ResultExt, fs::make_file_executable, maybe}; +use util::{ResultExt, fs::make_file_executable, maybe, rel_path::RelPath}; use crate::{LanguageServerRegistryProxy, LspAccess}; @@ -36,7 +36,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter { self.0.worktree_root_path().to_string_lossy().to_string() } - async fn read_text_file(&self, path: PathBuf) -> Result<String> { + async fn read_text_file(&self, path: &RelPath) -> Result<String> { self.0.read_text_file(path).await } diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index b750ad1621a4711ccbf827d6054ce50d168d6b29..8bf40ccc1a7f05ffd3c8ff971638d84ae4b7150c 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -21,6 +21,7 @@ use ui::{ DocumentationSide, Indicator, PopoverMenu, PopoverMenuHandle, Tooltip, Window, prelude::*, }; +use util::{ResultExt, rel_path::RelPath}; use workspace::{StatusItemView, Workspace}; use crate::lsp_log_view; @@ -148,6 +149,7 @@ impl LanguageServerState { return; }; let project = workspace.read(cx).project().clone(); + let path_style = project.read(cx).path_style(cx); let buffer_store = project.read(cx).buffer_store().clone(); let buffers = state .read(cx) @@ -159,6 +161,9 @@ impl LanguageServerState { servers.worktree.as_ref()?.upgrade()?.read(cx); let relative_path = abs_path.strip_prefix(&worktree.abs_path()).ok()?; + let relative_path = + RelPath::from_std_path(relative_path, path_style) + .log_err()?; let entry = worktree.entry_for_path(&relative_path)?; let project_path = project.read(cx).path_for_entry(entry.id, cx)?; @@ -767,7 +772,7 @@ impl LspButton { }); servers_with_health_checks.insert(&health.name); let worktree_name = - worktree.map(|worktree| SharedString::new(worktree.read(cx).root_name())); + worktree.map(|worktree| SharedString::new(worktree.read(cx).root_name_str())); let binary_status = state.language_servers.binary_statuses.get(&health.name); let server_data = ServerData::WithHealthCheck { @@ -826,7 +831,7 @@ impl LspButton { { Some((worktree, server_id)) => { let worktree_name = - SharedString::new(worktree.read(cx).root_name()); +
SharedString::new(worktree.read(cx).root_name_str()); servers_per_worktree .entry(worktree_name.clone()) .or_default() diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index fb63ab9a99147328c4987bd80b698ef4a477f013..5c6b4faa14723107644fb22195f12952bba8ccb7 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -376,7 +376,7 @@ impl LspLogView { let worktree_root_name = state .worktree_id .and_then(|id| self.project.read(cx).worktree_for_id(id, cx)) - .map(|worktree| worktree.read(cx).root_name().to_string()) + .map(|worktree| worktree.read(cx).root_name_str().to_string()) .unwrap_or_else(|| "Unknown worktree".to_string()); LogMenuItem { diff --git a/crates/language_tools/src/lsp_log_view_tests.rs b/crates/language_tools/src/lsp_log_view_tests.rs index d572c4375ed09997dc57d6c58e6c90f3e55775b6..2ef915fdc386b69f1af604bb22abad58abb91d3a 100644 --- a/crates/language_tools/src/lsp_log_view_tests.rs +++ b/crates/language_tools/src/lsp_log_view_tests.rs @@ -91,7 +91,7 @@ async fn test_lsp_log_view(cx: &mut TestAppContext) { .next() .unwrap() .read(cx) - .root_name() + .root_name_str() .to_string(), rpc_trace_enabled: false, selected_entry: LogKind::Logs, diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 482c709d453c520486990409a37c27a50132dd77..512653eab7dca97fd1c9ce3fbd790466ca62abd4 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -30,7 +30,10 @@ use std::{ }; use task::{AdapterSchemas, TaskTemplate, TaskTemplates, VariableName}; use theme::ThemeRegistry; -use util::{ResultExt, archive::extract_zip, fs::remove_matching, maybe, merge_json_value_into}; +use util::{ + ResultExt, archive::extract_zip, fs::remove_matching, maybe, merge_json_value_into, + rel_path::RelPath, +}; use crate::PackageJsonData; @@ -52,8 +55,8 @@ impl ContextProvider for JsonTaskProvider { let Some(file) = project::File::from_dyn(file.as_ref()).cloned() else { return Task::ready(None); }; - let is_package_json = file.path.ends_with("package.json"); - let is_composer_json = file.path.ends_with("composer.json"); + let is_package_json = file.path.ends_with(RelPath::new("package.json").unwrap()); + let is_composer_json = file.path.ends_with(RelPath::new("composer.json").unwrap()); if !is_package_json && !is_composer_json { return Task::ready(None); } diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index a8824d3776b08bdfdb99d216c8ab75e88e714c6c..8b6abc9c7205a769d0630a3792e37589c0e844c5 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -24,6 +24,7 @@ use smol::lock::OnceCell; use std::cmp::Ordering; use std::env::consts; use util::fs::{make_file_executable, remove_matching}; +use util::rel_path::RelPath; use parking_lot::Mutex; use std::str::FromStr; @@ -52,9 +53,9 @@ impl ManifestProvider for PyprojectTomlManifestProvider { depth, delegate, }: ManifestQuery, - ) -> Option> { + ) -> Option> { for path in path.ancestors().take(depth) { - let p = path.join("pyproject.toml"); + let p = path.join(RelPath::new("pyproject.toml").unwrap()); if delegate.exists(&p, Some(false)) { return Some(path.into()); } @@ -679,7 +680,7 @@ impl ContextProvider for PythonContextProvider { .as_ref() .and_then(|f| f.path().parent()) .map(Arc::from) - .unwrap_or_else(|| Arc::from("".as_ref())); + .unwrap_or_else(|| RelPath::empty().into()); toolchains .active_toolchain(worktree_id, file_path, "Python".into(), cx) @@ -1012,7 +1013,7 @@ 
impl ToolchainLister for PythonToolchainProvider { async fn list( &self, worktree_root: PathBuf, - subroot_relative_path: Arc, + subroot_relative_path: Arc, project_env: Option>, ) -> ToolchainList { let env = project_env.unwrap_or_default(); @@ -1024,7 +1025,6 @@ impl ToolchainLister for PythonToolchainProvider { ); let mut config = Configuration::default(); - debug_assert!(subroot_relative_path.is_relative()); // `.ancestors()` will yield at least one path, so in case of empty `subroot_relative_path`, we'll just use // worktree root as the workspace directory. config.workspace_directories = Some( diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index f5401f448c324f11c2de3a6379c704df5441b3db..b8f5c78dcef829cf97dcdb5f1698e3fa429c25fa 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -23,6 +23,7 @@ use std::{ use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; use util::fs::{make_file_executable, remove_matching}; use util::merge_json_value_into; +use util::rel_path::RelPath; use util::{ResultExt, maybe}; use crate::github_download::{GithubBinaryMetadata, download_server_binary}; @@ -88,10 +89,10 @@ impl ManifestProvider for CargoManifestProvider { depth, delegate, }: ManifestQuery, - ) -> Option> { + ) -> Option> { let mut outermost_cargo_toml = None; for path in path.ancestors().take(depth) { - let p = path.join("Cargo.toml"); + let p = path.join(RelPath::new("Cargo.toml").unwrap()); if delegate.exists(&p, Some(false)) { outermost_cargo_toml = Some(Arc::from(path)); } diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index edfddd3f76e374e9ae0d1ce71cfb0b7b3c586c4d..6e3661fa6f2807689a0e12580813ed7c7e56c0de 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -22,8 +22,8 @@ use std::{ sync::{Arc, LazyLock}, }; use task::{TaskTemplate, TaskTemplates, VariableName}; -use util::merge_json_value_into; use util::{ResultExt, fs::remove_matching, maybe}; +use util::{merge_json_value_into, rel_path::RelPath}; use crate::{PackageJson, PackageJsonData, github_download::download_server_binary}; @@ -264,7 +264,7 @@ impl TypeScriptContextProvider { &self, fs: Arc, worktree_root: &Path, - file_relative_path: &Path, + file_relative_path: &RelPath, cx: &App, ) -> Task> { let new_json_data = file_relative_path @@ -533,7 +533,7 @@ impl TypeScriptLspAdapter { } async fn tsdk_path(&self, adapter: &Arc) -> Option<&'static str> { let is_yarn = adapter - .read_text_file(PathBuf::from(".yarn/sdks/typescript/lib/typescript.js")) + .read_text_file(RelPath::new(".yarn/sdks/typescript/lib/typescript.js").unwrap()) .await .is_ok(); @@ -1014,7 +1014,7 @@ mod tests { use serde_json::json; use task::TaskTemplates; use unindent::Unindent; - use util::path; + use util::{path, rel_path::rel_path}; use crate::typescript::{ PackageJsonData, TypeScriptContextProvider, replace_test_name_parameters, @@ -1164,7 +1164,7 @@ mod tests { provider.combined_package_json_data( fs.clone(), path!("/root").as_ref(), - "sub/file1.js".as_ref(), + rel_path("sub/file1.js"), cx, ) }) diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 763a0d5a5ed961d916e3deea44963b0aa9340cb9..053d15775b2f3ea6328b8dca269ed8144b0628b3 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -12,7 +12,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::{ResultExt, maybe, merge_json_value_into}; +use util::{ResultExt, maybe, merge_json_value_into, 
rel_path::RelPath}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![server_path.into(), "--stdio".into()] @@ -36,7 +36,7 @@ impl VtslsLspAdapter { async fn tsdk_path(&self, adapter: &Arc) -> Option<&'static str> { let is_yarn = adapter - .read_text_file(PathBuf::from(".yarn/sdks/typescript/lib/typescript.js")) + .read_text_file(RelPath::new(".yarn/sdks/typescript/lib/typescript.js").unwrap()) .await .is_ok(); diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index c629756324cbded19485e7ba9d420db3fd4bd093..45faa142369e6c08817deebfbf8774f228bf70d5 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -16,7 +16,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::{ResultExt, maybe, merge_json_value_into}; +use util::{ResultExt, maybe, merge_json_value_into, rel_path::RelPath}; const SERVER_PATH: &str = "node_modules/yaml-language-server/bin/yaml-language-server"; @@ -141,7 +141,7 @@ impl LspAdapter for YamlLspAdapter { ) -> Result { let location = SettingsLocation { worktree_id: delegate.worktree_id(), - path: delegate.worktree_root_path(), + path: RelPath::empty(), }; let tab_size = cx.update(|cx| { diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index fdf0f2bbf20190d15b533d02b9f0122746439c89..61846717237df76c5fcb0e91c2aad8e91cd683f9 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -334,7 +334,10 @@ impl Markdown { } for path in paths { - if let Ok(language) = registry.language_for_file_path(&path).await { + if let Ok(language) = registry + .language_for_file_path(Path::new(path.as_ref())) + .await + { languages_by_path.insert(path, language); } } @@ -434,7 +437,7 @@ pub struct ParsedMarkdown { pub source: SharedString, pub events: Arc<[(Range, MarkdownEvent)]>, pub languages_by_name: TreeMap>, - pub languages_by_path: TreeMap, Arc>, + pub languages_by_path: TreeMap, Arc>, } impl ParsedMarkdown { diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index 1b4d5b5755c0b825124f37f68466bae7c0838b1a..62b210f9e33a90a44790c521591ba6f94e8baaef 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -4,7 +4,7 @@ pub use pulldown_cmark::TagEnd as MarkdownTagEnd; use pulldown_cmark::{ Alignment, CowStr, HeadingLevel, LinkType, MetadataBlockKind, Options, Parser, }; -use std::{ops::Range, path::Path, sync::Arc}; +use std::{ops::Range, sync::Arc}; use collections::HashSet; @@ -25,7 +25,7 @@ pub fn parse_markdown( ) -> ( Vec<(Range, MarkdownEvent)>, HashSet, - HashSet>, + HashSet>, ) { let mut events = Vec::new(); let mut language_names = HashSet::default(); diff --git a/crates/markdown/src/path_range.rs b/crates/markdown/src/path_range.rs index 19cfda0d9dfb30c550852f64edcad29e3d1e1de9..f98325c9b5aa45420d4e1990d55888675d160d5f 100644 --- a/crates/markdown/src/path_range.rs +++ b/crates/markdown/src/path_range.rs @@ -1,8 +1,8 @@ -use std::{ops::Range, path::Path, sync::Arc}; +use std::{ops::Range, sync::Arc}; #[derive(Debug, Clone, PartialEq)] pub struct PathWithRange { - pub path: Arc, + pub path: Arc, pub range: Option>, } @@ -78,12 +78,12 @@ impl PathWithRange { }; Self { - path: Path::new(path).into(), + path: path.into(), range, } } None => Self { - path: Path::new(str).into(), + path: str.into(), range: None, }, } @@ -123,7 +123,7 @@ mod tests { #[test] fn test_pathrange_parsing() { let path_range = PathWithRange::new("file.rs#L10-L20"); - assert_eq!(path_range.path.as_ref(), Path::new("file.rs")); + 
assert_eq!(path_range.path.as_ref(), "file.rs"); assert!(path_range.range.is_some()); if let Some(range) = path_range.range { assert_eq!(range.start.line, 10); @@ -133,7 +133,7 @@ mod tests { } let single_line = PathWithRange::new("file.rs#L15"); - assert_eq!(single_line.path.as_ref(), Path::new("file.rs")); + assert_eq!(single_line.path.as_ref(), "file.rs"); assert!(single_line.range.is_some()); if let Some(range) = single_line.range { assert_eq!(range.start.line, 15); @@ -141,11 +141,11 @@ mod tests { } let no_range = PathWithRange::new("file.rs"); - assert_eq!(no_range.path.as_ref(), Path::new("file.rs")); + assert_eq!(no_range.path.as_ref(), "file.rs"); assert!(no_range.range.is_none()); let lowercase = PathWithRange::new("file.rs#l5-l10"); - assert_eq!(lowercase.path.as_ref(), Path::new("file.rs")); + assert_eq!(lowercase.path.as_ref(), "file.rs"); assert!(lowercase.range.is_some()); if let Some(range) = lowercase.range { assert_eq!(range.start.line, 5); @@ -153,7 +153,7 @@ mod tests { } let complex = PathWithRange::new("src/path/to/file.rs#L100"); - assert_eq!(complex.path.as_ref(), Path::new("src/path/to/file.rs")); + assert_eq!(complex.path.as_ref(), "src/path/to/file.rs"); assert!(complex.range.is_some()); } @@ -161,7 +161,7 @@ mod tests { fn test_pathrange_from_str() { let with_range = PathWithRange::new("file.rs#L10-L20"); assert!(with_range.range.is_some()); - assert_eq!(with_range.path.as_ref(), Path::new("file.rs")); + assert_eq!(with_range.path.as_ref(), "file.rs"); let without_range = PathWithRange::new("file.rs"); assert!(without_range.range.is_none()); @@ -173,18 +173,18 @@ mod tests { #[test] fn test_pathrange_leading_text_trimming() { let with_language = PathWithRange::new("```rust file.rs#L10"); - assert_eq!(with_language.path.as_ref(), Path::new("file.rs")); + assert_eq!(with_language.path.as_ref(), "file.rs"); assert!(with_language.range.is_some()); if let Some(range) = with_language.range { assert_eq!(range.start.line, 10); } let with_spaces = PathWithRange::new("``` file.rs#L10-L20"); - assert_eq!(with_spaces.path.as_ref(), Path::new("file.rs")); + assert_eq!(with_spaces.path.as_ref(), "file.rs"); assert!(with_spaces.range.is_some()); let with_words = PathWithRange::new("```rust code example file.rs#L15:10"); - assert_eq!(with_words.path.as_ref(), Path::new("file.rs")); + assert_eq!(with_words.path.as_ref(), "file.rs"); assert!(with_words.range.is_some()); if let Some(range) = with_words.range { assert_eq!(range.start.line, 15); @@ -192,18 +192,18 @@ mod tests { } let with_whitespace = PathWithRange::new(" file.rs#L5"); - assert_eq!(with_whitespace.path.as_ref(), Path::new("file.rs")); + assert_eq!(with_whitespace.path.as_ref(), "file.rs"); assert!(with_whitespace.range.is_some()); let no_leading = PathWithRange::new("file.rs#L10"); - assert_eq!(no_leading.path.as_ref(), Path::new("file.rs")); + assert_eq!(no_leading.path.as_ref(), "file.rs"); assert!(no_leading.range.is_some()); } #[test] fn test_pathrange_with_line_and_column() { let line_and_col = PathWithRange::new("file.rs#L10:5"); - assert_eq!(line_and_col.path.as_ref(), Path::new("file.rs")); + assert_eq!(line_and_col.path.as_ref(), "file.rs"); assert!(line_and_col.range.is_some()); if let Some(range) = line_and_col.range { assert_eq!(range.start.line, 10); @@ -213,7 +213,7 @@ mod tests { } let full_range = PathWithRange::new("file.rs#L10:5-L20:15"); - assert_eq!(full_range.path.as_ref(), Path::new("file.rs")); + assert_eq!(full_range.path.as_ref(), "file.rs"); assert!(full_range.range.is_some()); if let 
Some(range) = full_range.range { assert_eq!(range.start.line, 10); @@ -223,7 +223,7 @@ mod tests { } let mixed_range1 = PathWithRange::new("file.rs#L10:5-L20"); - assert_eq!(mixed_range1.path.as_ref(), Path::new("file.rs")); + assert_eq!(mixed_range1.path.as_ref(), "file.rs"); assert!(mixed_range1.range.is_some()); if let Some(range) = mixed_range1.range { assert_eq!(range.start.line, 10); @@ -233,7 +233,7 @@ mod tests { } let mixed_range2 = PathWithRange::new("file.rs#L10-L20:15"); - assert_eq!(mixed_range2.path.as_ref(), Path::new("file.rs")); + assert_eq!(mixed_range2.path.as_ref(), "file.rs"); assert!(mixed_range2.range.is_some()); if let Some(range) = mixed_range2.range { assert_eq!(range.start.line, 10); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index cec1297e6fd52a8f2be273fb1b375b6610190416..15d6132d972b787f4d12ead244586a52b2ef96b2 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -37,7 +37,6 @@ use std::{ iter::{self, FromIterator}, mem, ops::{Range, RangeBounds, Sub}, - path::{Path, PathBuf}, rc::Rc, str, sync::Arc, @@ -169,23 +168,23 @@ impl MultiBufferDiffHunk { #[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] pub struct PathKey { namespace: u32, - path: Arc, + path: Arc, } impl PathKey { - pub fn namespaced(namespace: u32, path: Arc) -> Self { + pub fn namespaced(namespace: u32, path: Arc) -> Self { Self { namespace, path } } pub fn for_buffer(buffer: &Entity, cx: &App) -> Self { if let Some(file) = buffer.read(cx).file() { - Self::namespaced(1, Arc::from(file.full_path(cx))) + Self::namespaced(1, file.full_path(cx).to_string_lossy().to_string().into()) } else { - Self::namespaced(0, Arc::from(PathBuf::from(buffer.entity_id().to_string()))) + Self::namespaced(0, buffer.entity_id().to_string().into()) } } - pub fn path(&self) -> &Arc { + pub fn path(&self) -> &Arc { &self.path } } @@ -2603,7 +2602,7 @@ impl MultiBuffer { let buffer = buffer.read(cx); if let Some(file) = buffer.file() { - return file.file_name(cx).to_string_lossy(); + return file.file_name(cx).into(); } if let Some(title) = self.buffer_content_title(buffer) { diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 1be82500786b36fc014c2acf4fb49d4e8abc4d6b..a63fcbd60b912e1e6aba885dcacc882efd405117 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -1524,7 +1524,7 @@ fn test_set_excerpts_for_buffer_ordering(cx: &mut TestAppContext) { cx, ) }); - let path1: PathKey = PathKey::namespaced(0, Path::new("/").into()); + let path1: PathKey = PathKey::namespaced(0, "/".into()); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { @@ -1619,7 +1619,7 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { cx, ) }); - let path1: PathKey = PathKey::namespaced(0, Path::new("/").into()); + let path1: PathKey = PathKey::namespaced(0, "/".into()); let buf2 = cx.new(|cx| { Buffer::local( indoc! 
{ @@ -1638,7 +1638,7 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { cx, ) }); - let path2 = PathKey::namespaced(1, Path::new("/").into()); + let path2 = PathKey::namespaced(1, "/".into()); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { @@ -1815,7 +1815,7 @@ fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) { cx, ) }); - let path: PathKey = PathKey::namespaced(0, Path::new("/").into()); + let path: PathKey = PathKey::namespaced(0, "/".into()); let buf2 = cx.new(|cx| { Buffer::local( indoc! { diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index 1f85d08cee850f18a30cca9b56061854f7cbc7b1..1d68240f6bcb8d3de042f6906793d0fece705003 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -389,7 +389,7 @@ mod tests { use language::{Language, LanguageConfig, LanguageMatcher}; use project::{FakeFs, Project}; use serde_json::json; - use util::path; + use util::{path, rel_path::rel_path}; use workspace::{AppState, Workspace}; #[gpui::test] @@ -430,7 +430,7 @@ mod tests { .unwrap(); let editor = workspace .update_in(cx, |workspace, window, cx| { - workspace.open_path((worktree_id, "a.rs"), None, true, window, cx) + workspace.open_path((worktree_id, rel_path("a.rs")), None, true, window, cx) }) .await .unwrap() diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 863725ad851d5b2e2e44895d97d59fdb2f062f1a..4ba636921844404590e06ea7c5402fffd9de41f7 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -29,7 +29,7 @@ use std::{ collections::BTreeMap, hash::Hash, ops::Range, - path::{MAIN_SEPARATOR_STR, Path, PathBuf}, + path::{Path, PathBuf}, sync::{ Arc, OnceLock, atomic::{self, AtomicBool}, @@ -51,7 +51,7 @@ use ui::{ IndentGuideLayout, Label, LabelCommon, ListItem, ScrollAxes, Scrollbars, StyledExt, StyledTypography, Toggleable, Tooltip, WithScrollbar, h_flex, v_flex, }; -use util::{RangeExt, ResultExt, TryFutureExt, debug_panic}; +use util::{RangeExt, ResultExt, TryFutureExt, debug_panic, rel_path::RelPath}; use workspace::{ OpenInTerminal, WeakItemHandle, Workspace, dock::{DockPosition, Panel, PanelEvent}, @@ -107,7 +107,7 @@ pub struct OutlinePanel { pending_serialization: Task>, fs_entries_depth: HashMap<(WorktreeId, ProjectEntryId), usize>, fs_entries: Vec, - fs_children_count: HashMap, FsChildren>>, + fs_children_count: HashMap, FsChildren>>, collapsed_entries: HashSet, unfolded_dirs: HashMap>, selected_entry: SelectedEntry, @@ -1905,6 +1905,7 @@ impl OutlinePanel { _: &mut Window, cx: &mut Context, ) { + let path_style = self.project.read(cx).path_style(cx); if let Some(clipboard_text) = self .selected_entry() .and_then(|entry| match entry { @@ -1914,7 +1915,7 @@ impl OutlinePanel { } PanelEntry::Search(_) | PanelEntry::Outline(..) 
=> None, }) - .map(|p| p.to_string_lossy().to_string()) + .map(|p| p.display(path_style).to_string()) { cx.write_to_clipboard(ClipboardItem::new_string(clipboard_text)); } @@ -2272,7 +2273,7 @@ impl OutlinePanel { let color = entry_git_aware_label_color(entry.git_summary, entry.is_ignored, is_active); let icon = if settings.file_icons { - FileIcons::get_icon(&entry.path, cx) + FileIcons::get_icon(entry.path.as_std_path(), cx) .map(|icon_path| Icon::from_path(icon_path).color(color).into_any_element()) } else { None @@ -2303,7 +2304,7 @@ impl OutlinePanel { is_active, ); let icon = if settings.folder_icons { - FileIcons::get_folder_icon(is_expanded, &directory.entry.path, cx) + FileIcons::get_folder_icon(is_expanded, directory.entry.path.as_std_path(), cx) } else { FileIcons::get_chevron_icon(is_expanded, cx) } @@ -2329,13 +2330,13 @@ impl OutlinePanel { Some(file) => { let path = file.path(); let icon = if settings.file_icons { - FileIcons::get_icon(path.as_ref(), cx) + FileIcons::get_icon(path.as_std_path(), cx) } else { None } .map(Icon::from_path) .map(|icon| icon.color(color).into_any_element()); - (icon, file_name(path.as_ref())) + (icon, file_name(path.as_std_path())) } None => (None, "Untitled".to_string()), }, @@ -2615,19 +2616,17 @@ impl OutlinePanel { if root_entry.id == entry.id { file_name(worktree.abs_path().as_ref()) } else { - let path = worktree.absolutize(entry.path.as_ref()).ok(); - let path = path.as_deref().unwrap_or_else(|| entry.path.as_ref()); - file_name(path) + let path = worktree.absolutize(entry.path.as_ref()); + file_name(&path) } } None => { - let path = worktree.absolutize(entry.path.as_ref()).ok(); - let path = path.as_deref().unwrap_or_else(|| entry.path.as_ref()); - file_name(path) + let path = worktree.absolutize(entry.path.as_ref()); + file_name(&path) } } } - None => file_name(entry.path.as_ref()), + None => file_name(entry.path.as_std_path()), } } @@ -2842,7 +2841,7 @@ impl OutlinePanel { } let mut new_children_count = - HashMap::, FsChildren>>::default(); + HashMap::, FsChildren>>::default(); let worktree_entries = new_worktree_entries .into_iter() @@ -3518,17 +3517,17 @@ impl OutlinePanel { .buffer_snapshot_for_id(*buffer_id, cx) .and_then(|buffer_snapshot| { let file = File::from_dyn(buffer_snapshot.file())?; - file.worktree.read(cx).absolutize(&file.path).ok() + Some(file.worktree.read(cx).absolutize(&file.path)) }), PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { worktree_id, entry, .. - })) => self - .project - .read(cx) - .worktree_for_id(*worktree_id, cx)? - .read(cx) - .absolutize(&entry.path) - .ok(), + })) => Some( + self.project + .read(cx) + .worktree_for_id(*worktree_id, cx)? + .read(cx) + .absolutize(&entry.path), + ), PanelEntry::FoldedDirs(FoldedDirsEntry { worktree_id, entries: dirs, @@ -3537,13 +3536,13 @@ impl OutlinePanel { self.project .read(cx) .worktree_for_id(*worktree_id, cx) - .and_then(|worktree| worktree.read(cx).absolutize(&entry.path).ok()) + .map(|worktree| worktree.read(cx).absolutize(&entry.path)) }), PanelEntry::Search(_) | PanelEntry::Outline(..) => None, } } - fn relative_path(&self, entry: &FsEntry, cx: &App) -> Option> { + fn relative_path(&self, entry: &FsEntry, cx: &App) -> Option> { match entry { FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. 
}) => { let buffer_snapshot = self.buffer_snapshot_for_id(*buffer_id, cx)?; @@ -3627,7 +3626,7 @@ impl OutlinePanel { #[derive(Debug)] struct ParentStats { - path: Arc, + path: Arc, folded: bool, expanded: bool, depth: usize, @@ -4023,8 +4022,9 @@ impl OutlinePanel { let id = state.entries.len(); match &entry { PanelEntry::Fs(fs_entry) => { - if let Some(file_name) = - self.relative_path(fs_entry, cx).as_deref().map(file_name) + if let Some(file_name) = self + .relative_path(fs_entry, cx) + .and_then(|path| Some(path.file_name()?.to_string())) { state .match_candidates @@ -4477,21 +4477,19 @@ impl OutlinePanel { let item_text_chars = match entry { PanelEntry::Fs(FsEntry::ExternalFile(external)) => self .buffer_snapshot_for_id(external.buffer_id, cx) - .and_then(|snapshot| { - Some(snapshot.file()?.path().file_name()?.to_string_lossy().len()) - }) + .and_then(|snapshot| Some(snapshot.file()?.path().file_name()?.len())) .unwrap_or_default(), PanelEntry::Fs(FsEntry::Directory(directory)) => directory .entry .path .file_name() - .map(|name| name.to_string_lossy().len()) + .map(|name| name.len()) .unwrap_or_default(), PanelEntry::Fs(FsEntry::File(file)) => file .entry .path .file_name() - .map(|name| name.to_string_lossy().len()) + .map(|name| name.len()) .unwrap_or_default(), PanelEntry::FoldedDirs(folded_dirs) => { folded_dirs @@ -4500,11 +4498,11 @@ impl OutlinePanel { .map(|dir| { dir.path .file_name() - .map(|name| name.to_string_lossy().len()) + .map(|name| name.len()) .unwrap_or_default() }) .sum::() - + folded_dirs.entries.len().saturating_sub(1) * MAIN_SEPARATOR_STR.len() + + folded_dirs.entries.len().saturating_sub(1) * "/".len() } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self .excerpt_label(excerpt.buffer_id, &excerpt.range, cx) @@ -4799,7 +4797,7 @@ fn workspace_active_editor( } fn back_to_common_visited_parent( - visited_dirs: &mut Vec<(ProjectEntryId, Arc)>, + visited_dirs: &mut Vec<(ProjectEntryId, Arc)>, worktree_id: &WorktreeId, new_entry: &Entry, ) -> Option<(WorktreeId, ProjectEntryId)> { @@ -5281,16 +5279,15 @@ mod tests { }); }); - let all_matches = format!( - r#"{root}/ + let all_matches = r#"rust-analyzer/ crates/ ide/src/ inlay_hints/ fn_lifetime_fn.rs - search: match config.param_names_for_lifetime_elision_hints {{ - search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {{ - search: Some(it) if config.param_names_for_lifetime_elision_hints => {{ - search: InlayHintsConfig {{ param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }}, + search: match config.param_names_for_lifetime_elision_hints { + search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { + search: Some(it) if config.param_names_for_lifetime_elision_hints => { + search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, inlay_hints.rs search: pub param_names_for_lifetime_elision_hints: bool, search: param_names_for_lifetime_elision_hints: self @@ -5302,7 +5299,7 @@ mod tests { search: param_names_for_lifetime_elision_hints: true, config.rs search: param_names_for_lifetime_elision_hints: self"# - ); + .to_string(); let select_first_in_all_matches = |line_to_select: &str| { assert!(all_matches.contains(line_to_select)); @@ -5360,7 +5357,7 @@ mod tests { cx, ), format!( - r#"{root}/ + r#"rust-analyzer/ crates/ ide/src/ inlay_hints/ @@ -5430,7 +5427,7 @@ mod tests { cx, ), format!( - r#"{root}/ + r#"rust-analyzer/ crates/ ide/src/{SELECTED_MARKER} rust-analyzer/src/ @@ -5513,16 +5510,15 @@ mod tests { ); 
}); }); - let all_matches = format!( - r#"{root}/ + let all_matches = r#"rust-analyzer/ crates/ ide/src/ inlay_hints/ fn_lifetime_fn.rs - search: match config.param_names_for_lifetime_elision_hints {{ - search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {{ - search: Some(it) if config.param_names_for_lifetime_elision_hints => {{ - search: InlayHintsConfig {{ param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }}, + search: match config.param_names_for_lifetime_elision_hints { + search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { + search: Some(it) if config.param_names_for_lifetime_elision_hints => { + search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, inlay_hints.rs search: pub param_names_for_lifetime_elision_hints: bool, search: param_names_for_lifetime_elision_hints: self @@ -5534,7 +5530,7 @@ mod tests { search: param_names_for_lifetime_elision_hints: true, config.rs search: param_names_for_lifetime_elision_hints: self"# - ); + .to_string(); cx.executor() .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); @@ -5653,16 +5649,15 @@ mod tests { ); }); }); - let all_matches = format!( - r#"{root}/ + let all_matches = r#"rust-analyzer/ crates/ ide/src/ inlay_hints/ fn_lifetime_fn.rs - search: match config.param_names_for_lifetime_elision_hints {{ - search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {{ - search: Some(it) if config.param_names_for_lifetime_elision_hints => {{ - search: InlayHintsConfig {{ param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }}, + search: match config.param_names_for_lifetime_elision_hints { + search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { + search: Some(it) if config.param_names_for_lifetime_elision_hints => { + search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, inlay_hints.rs search: pub param_names_for_lifetime_elision_hints: bool, search: param_names_for_lifetime_elision_hints: self @@ -5674,7 +5669,7 @@ mod tests { search: param_names_for_lifetime_elision_hints: true, config.rs search: param_names_for_lifetime_elision_hints: self"# - ); + .to_string(); let select_first_in_all_matches = |line_to_select: &str| { assert!(all_matches.contains(line_to_select)); all_matches.replacen( @@ -5904,15 +5899,13 @@ mod tests { cx, ), format!( - r#"{}/ + r#"one/ a.txt search: aaa aaa <==== selected search: aaa aaa -{}/ +two/ b.txt search: a aaa"#, - path!("/root/one"), - path!("/root/two"), ), ); }); @@ -5934,13 +5927,11 @@ mod tests { cx, ), format!( - r#"{}/ + r#"one/ a.txt <==== selected -{}/ +two/ b.txt search: a aaa"#, - path!("/root/one"), - path!("/root/two"), ), ); }); @@ -5962,11 +5953,9 @@ mod tests { cx, ), format!( - r#"{}/ + r#"one/ a.txt -{}/ <==== selected"#, - path!("/root/one"), - path!("/root/two"), +two/ <==== selected"#, ), ); }); @@ -5987,13 +5976,11 @@ mod tests { cx, ), format!( - r#"{}/ + r#"one/ a.txt -{}/ <==== selected +two/ <==== selected b.txt search: a aaa"#, - path!("/root/one"), - path!("/root/two"), ) ); }); @@ -6455,7 +6442,7 @@ outline: struct OutlineEntryExcerpt cx, ), format!( - r#"{root}/ + r#"frontend-project/ public/lottie/ syntax-tree.json search: {{ "something": "static" }} <==== selected @@ -6494,7 +6481,7 @@ outline: struct OutlineEntryExcerpt cx, ), format!( - r#"{root}/ + r#"frontend-project/ public/lottie/ syntax-tree.json search: {{ "something": "static" }} @@ -6524,7 +6511,7 @@ outline: 
struct OutlineEntryExcerpt cx, ), format!( - r#"{root}/ + r#"frontend-project/ public/lottie/ syntax-tree.json search: {{ "something": "static" }} @@ -6558,7 +6545,7 @@ outline: struct OutlineEntryExcerpt cx, ), format!( - r#"{root}/ + r#"frontend-project/ public/lottie/ syntax-tree.json search: {{ "something": "static" }} @@ -6591,7 +6578,7 @@ outline: struct OutlineEntryExcerpt cx, ), format!( - r#"{root}/ + r#"frontend-project/ public/lottie/ syntax-tree.json search: {{ "something": "static" }} @@ -6649,6 +6636,7 @@ outline: struct OutlineEntryExcerpt selected_entry: Option<&PanelEntry>, cx: &mut App, ) -> String { + let project = project.read(cx); let mut display_string = String::new(); for entry in cached_entries { if !display_string.is_empty() { @@ -6663,44 +6651,39 @@ outline: struct OutlineEntryExcerpt panic!("Did not cover external files with tests") } FsEntry::Directory(directory) => { - match project - .read(cx) + let path = if let Some(worktree) = project .worktree_for_id(directory.worktree_id, cx) - .and_then(|worktree| { - if worktree.read(cx).root_entry() == Some(&directory.entry.entry) { - Some(worktree.read(cx).abs_path()) - } else { - None - } + .filter(|worktree| { + worktree.read(cx).root_entry() == Some(&directory.entry.entry) }) { - Some(root_path) => format!( - "{}/{}", - root_path.display(), - directory.entry.path.display(), - ), - None => format!( - "{}/", - directory - .entry - .path - .file_name() - .unwrap_or_default() - .to_string_lossy() - ), - } + worktree + .read(cx) + .root_name() + .join(&directory.entry.path) + .as_str() + .to_string() + } else { + directory + .entry + .path + .file_name() + .unwrap_or_default() + .to_string() + }; + format!("{path}/") } FsEntry::File(file) => file .entry .path .file_name() - .map(|name| name.to_string_lossy().to_string()) + .map(|name| name.to_string()) .unwrap_or_default(), }, PanelEntry::FoldedDirs(folded_dirs) => folded_dirs .entries .iter() .filter_map(|dir| dir.path.file_name()) - .map(|name| name.to_string_lossy().to_string() + "/") + .map(|name| name.to_string() + "/") .collect(), PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Excerpt(_) => continue, diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index af1b97c1cb9367ab73562ae4ebd044a2b79d1604..b860cc1b02207d3d489624028f35d796bfc688fc 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -5,6 +5,7 @@ use std::path::{Path, PathBuf}; use std::sync::OnceLock; pub use util::paths::home_dir; +use util::rel_path::RelPath; /// A default editorconfig file name to use when resolving project settings. pub const EDITORCONFIG_NAME: &str = ".editorconfig"; @@ -29,13 +30,13 @@ static CURRENT_DATA_DIR: OnceLock = OnceLock::new(); static CONFIG_DIR: OnceLock = OnceLock::new(); /// Returns the relative path to the zed_server directory on the ssh host. -pub fn remote_server_dir_relative() -> &'static Path { - Path::new(".zed_server") +pub fn remote_server_dir_relative() -> &'static RelPath { + RelPath::new(".zed_server").unwrap() } /// Returns the relative path to the zed_wsl_server directory on the wsl host. -pub fn remote_wsl_server_dir_relative() -> &'static Path { - Path::new(".zed_wsl_server") +pub fn remote_wsl_server_dir_relative() -> &'static RelPath { + RelPath::new(".zed_wsl_server").unwrap() } /// Sets a custom directory for all user data, overriding the default data directory. 
@@ -398,28 +399,28 @@ pub fn remote_servers_dir() -> &'static PathBuf {
 }
 
 /// Returns the relative path to a `.zed` folder within a project.
-pub fn local_settings_folder_relative_path() -> &'static Path {
-    Path::new(".zed")
+pub fn local_settings_folder_name() -> &'static str {
+    ".zed"
 }
 
 /// Returns the relative path to a `.vscode` folder within a project.
-pub fn local_vscode_folder_relative_path() -> &'static Path {
-    Path::new(".vscode")
+pub fn local_vscode_folder_name() -> &'static str {
+    ".vscode"
 }
 
 /// Returns the relative path to a `settings.json` file within a project.
-pub fn local_settings_file_relative_path() -> &'static Path {
-    Path::new(".zed/settings.json")
+pub fn local_settings_file_relative_path() -> &'static RelPath {
+    RelPath::new(".zed/settings.json").unwrap()
 }
 
 /// Returns the relative path to a `tasks.json` file within a project.
-pub fn local_tasks_file_relative_path() -> &'static Path {
-    Path::new(".zed/tasks.json")
+pub fn local_tasks_file_relative_path() -> &'static RelPath {
+    RelPath::new(".zed/tasks.json").unwrap()
 }
 
 /// Returns the relative path to a `.vscode/tasks.json` file within a project.
-pub fn local_vscode_tasks_file_relative_path() -> &'static Path {
-    Path::new(".vscode/tasks.json")
+pub fn local_vscode_tasks_file_relative_path() -> &'static RelPath {
+    RelPath::new(".vscode/tasks.json").unwrap()
 }
 
 pub fn debug_task_file_name() -> &'static str {
@@ -432,13 +433,13 @@ pub fn task_file_name() -> &'static str {
 
 /// Returns the relative path to a `debug.json` file within a project.
 /// .zed/debug.json
-pub fn local_debug_file_relative_path() -> &'static Path {
-    Path::new(".zed/debug.json")
+pub fn local_debug_file_relative_path() -> &'static RelPath {
+    RelPath::new(".zed/debug.json").unwrap()
 }
 
 /// Returns the relative path to a `.vscode/launch.json` file within a project.
-pub fn local_vscode_launch_file_relative_path() -> &'static Path {
-    Path::new(".vscode/launch.json")
+pub fn local_vscode_launch_file_relative_path() -> &'static RelPath {
+    RelPath::new(".vscode/launch.json").unwrap()
 }
 
 pub fn user_ssh_config_file() -> PathBuf {
diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs
index 32e39d466f1a236da72b746fb4bf2a24b7300385..b9c40e814c4caf760123cf460e2eed7298f9e951 100644
--- a/crates/prettier/src/prettier.rs
+++ b/crates/prettier/src/prettier.rs
@@ -12,7 +12,7 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
-use util::paths::PathMatcher;
+use util::paths::{PathMatcher, PathStyle};
 
 #[derive(Debug, Clone)]
 pub enum Prettier {
@@ -119,7 +119,7 @@ impl Prettier {
                         None
                     }
                 }).any(|workspace_definition| {
-                    workspace_definition == subproject_path.to_string_lossy() || PathMatcher::new(&[workspace_definition]).ok().is_some_and(|path_matcher| path_matcher.is_match(subproject_path))
+                    workspace_definition == subproject_path.to_string_lossy() || PathMatcher::new(&[workspace_definition], PathStyle::local()).ok().is_some_and(|path_matcher| path_matcher.is_match(subproject_path))
                 }) {
                     anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Path {path_to_check:?} is the workspace root for project in {closest_package_json_path:?}, but it has no prettier installed");
                     log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {closest_package_json_path:?}");
@@ -215,11 +215,14 @@ impl Prettier {
                             })
                             .any(|workspace_definition| {
                                 workspace_definition == subproject_path.to_string_lossy()
-                                    || PathMatcher::new(&[workspace_definition])
-                                        .ok()
-                                        .is_some_and(|path_matcher| {
-                                            path_matcher.is_match(subproject_path)
-                                        })
+                                    || PathMatcher::new(
+                                        &[workspace_definition],
+                                        PathStyle::local(),
+                                    )
+                                    .ok()
+                                    .is_some_and(
+                                        |path_matcher| path_matcher.is_match(subproject_path),
+                                    )
                             })
                         {
                             let workspace_ignore = path_to_check.join(".prettierignore");
diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml
index 1af622df351f05721c850debebfe5b3e84284dad..39dc0621732bfd42b3a24735ad803915fbf2885c 100644
--- a/crates/project/Cargo.toml
+++ b/crates/project/Cargo.toml
@@ -58,7 +58,6 @@ lsp.workspace = true
 markdown.workspace = true
 node_runtime.workspace = true
 parking_lot.workspace = true
-pathdiff.workspace = true
 paths.workspace = true
 postage.workspace = true
 prettier.workspace = true
diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs
index 3d270bcb0db13e9c616687ce5d40cbc48bd4cbb9..c11a142b57634e0bc6d4cca66c893cc19d599193 100644
--- a/crates/project/src/agent_server_store.rs
+++ b/crates/project/src/agent_server_store.rs
@@ -16,10 +16,7 @@ use gpui::{
 };
 use node_runtime::NodeRuntime;
 use remote::RemoteClient;
-use rpc::{
-    AnyProtoClient, TypedEnvelope,
-    proto::{self, ToProto},
-};
+use rpc::{AnyProtoClient, TypedEnvelope, proto};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::{SettingsContent, SettingsStore};
@@ -845,7 +842,7 @@ impl ExternalAgentServer for LocalGemini {
         // Gemini CLI doesn't seem to have a dedicated invocation for logging in--we just run it normally without any arguments.
         let login = task::SpawnInTerminal {
-            command: Some(command.path.clone().to_proto()),
+            command: Some(command.path.to_string_lossy().to_string()),
             args: command.args.clone(),
             env: command.env.clone().unwrap_or_default(),
             label: "gemini /auth".into(),
@@ -854,7 +851,7 @@ impl ExternalAgentServer for LocalGemini {
             command.env.get_or_insert_default().extend(extra_env);
             command.args.push("--experimental-acp".into());
-            Ok((command, root_dir.to_proto(), Some(login)))
+            Ok((command, root_dir.to_string_lossy().to_string(), Some(login)))
         })
     }
 
@@ -922,7 +919,7 @@ impl ExternalAgentServer for LocalClaudeCode {
                         path.strip_suffix("/@zed-industries/claude-code-acp/dist/index.js")
                     })
                     .map(|path_prefix| task::SpawnInTerminal {
-                        command: Some(command.path.clone().to_proto()),
+                        command: Some(command.path.to_string_lossy().to_string()),
                         args: vec![
                             Path::new(path_prefix)
                                 .join("@anthropic-ai/claude-code/cli.js")
@@ -938,7 +935,7 @@ impl ExternalAgentServer for LocalClaudeCode {
             };
 
             command.env.get_or_insert_default().extend(extra_env);
-            Ok((command, root_dir.to_proto(), login))
+            Ok((command, root_dir.to_string_lossy().to_string(), login))
         })
     }
 
@@ -977,7 +974,7 @@ impl ExternalAgentServer for LocalCustomAgent {
             env.extend(command.env.unwrap_or_default());
             env.extend(extra_env);
             command.env = Some(env);
-            Ok((command, root_dir.to_proto(), None))
+            Ok((command, root_dir.to_string_lossy().to_string(), None))
         })
     }
 
diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs
index 07f8e0c95cf8551803d5f5828703dbec090fcedb..8a4d4f7918c12abd94cf7bf8fc97c939db7ce033 100644
--- a/crates/project/src/buffer_store.rs
+++ b/crates/project/src/buffer_store.rs
@@ -21,12 +21,12 @@ use language::{
 };
 use rpc::{
     AnyProtoClient, ErrorCode, ErrorExt as _, TypedEnvelope,
-    proto::{self, ToProto},
+    proto::{self},
 };
 use smol::channel::Receiver;
-use std::{io, path::Path, pin::pin, sync::Arc, time::Instant};
+use std::{io, pin::pin, sync::Arc, time::Instant};
 use text::BufferId;
-use util::{ResultExt as _, TryFutureExt, debug_panic, maybe};
+use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, rel_path::RelPath};
 use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
 
 /// A set of open buffers.
@@ -292,7 +292,7 @@ impl RemoteBufferStore {
     fn open_buffer(
         &self,
-        path: Arc<Path>,
+        path: Arc<RelPath>,
         worktree: Entity<Worktree>,
         cx: &mut Context<BufferStore>,
     ) -> Task<Result<Entity<Buffer>>> {
@@ -370,7 +370,7 @@ impl LocalBufferStore {
         &self,
         buffer_handle: Entity<Buffer>,
         worktree: Entity<Worktree>,
-        path: Arc<Path>,
+        path: Arc<RelPath>,
         mut has_changed_file: bool,
         cx: &mut Context<BufferStore>,
     ) -> Task<Result<()>> {
@@ -389,7 +389,7 @@ impl LocalBufferStore {
         }
 
         let save = worktree.update(cx, |worktree, cx| {
-            worktree.write_file(path.as_ref(), text, line_ending, cx)
+            worktree.write_file(path, text, line_ending, cx)
         });
 
         cx.spawn(async move |this, cx| {
@@ -443,7 +443,7 @@ impl LocalBufferStore {
     fn local_worktree_entries_changed(
         this: &mut BufferStore,
         worktree_handle: &Entity<Worktree>,
-        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
+        changes: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
         cx: &mut Context<BufferStore>,
     ) {
         let snapshot = worktree_handle.read(cx).snapshot();
@@ -462,7 +462,7 @@ impl LocalBufferStore {
     fn local_worktree_entry_changed(
         this: &mut BufferStore,
         entry_id: ProjectEntryId,
-        path: &Arc<Path>,
+        path: &Arc<RelPath>,
         worktree: &Entity<Worktree>,
         snapshot: &worktree::Snapshot,
         cx: &mut Context<BufferStore>,
@@ -615,7 +615,7 @@ impl LocalBufferStore {
     fn open_buffer(
         &self,
-        path: Arc<Path>,
+        path: Arc<RelPath>,
         worktree: Entity<Worktree>,
         cx: &mut Context<BufferStore>,
     ) -> Task<Result<Entity<Buffer>>> {
@@ -1402,8 +1402,9 @@ impl BufferStore {
             .await?;
 
         let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
-        if let Some(new_path) = envelope.payload.new_path {
-            let new_path = ProjectPath::from_proto(new_path);
+        if let Some(new_path) = envelope.payload.new_path
+            && let Some(new_path) = ProjectPath::from_proto(new_path)
+        {
             this.update(&mut cx, |this, cx| {
                 this.save_buffer_as(buffer.clone(), new_path, cx)
             })?
diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs
index 364128ae4f8cf5703bf7987117b0109462fa4e3c..e4673744da6883a5aa298c2fca3a39f556cb1357 100644
--- a/crates/project/src/context_server_store.rs
+++ b/crates/project/src/context_server_store.rs
@@ -1,7 +1,7 @@
 pub mod extension;
 pub mod registry;
 
-use std::{path::Path, sync::Arc};
+use std::sync::Arc;
 
 use anyhow::{Context as _, Result};
 use collections::{HashMap, HashSet};
@@ -10,7 +10,7 @@ use futures::{FutureExt as _, future::join_all};
 use gpui::{App, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, actions};
 use registry::ContextServerDescriptorRegistry;
 use settings::{Settings as _, SettingsStore};
-use util::ResultExt as _;
+use util::{ResultExt as _, rel_path::RelPath};
 
 use crate::{
     Project,
@@ -510,7 +510,7 @@ impl ContextServerStore {
             .next()
             .map(|worktree| settings::SettingsLocation {
                 worktree_id: worktree.read(cx).id(),
-                path: Path::new(""),
+                path: RelPath::empty(),
             });
         &ProjectSettings::get(location, cx).context_servers
     }
diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs
index c47e5d35d5948eb0c176bbc6d14281faa3f60451..42663ab9852a5dc2e9850d20dd20940c6723d03c 100644
--- a/crates/project/src/debugger/breakpoint_store.rs
+++ b/crates/project/src/debugger/breakpoint_store.rs
@@ -387,7 +387,7 @@ impl BreakpointStore {
     pub fn abs_path_from_buffer(buffer: &Entity<Buffer>, cx: &App) -> Option<Arc<Path>> {
         worktree::File::from_dyn(buffer.read(cx).file())
-            .and_then(|file| file.worktree.read(cx).absolutize(&file.path).ok())
+            .map(|file| file.worktree.read(cx).absolutize(&file.path))
             .map(Arc::<Path>::from)
     }
 
@@ -794,7 +794,7 @@ impl BreakpointStore {
                 .update(cx, |this, cx| {
                     let path = ProjectPath {
                         worktree_id: worktree.read(cx).id(),
-                        path: relative_path.into(),
+                        path: relative_path,
                     };
this.open_buffer(path, cx) })? diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index d96e5c220fcc98413a42dc46e0889004300d1e2f..923de3190cdf8d7f6bf4536a8ca8c67ebb924513 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -50,7 +50,7 @@ use std::{ sync::{Arc, Once}, }; use task::{DebugScenario, SpawnInTerminal, TaskContext, TaskTemplate}; -use util::ResultExt as _; +use util::{ResultExt as _, rel_path::RelPath}; use worktree::Worktree; #[derive(Debug)] @@ -206,7 +206,7 @@ impl DapStore { let settings_location = SettingsLocation { worktree_id: worktree.read(cx).id(), - path: Path::new(""), + path: RelPath::empty(), }; let dap_settings = ProjectSettings::get(Some(settings_location), cx) .dap @@ -943,15 +943,13 @@ impl dap::adapters::DapDelegate for DapAdapterDelegate { fn toolchain_store(&self) -> Arc { self.toolchain_store.clone() } - async fn read_text_file(&self, path: PathBuf) -> Result { + + async fn read_text_file(&self, path: &RelPath) -> Result { let entry = self .worktree - .entry_for_path(&path) + .entry_for_path(path) .with_context(|| format!("no worktree entry for path {path:?}"))?; - let abs_path = self - .worktree - .absolutize(&entry.path) - .with_context(|| format!("cannot absolutize path {path:?}"))?; + let abs_path = self.worktree.absolutize(&entry.path); self.fs.load(&abs_path).await } diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index aefdd8acb18841eecc9ec7aa5613126b44d7a429..168549d4eb7ae11b28bcf944f9dfe2bb26946748 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -20,7 +20,7 @@ use futures::{ stream::FuturesOrdered, }; use git::{ - BuildPermalinkParams, GitHostingProviderRegistry, Oid, WORK_DIRECTORY_REPO_PATH, + BuildPermalinkParams, GitHostingProviderRegistry, Oid, blame::Blame, parse_git_remote_url, repository::{ @@ -45,7 +45,7 @@ use parking_lot::Mutex; use postage::stream::Stream as _; use rpc::{ AnyProtoClient, TypedEnvelope, - proto::{self, FromProto, ToProto, git_reset, split_repository_update}, + proto::{self, git_reset, split_repository_update}, }; use serde::Deserialize; use std::{ @@ -63,7 +63,12 @@ use std::{ }; use sum_tree::{Edit, SumTree, TreeSet}; use text::{Bias, BufferId}; -use util::{ResultExt, debug_panic, paths::SanitizedPath, post_inc}; +use util::{ + ResultExt, debug_panic, + paths::{PathStyle, SanitizedPath}, + post_inc, + rel_path::RelPath, +}; use worktree::{ File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId, UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree, @@ -189,7 +194,7 @@ impl StatusEntry { }; proto::StatusEntry { - repo_path: self.repo_path.as_ref().to_proto(), + repo_path: self.repo_path.to_proto(), simple_status, status: Some(status_to_proto(self.status)), } @@ -200,7 +205,7 @@ impl TryFrom for StatusEntry { type Error = anyhow::Error; fn try_from(value: proto::StatusEntry) -> Result { - let repo_path = RepoPath(Arc::::from_proto(value.repo_path)); + let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?; let status = status_from_proto(value.simple_status, value.status)?; Ok(Self { repo_path, status }) } @@ -240,6 +245,7 @@ pub struct RepositorySnapshot { pub id: RepositoryId, pub statuses_by_path: SumTree, pub work_directory_abs_path: Arc, + pub path_style: PathStyle, pub branch: Option, pub head_commit: Option, pub scan_id: u64, @@ -947,9 +953,7 @@ impl GitStore { { return 
Task::ready(Err(anyhow!("no permalink available"))); } - let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else { - return Task::ready(Err(anyhow!("no permalink available"))); - }; + let file_path = file.worktree.read(cx).absolutize(&file.path); return cx.spawn(async move |cx| { let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?; get_permalink_in_rust_registry_src(provider_registry, file_path, selection) @@ -985,9 +989,7 @@ impl GitStore { parse_git_remote_url(provider_registry, &origin_url) .context("parsing Git remote URL")?; - let path = repo_path.to_str().with_context(|| { - format!("converting repo path {repo_path:?} to string") - })?; + let path = repo_path.as_str(); Ok(provider.build_permalink( remote, @@ -1313,7 +1315,7 @@ impl GitStore { }); if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) { let recv = repo.update(cx, |repo, cx| { - log::debug!("hunks changed for {}", path.display()); + log::debug!("hunks changed for {}", path.as_str()); repo.spawn_set_index_text_job( path, new_index_text.as_ref().map(|rope| rope.to_string()), @@ -1475,6 +1477,7 @@ impl GitStore { mut cx: AsyncApp, ) -> Result<()> { this.update(&mut cx, |this, cx| { + let path_style = this.worktree_store.read(cx).path_style(); let mut update = envelope.payload; let id = RepositoryId::from_proto(update.id); @@ -1488,6 +1491,7 @@ impl GitStore { Repository::remote( id, Path::new(&update.abs_path).into(), + path_style, ProjectId(update.project_id), client, git_store, @@ -1681,9 +1685,8 @@ impl GitStore { .payload .paths .into_iter() - .map(PathBuf::from) - .map(RepoPath::new) - .collect(); + .map(|path| RepoPath::new(&path)) + .collect::>>()?; repository_handle .update(&mut cx, |repository_handle, cx| { @@ -1705,9 +1708,8 @@ impl GitStore { .payload .paths .into_iter() - .map(PathBuf::from) - .map(RepoPath::new) - .collect(); + .map(|path| RepoPath::new(&path)) + .collect::>>()?; repository_handle .update(&mut cx, |repository_handle, cx| { @@ -1730,9 +1732,8 @@ impl GitStore { .payload .paths .into_iter() - .map(PathBuf::from) - .map(RepoPath::new) - .collect(); + .map(|path| RepoPath::new(&path)) + .collect::>>()?; repository_handle .update(&mut cx, |repository_handle, cx| { @@ -1804,7 +1805,7 @@ impl GitStore { ) -> Result { let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; - let repo_path = RepoPath::from_str(&envelope.payload.path); + let repo_path = RepoPath::from_proto(&envelope.payload.path)?; repository_handle .update(&mut cx, |repository_handle, cx| { @@ -2005,7 +2006,7 @@ impl GitStore { .files .into_iter() .map(|file| proto::CommitFile { - path: file.path.to_string(), + path: file.path.to_proto(), old_text: file.old_text, new_text: file.new_text, }) @@ -2045,8 +2046,8 @@ impl GitStore { .payload .paths .iter() - .map(|s| RepoPath::from_str(s)) - .collect(); + .map(|s| RepoPath::from_proto(s)) + .collect::>>()?; repository_handle .update(&mut cx, |repository_handle, cx| { @@ -2332,9 +2333,10 @@ impl GitStore { fn process_updated_entries( &self, worktree: &Entity, - updated_entries: &[(Arc, ProjectEntryId, PathChange)], + updated_entries: &[(Arc, ProjectEntryId, PathChange)], cx: &mut App, ) -> Task, Vec>> { + let path_style = worktree.read(cx).path_style(); let mut repo_paths = self .repositories .values() @@ -2349,7 +2351,7 @@ impl GitStore { let entries = entries .into_iter() - .filter_map(|path| 
worktree.absolutize(&path).ok()) + .map(|path| worktree.absolutize(&path)) .collect::>(); let executor = cx.background_executor().clone(); @@ -2369,8 +2371,9 @@ impl GitStore { let mut paths = Vec::new(); // All paths prefixed by a given repo will constitute a continuous range. while let Some(path) = entries.get(ix) - && let Some(repo_path) = - RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path) + && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner( + &repo_path, path, path_style, + ) { paths.push((repo_path, ix)); ix += 1; @@ -2764,7 +2767,7 @@ impl RepositoryId { } impl RepositorySnapshot { - fn empty(id: RepositoryId, work_directory_abs_path: Arc) -> Self { + fn empty(id: RepositoryId, work_directory_abs_path: Arc, path_style: PathStyle) -> Self { Self { id, statuses_by_path: Default::default(), @@ -2776,6 +2779,7 @@ impl RepositorySnapshot { remote_origin_url: None, remote_upstream_url: None, stash_entries: Default::default(), + path_style, } } @@ -2798,7 +2802,7 @@ impl RepositorySnapshot { merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, id: self.id.to_proto(), - abs_path: self.work_directory_abs_path.to_proto(), + abs_path: self.work_directory_abs_path.to_string_lossy().to_string(), entry_ids: vec![self.id.to_proto()], scan_id: self.scan_id, is_last_update: true, @@ -2836,13 +2840,13 @@ impl RepositorySnapshot { current_new_entry = new_statuses.next(); } Ordering::Greater => { - removed_statuses.push(old_entry.repo_path.as_ref().to_proto()); + removed_statuses.push(old_entry.repo_path.to_proto()); current_old_entry = old_statuses.next(); } } } (None, Some(old_entry)) => { - removed_statuses.push(old_entry.repo_path.as_ref().to_proto()); + removed_statuses.push(old_entry.repo_path.to_proto()); current_old_entry = old_statuses.next(); } (Some(new_entry), None) => { @@ -2862,12 +2866,12 @@ impl RepositorySnapshot { .merge .conflicted_paths .iter() - .map(|path| path.as_ref().to_proto()) + .map(|path| path.to_proto()) .collect(), merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, id: self.id.to_proto(), - abs_path: self.work_directory_abs_path.to_proto(), + abs_path: self.work_directory_abs_path.to_string_lossy().to_string(), entry_ids: vec![], scan_id: self.scan_id, is_last_update: true, @@ -2895,18 +2899,19 @@ impl RepositorySnapshot { } pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option { - Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path) + Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style) } #[inline] fn abs_path_to_repo_path_inner( work_directory_abs_path: &Path, abs_path: &Path, + path_style: PathStyle, ) -> Option { abs_path .strip_prefix(&work_directory_abs_path) - .map(RepoPath::from) .ok() + .and_then(|path| RepoPath::from_std_path(path, path_style).ok()) } pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool { @@ -3032,7 +3037,8 @@ impl Repository { git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone()); + let snapshot = + RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local()); Repository { this: cx.weak_entity(), git_store, @@ -3058,12 +3064,13 @@ impl Repository { fn remote( id: RepositoryId, work_directory_abs_path: Arc, + path_style: PathStyle, project_id: ProjectId, client: AnyProtoClient, git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = 
RepositorySnapshot::empty(id, work_directory_abs_path); + let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style); Self { this: cx.weak_entity(), snapshot, @@ -3107,12 +3114,11 @@ impl Repository { let buffer_store = git_store.buffer_store.read(cx); let buffer = buffer_store.get(*buffer_id)?; let file = File::from_dyn(buffer.read(cx).file())?; - let abs_path = - file.worktree.read(cx).absolutize(&file.path).ok()?; + let abs_path = file.worktree.read(cx).absolutize(&file.path); let repo_path = this.abs_path_to_repo_path(&abs_path)?; log::debug!( "start reload diff bases for repo path {}", - repo_path.0.display() + repo_path.as_str() ); diff_state.update(cx, |diff_state, _| { let has_unstaged_diff = diff_state @@ -3335,12 +3341,15 @@ impl Repository { pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option { let git_store = self.git_store.upgrade()?; let worktree_store = git_store.read(cx).worktree_store.read(cx); - let abs_path = self.snapshot.work_directory_abs_path.join(&path.0); + let abs_path = self + .snapshot + .work_directory_abs_path + .join(path.as_std_path()); let abs_path = SanitizedPath::new(&abs_path); let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?; Some(ProjectPath { worktree_id: worktree.read(cx).id(), - path: relative_path.into(), + path: relative_path, }) } @@ -3464,10 +3473,7 @@ impl Repository { project_id: project_id.0, repository_id: id.to_proto(), commit, - paths: paths - .into_iter() - .map(|p| p.to_string_lossy().to_string()) - .collect(), + paths: paths.into_iter().map(|p| p.to_proto()).collect(), }) .await?; @@ -3557,12 +3563,14 @@ impl Repository { files: response .files .into_iter() - .map(|file| CommitFile { - path: Path::new(&file.path).into(), - old_text: file.old_text, - new_text: file.new_text, + .map(|file| { + Ok(CommitFile { + path: RepoPath::from_proto(&file.path)?, + old_text: file.old_text, + new_text: file.new_text, + }) }) - .collect(), + .collect::>>()?, }) } } @@ -3622,7 +3630,7 @@ impl Repository { repository_id: id.to_proto(), paths: entries .into_iter() - .map(|repo_path| repo_path.as_ref().to_proto()) + .map(|repo_path| repo_path.to_proto()) .collect(), }) .await @@ -3688,7 +3696,7 @@ impl Repository { repository_id: id.to_proto(), paths: entries .into_iter() - .map(|repo_path| repo_path.as_ref().to_proto()) + .map(|repo_path| repo_path.to_proto()) .collect(), }) .await @@ -3752,7 +3760,7 @@ impl Repository { repository_id: id.to_proto(), paths: entries .into_iter() - .map(|repo_path| repo_path.as_ref().to_proto()) + .map(|repo_path| repo_path.to_proto()) .collect(), }) .await @@ -4154,7 +4162,7 @@ impl Repository { Some(GitJobKey::WriteIndex(path.clone())), None, move |git_repo, mut cx| async move { - log::debug!("start updating index text for buffer {}", path.display()); + log::debug!("start updating index text for buffer {}", path.as_str()); match git_repo { RepositoryState::Local { backend, @@ -4170,13 +4178,13 @@ impl Repository { .request(proto::SetIndexText { project_id: project_id.0, repository_id: id.to_proto(), - path: path.as_ref().to_proto(), + path: path.to_proto(), text: content, }) .await?; } } - log::debug!("finish updating index text for buffer {}", path.display()); + log::debug!("finish updating index text for buffer {}", path.as_str()); if let Some(hunk_staging_operation_count) = hunk_staging_operation_count { let project_path = this @@ -4439,7 +4447,7 @@ impl Repository { update .current_merge_conflicts .into_iter() - .map(|path| 
RepoPath(Path::new(&path).into())), + .filter_map(|path| RepoPath::from_proto(&path).log_err()), ); self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch); self.snapshot.head_commit = update @@ -4460,7 +4468,11 @@ impl Repository { let edits = update .removed_statuses .into_iter() - .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path)))) + .filter_map(|path| { + Some(sum_tree::Edit::Remove(PathKey( + RelPath::from_proto(&path).log_err()?, + ))) + }) .chain( update .updated_statuses @@ -5060,9 +5072,7 @@ async fn compute_snapshot( let mut events = Vec::new(); let branches = backend.branches().await?; let branch = branches.into_iter().find(|branch| branch.is_head); - let statuses = backend - .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH)) - .await?; + let statuses = backend.status(&[RelPath::empty().into()]).await?; let stash_entries = backend.stash_entries().await?; let statuses_by_path = SumTree::from_iter( statuses @@ -5108,6 +5118,7 @@ async fn compute_snapshot( id, statuses_by_path, work_directory_abs_path, + path_style: prev_snapshot.path_style, scan_id: prev_snapshot.scan_id + 1, branch, head_commit, diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 067af17820e58264006d0227cfb0f3c13069fcf9..13a082b35024b11870fb14fb3419c76841566193 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -255,20 +255,23 @@ impl EventEmitter for ConflictSet {} #[cfg(test)] mod tests { - use std::{path::Path, sync::mpsc}; + use std::sync::mpsc; use crate::Project; use super::*; use fs::FakeFs; - use git::status::{UnmergedStatus, UnmergedStatusCode}; + use git::{ + repository::repo_path, + status::{UnmergedStatus, UnmergedStatusCode}, + }; use gpui::{BackgroundExecutor, TestAppContext}; use language::language_settings::AllLanguageSettings; use serde_json::json; use settings::Settings as _; use text::{Buffer, BufferId, Point, ToOffset as _}; use unindent::Unindent as _; - use util::path; + use util::{path, rel_path::rel_path}; use worktree::WorktreeSettings; #[test] @@ -543,7 +546,7 @@ mod tests { fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { state.unmerged_paths.insert( - "a.txt".into(), + repo_path("a.txt"), UnmergedStatus { first_head: UnmergedStatusCode::Updated, second_head: UnmergedStatusCode::Updated, @@ -621,7 +624,7 @@ mod tests { cx.run_until_parked(); fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { state.unmerged_paths.insert( - "a.txt".into(), + rel_path("a.txt").into(), UnmergedStatus { first_head: UnmergedStatusCode::Updated, second_head: UnmergedStatusCode::Updated, @@ -647,7 +650,7 @@ mod tests { // Simulate the conflict being removed by e.g. staging the file. fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { - state.unmerged_paths.remove(Path::new("a.txt")) + state.unmerged_paths.remove(&repo_path("a.txt")) }) .unwrap(); @@ -660,7 +663,7 @@ mod tests { // Simulate the conflict being re-added. 
fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { state.unmerged_paths.insert( - "a.txt".into(), + repo_path("a.txt"), UnmergedStatus { first_head: UnmergedStatusCode::Updated, second_head: UnmergedStatusCode::Updated, diff --git a/crates/project/src/git_store/git_traversal.rs b/crates/project/src/git_store/git_traversal.rs index 93f5d3e24c883073eca657c1b724da64648f88ae..3da56b0246a84927202179a8b440c5b2e8116d29 100644 --- a/crates/project/src/git_store/git_traversal.rs +++ b/crates/project/src/git_store/git_traversal.rs @@ -3,6 +3,7 @@ use git::{repository::RepoPath, status::GitSummary}; use std::{collections::BTreeMap, ops::Deref, path::Path}; use sum_tree::Cursor; use text::Bias; +use util::rel_path::RelPath; use worktree::{Entry, PathProgress, PathTarget, Traversal}; use super::{RepositoryId, RepositorySnapshot, StatusEntry}; @@ -70,10 +71,7 @@ impl<'a> GitTraversal<'a> { return; }; - let Ok(abs_path) = self.traversal.snapshot().absolutize(&entry.path) else { - self.repo_location = None; - return; - }; + let abs_path = self.traversal.snapshot().absolutize(&entry.path); let Some((repo, repo_path)) = self.repo_root_for_path(&abs_path) else { self.repo_location = None; @@ -97,13 +95,13 @@ impl<'a> GitTraversal<'a> { if entry.is_dir() { let mut statuses = statuses.clone(); - statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left); - let summary = statuses.summary(&PathTarget::Successor(repo_path.as_ref()), Bias::Left); + statuses.seek_forward(&PathTarget::Path(&repo_path), Bias::Left); + let summary = statuses.summary(&PathTarget::Successor(&repo_path), Bias::Left); self.current_entry_summary = Some(summary); } else if entry.is_file() { // For a file entry, park the cursor on the corresponding status - if statuses.seek_forward(&PathTarget::Path(repo_path.as_ref()), Bias::Left) { + if statuses.seek_forward(&PathTarget::Path(&repo_path), Bias::Left) { // TODO: Investigate statuses.item() being None here. 
self.current_entry_summary = statuses.item().map(|item| item.status.into()); } else { @@ -159,7 +157,7 @@ impl<'a> Iterator for GitTraversal<'a> { } pub struct ChildEntriesGitIter<'a> { - parent_path: &'a Path, + parent_path: &'a RelPath, traversal: GitTraversal<'a>, } @@ -167,7 +165,7 @@ impl<'a> ChildEntriesGitIter<'a> { pub fn new( repo_snapshots: &'a HashMap, worktree_snapshot: &'a worktree::Snapshot, - parent_path: &'a Path, + parent_path: &'a RelPath, ) -> Self { let mut traversal = GitTraversal::new( repo_snapshots, @@ -265,7 +263,7 @@ mod tests { use gpui::TestAppContext; use serde_json::json; use settings::SettingsStore; - use util::path; + use util::{path, rel_path::rel_path}; const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus { first_head: UnmergedStatusCode::Updated, @@ -312,17 +310,14 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/root/x/.git")), &[ - (Path::new("x2.txt"), StatusCode::Modified.index()), - (Path::new("z.txt"), StatusCode::Added.index()), + ("x2.txt", StatusCode::Modified.index()), + ("z.txt", StatusCode::Added.index()), ], ); - fs.set_status_for_repo( - Path::new(path!("/root/x/y/.git")), - &[(Path::new("y1.txt"), CONFLICT)], - ); + fs.set_status_for_repo(Path::new(path!("/root/x/y/.git")), &[("y1.txt", CONFLICT)]); fs.set_status_for_repo( Path::new(path!("/root/z/.git")), - &[(Path::new("z2.txt"), StatusCode::Added.index())], + &[("z2.txt", StatusCode::Added.index())], ); let project = Project::test(fs, [path!("/root").as_ref()], cx).await; @@ -337,7 +332,7 @@ mod tests { let traversal = GitTraversal::new( &repo_snapshots, - worktree_snapshot.traverse_from_path(true, false, true, Path::new("x")), + worktree_snapshot.traverse_from_path(true, false, true, RelPath::new("x").unwrap()), ); let entries = traversal .map(|entry| (entry.path.clone(), entry.git_summary)) @@ -345,13 +340,13 @@ mod tests { pretty_assertions::assert_eq!( entries, [ - (Path::new("x/x1.txt").into(), GitSummary::UNCHANGED), - (Path::new("x/x2.txt").into(), MODIFIED), - (Path::new("x/y/y1.txt").into(), GitSummary::CONFLICT), - (Path::new("x/y/y2.txt").into(), GitSummary::UNCHANGED), - (Path::new("x/z.txt").into(), ADDED), - (Path::new("z/z1.txt").into(), GitSummary::UNCHANGED), - (Path::new("z/z2.txt").into(), ADDED), + (rel_path("x/x1.txt").into(), GitSummary::UNCHANGED), + (rel_path("x/x2.txt").into(), MODIFIED), + (rel_path("x/y/y1.txt").into(), GitSummary::CONFLICT), + (rel_path("x/y/y2.txt").into(), GitSummary::UNCHANGED), + (rel_path("x/z.txt").into(), ADDED), + (rel_path("z/z1.txt").into(), GitSummary::UNCHANGED), + (rel_path("z/z2.txt").into(), ADDED), ] ) } @@ -386,18 +381,15 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/root/x/.git")), &[ - (Path::new("x2.txt"), StatusCode::Modified.index()), - (Path::new("z.txt"), StatusCode::Added.index()), + ("x2.txt", StatusCode::Modified.index()), + ("z.txt", StatusCode::Added.index()), ], ); - fs.set_status_for_repo( - Path::new(path!("/root/x/y/.git")), - &[(Path::new("y1.txt"), CONFLICT)], - ); + fs.set_status_for_repo(Path::new(path!("/root/x/y/.git")), &[("y1.txt", CONFLICT)]); fs.set_status_for_repo( Path::new(path!("/root/z/.git")), - &[(Path::new("z2.txt"), StatusCode::Added.index())], + &[("z2.txt", StatusCode::Added.index())], ); let project = Project::test(fs, [path!("/root").as_ref()], cx).await; @@ -415,18 +407,18 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new("x/y"), GitSummary::CONFLICT), - (Path::new("x/y/y1.txt"), GitSummary::CONFLICT), - (Path::new("x/y/y2.txt"), 
GitSummary::UNCHANGED), + ("x/y", GitSummary::CONFLICT), + ("x/y/y1.txt", GitSummary::CONFLICT), + ("x/y/y2.txt", GitSummary::UNCHANGED), ], ); check_git_statuses( &repo_snapshots, &worktree_snapshot, &[ - (Path::new("z"), ADDED), - (Path::new("z/z1.txt"), GitSummary::UNCHANGED), - (Path::new("z/z2.txt"), ADDED), + ("z", ADDED), + ("z/z1.txt", GitSummary::UNCHANGED), + ("z/z2.txt", ADDED), ], ); @@ -435,9 +427,9 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new("x"), MODIFIED + ADDED), - (Path::new("x/y"), GitSummary::CONFLICT), - (Path::new("x/y/y1.txt"), GitSummary::CONFLICT), + ("x", MODIFIED + ADDED), + ("x/y", GitSummary::CONFLICT), + ("x/y/y1.txt", GitSummary::CONFLICT), ], ); @@ -446,13 +438,13 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new("x"), MODIFIED + ADDED), - (Path::new("x/x1.txt"), GitSummary::UNCHANGED), - (Path::new("x/x2.txt"), MODIFIED), - (Path::new("x/y"), GitSummary::CONFLICT), - (Path::new("x/y/y1.txt"), GitSummary::CONFLICT), - (Path::new("x/y/y2.txt"), GitSummary::UNCHANGED), - (Path::new("x/z.txt"), ADDED), + ("x", MODIFIED + ADDED), + ("x/x1.txt", GitSummary::UNCHANGED), + ("x/x2.txt", MODIFIED), + ("x/y", GitSummary::CONFLICT), + ("x/y/y1.txt", GitSummary::CONFLICT), + ("x/y/y2.txt", GitSummary::UNCHANGED), + ("x/z.txt", ADDED), ], ); @@ -461,9 +453,9 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new(""), GitSummary::UNCHANGED), - (Path::new("x"), MODIFIED + ADDED), - (Path::new("x/x1.txt"), GitSummary::UNCHANGED), + ("", GitSummary::UNCHANGED), + ("x", MODIFIED + ADDED), + ("x/x1.txt", GitSummary::UNCHANGED), ], ); @@ -472,17 +464,17 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new(""), GitSummary::UNCHANGED), - (Path::new("x"), MODIFIED + ADDED), - (Path::new("x/x1.txt"), GitSummary::UNCHANGED), - (Path::new("x/x2.txt"), MODIFIED), - (Path::new("x/y"), GitSummary::CONFLICT), - (Path::new("x/y/y1.txt"), GitSummary::CONFLICT), - (Path::new("x/y/y2.txt"), GitSummary::UNCHANGED), - (Path::new("x/z.txt"), ADDED), - (Path::new("z"), ADDED), - (Path::new("z/z1.txt"), GitSummary::UNCHANGED), - (Path::new("z/z2.txt"), ADDED), + ("", GitSummary::UNCHANGED), + ("x", MODIFIED + ADDED), + ("x/x1.txt", GitSummary::UNCHANGED), + ("x/x2.txt", MODIFIED), + ("x/y", GitSummary::CONFLICT), + ("x/y/y1.txt", GitSummary::CONFLICT), + ("x/y/y2.txt", GitSummary::UNCHANGED), + ("x/z.txt", ADDED), + ("z", ADDED), + ("z/z1.txt", GitSummary::UNCHANGED), + ("z/z2.txt", ADDED), ], ); } @@ -520,9 +512,9 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/root/.git")), &[ - (Path::new("a/b/c1.txt"), StatusCode::Added.index()), - (Path::new("a/d/e2.txt"), StatusCode::Modified.index()), - (Path::new("g/h2.txt"), CONFLICT), + ("a/b/c1.txt", StatusCode::Added.index()), + ("a/d/e2.txt", StatusCode::Modified.index()), + ("g/h2.txt", CONFLICT), ], ); @@ -540,9 +532,9 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new(""), GitSummary::CONFLICT + MODIFIED + ADDED), - (Path::new("g"), GitSummary::CONFLICT), - (Path::new("g/h2.txt"), GitSummary::CONFLICT), + ("", GitSummary::CONFLICT + MODIFIED + ADDED), + ("g", GitSummary::CONFLICT), + ("g/h2.txt", GitSummary::CONFLICT), ], ); @@ -550,17 +542,17 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new(""), GitSummary::CONFLICT + ADDED + MODIFIED), - (Path::new("a"), ADDED + MODIFIED), - (Path::new("a/b"), ADDED), - (Path::new("a/b/c1.txt"), ADDED), - (Path::new("a/b/c2.txt"), GitSummary::UNCHANGED), - (Path::new("a/d"), MODIFIED), - 
(Path::new("a/d/e2.txt"), MODIFIED), - (Path::new("f"), GitSummary::UNCHANGED), - (Path::new("f/no-status.txt"), GitSummary::UNCHANGED), - (Path::new("g"), GitSummary::CONFLICT), - (Path::new("g/h2.txt"), GitSummary::CONFLICT), + ("", GitSummary::CONFLICT + ADDED + MODIFIED), + ("a", ADDED + MODIFIED), + ("a/b", ADDED), + ("a/b/c1.txt", ADDED), + ("a/b/c2.txt", GitSummary::UNCHANGED), + ("a/d", MODIFIED), + ("a/d/e2.txt", MODIFIED), + ("f", GitSummary::UNCHANGED), + ("f/no-status.txt", GitSummary::UNCHANGED), + ("g", GitSummary::CONFLICT), + ("g/h2.txt", GitSummary::CONFLICT), ], ); @@ -568,15 +560,15 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new("a/b"), ADDED), - (Path::new("a/b/c1.txt"), ADDED), - (Path::new("a/b/c2.txt"), GitSummary::UNCHANGED), - (Path::new("a/d"), MODIFIED), - (Path::new("a/d/e1.txt"), GitSummary::UNCHANGED), - (Path::new("a/d/e2.txt"), MODIFIED), - (Path::new("f"), GitSummary::UNCHANGED), - (Path::new("f/no-status.txt"), GitSummary::UNCHANGED), - (Path::new("g"), GitSummary::CONFLICT), + ("a/b", ADDED), + ("a/b/c1.txt", ADDED), + ("a/b/c2.txt", GitSummary::UNCHANGED), + ("a/d", MODIFIED), + ("a/d/e1.txt", GitSummary::UNCHANGED), + ("a/d/e2.txt", MODIFIED), + ("f", GitSummary::UNCHANGED), + ("f/no-status.txt", GitSummary::UNCHANGED), + ("g", GitSummary::CONFLICT), ], ); @@ -584,11 +576,11 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new("a/b/c1.txt"), ADDED), - (Path::new("a/b/c2.txt"), GitSummary::UNCHANGED), - (Path::new("a/d/e1.txt"), GitSummary::UNCHANGED), - (Path::new("a/d/e2.txt"), MODIFIED), - (Path::new("f/no-status.txt"), GitSummary::UNCHANGED), + ("a/b/c1.txt", ADDED), + ("a/b/c2.txt", GitSummary::UNCHANGED), + ("a/d/e1.txt", GitSummary::UNCHANGED), + ("a/d/e2.txt", MODIFIED), + ("f/no-status.txt", GitSummary::UNCHANGED), ], ); } @@ -621,18 +613,18 @@ mod tests { fs.set_status_for_repo( Path::new(path!("/root/x/.git")), - &[(Path::new("x1.txt"), StatusCode::Added.index())], + &[("x1.txt", StatusCode::Added.index())], ); fs.set_status_for_repo( Path::new(path!("/root/y/.git")), &[ - (Path::new("y1.txt"), CONFLICT), - (Path::new("y2.txt"), StatusCode::Modified.index()), + ("y1.txt", CONFLICT), + ("y2.txt", StatusCode::Modified.index()), ], ); fs.set_status_for_repo( Path::new(path!("/root/z/.git")), - &[(Path::new("z2.txt"), StatusCode::Modified.index())], + &[("z2.txt", StatusCode::Modified.index())], ); let project = Project::test(fs, [path!("/root").as_ref()], cx).await; @@ -648,47 +640,44 @@ mod tests { check_git_statuses( &repo_snapshots, &worktree_snapshot, - &[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)], + &[("x", ADDED), ("x/x1.txt", ADDED)], ); check_git_statuses( &repo_snapshots, &worktree_snapshot, &[ - (Path::new("y"), GitSummary::CONFLICT + MODIFIED), - (Path::new("y/y1.txt"), GitSummary::CONFLICT), - (Path::new("y/y2.txt"), MODIFIED), + ("y", GitSummary::CONFLICT + MODIFIED), + ("y/y1.txt", GitSummary::CONFLICT), + ("y/y2.txt", MODIFIED), ], ); check_git_statuses( &repo_snapshots, &worktree_snapshot, - &[ - (Path::new("z"), MODIFIED), - (Path::new("z/z2.txt"), MODIFIED), - ], + &[("z", MODIFIED), ("z/z2.txt", MODIFIED)], ); check_git_statuses( &repo_snapshots, &worktree_snapshot, - &[(Path::new("x"), ADDED), (Path::new("x/x1.txt"), ADDED)], + &[("x", ADDED), ("x/x1.txt", ADDED)], ); check_git_statuses( &repo_snapshots, &worktree_snapshot, &[ - (Path::new("x"), ADDED), - (Path::new("x/x1.txt"), ADDED), - (Path::new("x/x2.txt"), GitSummary::UNCHANGED), - (Path::new("y"), GitSummary::CONFLICT + 
MODIFIED), - (Path::new("y/y1.txt"), GitSummary::CONFLICT), - (Path::new("y/y2.txt"), MODIFIED), - (Path::new("z"), MODIFIED), - (Path::new("z/z1.txt"), GitSummary::UNCHANGED), - (Path::new("z/z2.txt"), MODIFIED), + ("x", ADDED), + ("x/x1.txt", ADDED), + ("x/x2.txt", GitSummary::UNCHANGED), + ("y", GitSummary::CONFLICT + MODIFIED), + ("y/y1.txt", GitSummary::CONFLICT), + ("y/y2.txt", MODIFIED), + ("z", MODIFIED), + ("z/z1.txt", GitSummary::UNCHANGED), + ("z/z2.txt", MODIFIED), ], ); } @@ -722,7 +711,7 @@ mod tests { .await; fs.set_head_and_index_for_repo( path!("/root/.git").as_ref(), - &[("a.txt".into(), "".into()), ("b/c.txt".into(), "".into())], + &[("a.txt", "".into()), ("b/c.txt", "".into())], ); cx.run_until_parked(); @@ -757,10 +746,7 @@ mod tests { // detected. fs.set_head_for_repo( path!("/root/.git").as_ref(), - &[ - ("a.txt".into(), "".into()), - ("b/c.txt".into(), "something-else".into()), - ], + &[("a.txt", "".into()), ("b/c.txt", "something-else".into())], "deadbeef", ); cx.executor().run_until_parked(); @@ -777,9 +763,9 @@ mod tests { &repo_snapshots, &worktree_snapshot, &[ - (Path::new(""), MODIFIED), - (Path::new("a.txt"), GitSummary::UNCHANGED), - (Path::new("b/c.txt"), MODIFIED), + ("", MODIFIED), + ("a.txt", GitSummary::UNCHANGED), + ("b/c.txt", MODIFIED), ], ); } @@ -788,17 +774,17 @@ mod tests { fn check_git_statuses( repo_snapshots: &HashMap, worktree_snapshot: &worktree::Snapshot, - expected_statuses: &[(&Path, GitSummary)], + expected_statuses: &[(&str, GitSummary)], ) { let mut traversal = GitTraversal::new( repo_snapshots, - worktree_snapshot.traverse_from_path(true, true, false, "".as_ref()), + worktree_snapshot.traverse_from_path(true, true, false, RelPath::empty()), ); let found_statuses = expected_statuses .iter() .map(|&(path, _)| { let git_entry = traversal - .find(|git_entry| &*git_entry.path == path) + .find(|git_entry| git_entry.path.as_ref() == rel_path(path)) .unwrap_or_else(|| panic!("Traversal has no entry for {path:?}")); (path, git_entry.git_summary) }) diff --git a/crates/project/src/image_store.rs b/crates/project/src/image_store.rs index e499d4e026f724f12e023738f12afb2735f9ce2d..71394ead2eb27067706023d4870c78c557c3747b 100644 --- a/crates/project/src/image_store.rs +++ b/crates/project/src/image_store.rs @@ -13,10 +13,9 @@ use image::{ExtendedColorType, GenericImageView, ImageReader}; use language::{DiskState, File}; use rpc::{AnyProtoClient, ErrorExt as _}; use std::num::NonZeroU64; -use std::path::Path; +use std::path::PathBuf; use std::sync::Arc; -use std::{ffi::OsStr, path::PathBuf}; -use util::ResultExt; +use util::{ResultExt, rel_path::RelPath}; use worktree::{LoadedBinaryFile, PathChange, Worktree}; #[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)] @@ -207,8 +206,7 @@ pub fn is_image_file(project: &Entity, path: &ProjectPath, cx: &App) -> .abs_path(); path.path .extension() - .or_else(|| worktree_abs_path.extension()) - .and_then(OsStr::to_str) + .or_else(|| worktree_abs_path.extension()?.to_str()) .map(str::to_lowercase) }); @@ -255,7 +253,7 @@ impl ProjectItem for ImageItem { trait ImageStoreImpl { fn open_image( &self, - path: Arc, + path: Arc, worktree: Entity, cx: &mut Context, ) -> Task>>; @@ -458,7 +456,7 @@ impl ImageStore { impl ImageStoreImpl for Entity { fn open_image( &self, - path: Arc, + path: Arc, worktree: Entity, cx: &mut Context, ) -> Task>> { @@ -539,7 +537,7 @@ impl LocalImageStore { fn local_worktree_entries_changed( &mut self, worktree_handle: &Entity, - changes: &[(Arc, ProjectEntryId, 
PathChange)], + changes: &[(Arc, ProjectEntryId, PathChange)], cx: &mut Context, ) { let snapshot = worktree_handle.read(cx).snapshot(); @@ -551,7 +549,7 @@ impl LocalImageStore { fn local_worktree_entry_changed( &mut self, entry_id: ProjectEntryId, - path: &Arc, + path: &Arc, worktree: &Entity, snapshot: &worktree::Snapshot, cx: &mut Context, @@ -698,7 +696,7 @@ fn create_gpui_image(content: Vec) -> anyhow::Result> { impl ImageStoreImpl for Entity { fn open_image( &self, - _path: Arc, + _path: Arc, _worktree: Entity, _cx: &mut Context, ) -> Task>> { @@ -729,7 +727,7 @@ mod tests { use gpui::TestAppContext; use serde_json::json; use settings::SettingsStore; - use std::path::PathBuf; + use util::rel_path::rel_path; pub fn init_test(cx: &mut TestAppContext) { zlog::init_test(); @@ -768,7 +766,7 @@ mod tests { let project_path = ProjectPath { worktree_id, - path: PathBuf::from("image_1.png").into(), + path: rel_path("image_1.png").into(), }; let (task1, task2) = project.update(cx, |project, cx| { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 14a3f1921c04a6572fb5f4e4535ba4895c556d94..77ec10d01ea5400194d5a9288f5d1b0a4e874648 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -33,7 +33,6 @@ use crate::{ }, prettier_store::{self, PrettierStore, PrettierStoreEvent}, project_settings::{LspSettings, ProjectSettings}, - relativize_path, resolve_path, toolchain_store::{LocalToolchainStore, ToolchainStoreEvent}, worktree_store::{WorktreeStore, WorktreeStoreEvent}, yarn::YarnPathStore, @@ -88,7 +87,7 @@ use postage::{mpsc, sink::Sink, stream::Stream, watch}; use rand::prelude::*; use rpc::{ AnyProtoClient, - proto::{FromProto, LspRequestId, LspRequestMessage as _, ToProto}, + proto::{LspRequestId, LspRequestMessage as _}, }; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; @@ -116,8 +115,9 @@ use text::{Anchor, BufferId, LineEnding, OffsetRangeExt, ToPoint as _}; use util::{ ConnectionResult, ResultExt as _, debug_panic, defer, maybe, merge_json_value_into, - paths::{PathExt, SanitizedPath}, + paths::{PathStyle, SanitizedPath}, post_inc, + rel_path::RelPath, }; pub use fs::*; @@ -158,7 +158,7 @@ impl FormatTrigger { #[derive(Clone)] struct UnifiedLanguageServer { id: LanguageServerId, - project_roots: HashSet>, + project_roots: HashSet>, } #[derive(Clone, Hash, PartialEq, Eq)] @@ -209,7 +209,7 @@ pub struct LocalLspStore { diagnostics: HashMap< WorktreeId, HashMap< - Arc, + Arc, Vec<( LanguageServerId, Vec>>, @@ -1086,7 +1086,7 @@ impl LocalLspStore { if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) { let worktree_id = file.worktree_id(cx); - let path: Arc = file + let path: Arc = file .path() .parent() .map(Arc::from) @@ -1842,17 +1842,19 @@ impl LocalLspStore { } if !project_transaction_command.0.is_empty() { - let extra_buffers = project_transaction_command - .0 - .keys() - .filter_map(|buffer_handle| { - buffer_handle - .read_with(cx, |b, cx| b.project_path(cx)) - .ok() - .flatten() - }) - .map(|p| p.path.to_sanitized_string()) - .join(", "); + let mut extra_buffers = String::new(); + for buffer in project_transaction_command.0.keys() { + buffer + .read_with(cx, |b, cx| { + if let Some(path) = b.project_path(cx) { + if !extra_buffers.is_empty() { + extra_buffers.push_str(", "); + } + extra_buffers.push_str(path.path.as_str()); + } + }) + .ok(); + } zlog::warn!( logger => "Unexpected edits to buffers other than the buffer actively being formatted due to command 
{}. Impacted buffers: [{}].", @@ -2347,7 +2349,7 @@ impl LocalLspStore { let Some(language) = buffer.language().cloned() else { return; }; - let path: Arc = file + let path: Arc = file .path() .parent() .map(Arc::from) @@ -2403,8 +2405,7 @@ impl LocalLspStore { let path = &disposition.path; { - let uri = - Uri::from_file_path(worktree.read(cx).abs_path().join(&path.path)); + let uri = Uri::from_file_path(worktree.read(cx).absolutize(&path.path)); let server_id = self.get_or_insert_language_server( &worktree, @@ -3172,7 +3173,7 @@ impl LocalLspStore { if let Some((tree, glob)) = worktree.as_local_mut().zip(Glob::new(&pattern).log_err()) { - tree.add_path_prefix_to_scan(literal_prefix.into()); + tree.add_path_prefix_to_scan(literal_prefix); worktree_globs .entry(tree.id()) .or_insert_with(GlobSetBuilder::new) @@ -3268,10 +3269,11 @@ impl LocalLspStore { worktrees: &[Entity], watcher: &FileSystemWatcher, cx: &App, - ) -> Option<(Entity, PathBuf, String)> { + ) -> Option<(Entity, Arc, String)> { worktrees.iter().find_map(|worktree| { let tree = worktree.read(cx); let worktree_root_path = tree.abs_path(); + let path_style = tree.path_style(); match &watcher.glob_pattern { lsp::GlobPattern::String(s) => { let watcher_path = SanitizedPath::new(s); @@ -3282,7 +3284,7 @@ impl LocalLspStore { let literal_prefix = glob_literal_prefix(relative); Some(( worktree.clone(), - literal_prefix, + RelPath::from_std_path(&literal_prefix, path_style).ok()?, relative.to_string_lossy().to_string(), )) } @@ -3296,7 +3298,11 @@ impl LocalLspStore { let relative = base_uri.strip_prefix(&worktree_root_path).ok()?; let mut literal_prefix = relative.to_owned(); literal_prefix.push(glob_literal_prefix(Path::new(&rp.pattern))); - Some((worktree.clone(), literal_prefix, rp.pattern.clone())) + Some(( + worktree.clone(), + RelPath::from_std_path(&literal_prefix, path_style).ok()?, + rp.pattern.clone(), + )) } } }) @@ -3483,7 +3489,7 @@ pub struct LspStore { _maintain_workspace_config: (Task>, watch::Sender<()>), _maintain_buffer_languages: Task<()>, diagnostic_summaries: - HashMap, HashMap>>, + HashMap, HashMap>>, pub lsp_server_capabilities: HashMap, lsp_document_colors: HashMap, lsp_code_lens: HashMap, @@ -3569,11 +3575,28 @@ struct CoreSymbol { pub language_server_name: LanguageServerName, pub source_worktree_id: WorktreeId, pub source_language_server_id: LanguageServerId, - pub path: ProjectPath, + pub path: SymbolLocation, pub name: String, pub kind: lsp::SymbolKind, pub range: Range>, - pub signature: [u8; 32], +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum SymbolLocation { + InProject(ProjectPath), + OutsideProject { + abs_path: Arc, + signature: [u8; 32], + }, +} + +impl SymbolLocation { + fn file_name(&self) -> Option<&str> { + match self { + Self::InProject(path) => path.path.file_name(), + Self::OutsideProject { abs_path, .. 
} => abs_path.file_name()?.to_str(), + } + } } impl LspStore { @@ -4353,7 +4376,7 @@ impl LspStore { let mut summaries = diangostic_summaries.iter().flat_map(|(path, summaries)| { summaries .iter() - .map(|(server_id, summary)| summary.to_proto(*server_id, path)) + .map(|(server_id, summary)| summary.to_proto(*server_id, path.as_ref())) }); if let Some(summary) = summaries.next() { client @@ -4655,7 +4678,6 @@ impl LspStore { .unwrap_or_else(|| file.path().clone()); let worktree_path = ProjectPath { worktree_id, path }; let abs_path = file.abs_path(cx); - let worktree_root = worktree.read(cx).abs_path(); let nodes = rebase .walk( worktree_path, @@ -4668,7 +4690,7 @@ impl LspStore { for node in nodes { let server_id = node.server_id_or_init(|disposition| { let path = &disposition.path; - let uri = Uri::from_file_path(worktree_root.join(&path.path)); + let uri = Uri::from_file_path(worktree.read(cx).absolutize(&path.path)); let key = LanguageServerSeed { worktree_id, name: disposition.server_name.clone(), @@ -6965,7 +6987,6 @@ impl LspStore { server_id: LanguageServerId, lsp_adapter: Arc, worktree: WeakEntity, - worktree_abs_path: Arc, lsp_symbols: Vec<(String, SymbolKind, lsp::Location)>, } @@ -7004,7 +7025,6 @@ impl LspStore { if !supports_workspace_symbol_request { continue; } - let worktree_abs_path = worktree.abs_path().clone(); let worktree_handle = worktree_handle.clone(); let server_id = server.server_id(); requests.push( @@ -7044,7 +7064,6 @@ impl LspStore { server_id, lsp_adapter, worktree: worktree_handle.downgrade(), - worktree_abs_path, lsp_symbols, } }), @@ -7069,33 +7088,29 @@ impl LspStore { let source_worktree = result.worktree.upgrade()?; let source_worktree_id = source_worktree.read(cx).id(); - let path; - let worktree; - if let Some((tree, rel_path)) = + let path = if let Some((tree, rel_path)) = this.worktree_store.read(cx).find_worktree(&abs_path, cx) { - worktree = tree; - path = rel_path; + let worktree_id = tree.read(cx).id(); + SymbolLocation::InProject(ProjectPath { + worktree_id, + path: rel_path, + }) } else { - worktree = source_worktree; - path = relativize_path(&result.worktree_abs_path, &abs_path); - } - - let worktree_id = worktree.read(cx).id(); - let project_path = ProjectPath { - worktree_id, - path: path.into(), + SymbolLocation::OutsideProject { + signature: this.symbol_signature(&abs_path), + abs_path: abs_path.into(), + } }; - let signature = this.symbol_signature(&project_path); + Some(CoreSymbol { source_language_server_id: result.server_id, language_server_name: result.lsp_adapter.name.clone(), source_worktree_id, - path: project_path, + path, kind: symbol_kind, name: symbol_name, range: range_from_lsp(symbol_location.range), - signature, }) }) .collect() @@ -7638,7 +7653,7 @@ impl LspStore { let worktree_id = worktree.read(cx).id(); let project_path = ProjectPath { worktree_id, - path: relative_path.into(), + path: relative_path, }; if let Some(buffer_handle) = self.buffer_store.read(cx).get_by_path(&project_path) { @@ -7735,7 +7750,7 @@ impl LspStore { &mut self, worktree_id: WorktreeId, server_id: LanguageServerId, - path_in_worktree: Arc, + path_in_worktree: Arc, diagnostics: Vec>>, _: &mut Context, ) -> Result>> { @@ -7827,18 +7842,21 @@ impl LspStore { ))); }; - let worktree_abs_path = if let Some(worktree_abs_path) = self - .worktree_store - .read(cx) - .worktree_for_id(symbol.path.worktree_id, cx) - .map(|worktree| worktree.read(cx).abs_path()) - { - worktree_abs_path - } else { - return Task::ready(Err(anyhow!("worktree not found for 
symbol"))); + let symbol_abs_path = match &symbol.path { + SymbolLocation::InProject(project_path) => self + .worktree_store + .read(cx) + .absolutize(&project_path, cx) + .context("no such worktree"), + SymbolLocation::OutsideProject { + abs_path, + signature: _, + } => Ok(abs_path.to_path_buf()), + }; + let symbol_abs_path = match symbol_abs_path { + Ok(abs_path) => abs_path, + Err(err) => return Task::ready(Err(err)), }; - - let symbol_abs_path = resolve_path(&worktree_abs_path, &symbol.path.path); let symbol_uri = if let Ok(uri) = lsp::Uri::from_file_path(symbol_abs_path) { uri } else { @@ -7891,8 +7909,7 @@ impl LspStore { worktree_store.find_worktree(&worktree_root_target, cx) }) })? { - let relative_path = - known_relative_path.unwrap_or_else(|| Arc::::from(result.1)); + let relative_path = known_relative_path.unwrap_or_else(|| result.1.clone()); (result.0, relative_path) } else { let worktree = lsp_store @@ -7919,7 +7936,11 @@ impl LspStore { let relative_path = if let Some(known_path) = known_relative_path { known_path } else { - abs_path.strip_prefix(worktree_root)?.into() + RelPath::from_std_path( + abs_path.strip_prefix(worktree_root)?, + PathStyle::local(), + ) + .context("failed to create relative path")? }; (worktree, relative_path) }; @@ -8326,39 +8347,56 @@ impl LspStore { mut cx: AsyncApp, ) -> Result { let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id); - let (worktree_id, worktree, old_path, is_dir) = this + let new_worktree_id = WorktreeId::from_proto(envelope.payload.new_worktree_id); + let new_path = + RelPath::from_proto(&envelope.payload.new_path).context("invalid relative path")?; + + let (worktree_store, old_worktree, new_worktree, old_entry) = this .update(&mut cx, |this, cx| { - this.worktree_store + let (worktree, entry) = this + .worktree_store .read(cx) - .worktree_and_entry_for_id(entry_id, cx) - .map(|(worktree, entry)| { - ( - worktree.read(cx).id(), - worktree, - entry.path.clone(), - entry.is_dir(), - ) - }) + .worktree_and_entry_for_id(entry_id, cx)?; + let new_worktree = this + .worktree_store + .read(cx) + .worktree_for_id(new_worktree_id, cx)?; + Some(( + this.worktree_store.clone(), + worktree, + new_worktree, + entry.clone(), + )) })? 
.context("worktree not found")?; - let (old_abs_path, new_abs_path) = { - let root_path = worktree.read_with(&cx, |this, _| this.abs_path())?; - let new_path = PathBuf::from_proto(envelope.payload.new_path.clone()); - (root_path.join(&old_path), root_path.join(&new_path)) - }; + let (old_abs_path, old_worktree_id) = old_worktree.read_with(&cx, |worktree, _| { + (worktree.absolutize(&old_entry.path), worktree.id()) + })?; + let new_abs_path = + new_worktree.read_with(&cx, |worktree, _| worktree.absolutize(&new_path))?; let _transaction = Self::will_rename_entry( this.downgrade(), - worktree_id, + old_worktree_id, &old_abs_path, &new_abs_path, - is_dir, + old_entry.is_dir(), + cx.clone(), + ) + .await; + let response = WorktreeStore::handle_rename_project_entry( + worktree_store, + envelope.payload, cx.clone(), ) .await; - let response = Worktree::handle_rename_entry(worktree, envelope.payload, cx.clone()).await; this.read_with(&cx, |this, _| { - this.did_rename_entry(worktree_id, &old_abs_path, &new_abs_path, is_dir); + this.did_rename_entry( + old_worktree_id, + &old_abs_path, + &new_abs_path, + old_entry.is_dir(), + ); }) .ok(); response @@ -8381,7 +8419,7 @@ impl LspStore { { let project_path = ProjectPath { worktree_id, - path: Arc::::from_proto(message_summary.path), + path: RelPath::from_proto(&message_summary.path).context("invalid path")?, }; let path = project_path.path.clone(); let server_id = LanguageServerId(message_summary.language_server_id as usize); @@ -9436,10 +9474,16 @@ impl LspStore { let peer_id = envelope.original_sender_id().unwrap_or_default(); let symbol = envelope.payload.symbol.context("invalid symbol")?; let symbol = Self::deserialize_symbol(symbol)?; - let symbol = this.read_with(&cx, |this, _| { - let signature = this.symbol_signature(&symbol.path); - anyhow::ensure!(signature == symbol.signature, "invalid symbol signature"); - Ok(symbol) + this.read_with(&cx, |this, _| { + if let SymbolLocation::OutsideProject { + abs_path, + signature, + } = &symbol.path + { + let new_signature = this.symbol_signature(&abs_path); + anyhow::ensure!(&new_signature == signature, "invalid symbol signature"); + } + Ok(()) })??; let buffer = this .update(&mut cx, |this, cx| { @@ -9452,7 +9496,6 @@ impl LspStore { name: symbol.name, kind: symbol.kind, range: symbol.range, - signature: symbol.signature, label: CodeLabel { text: Default::default(), runs: Default::default(), @@ -9484,10 +9527,9 @@ impl LspStore { })? 
} - fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] { + fn symbol_signature(&self, abs_path: &Path) -> [u8; 32] { let mut hasher = Sha256::new(); - hasher.update(project_path.worktree_id.to_proto().to_be_bytes()); - hasher.update(project_path.path.to_string_lossy().as_bytes()); + hasher.update(abs_path.to_string_lossy().as_bytes()); hasher.update(self.nonce.to_be_bytes()); hasher.finalize().as_slice().try_into().unwrap() } @@ -10233,7 +10275,7 @@ impl LspStore { let project_path = ProjectPath { worktree_id: worktree.read(cx).id(), - path: relative_path.into(), + path: relative_path, }; Some( @@ -10799,7 +10841,7 @@ impl LspStore { pub(super) fn update_local_worktree_language_servers( &mut self, worktree_handle: &Entity, - changes: &[(Arc, ProjectEntryId, PathChange)], + changes: &[(Arc, ProjectEntryId, PathChange)], cx: &mut Context, ) { if changes.is_empty() { @@ -10821,7 +10863,7 @@ impl LspStore { language_server_ids.sort(); language_server_ids.dedup(); - let abs_path = worktree_handle.read(cx).abs_path(); + // let abs_path = worktree_handle.read(cx).abs_path(); for server_id in &language_server_ids { if let Some(LanguageServerState::Running { server, .. }) = local.language_servers.get(server_id) @@ -10834,7 +10876,7 @@ impl LspStore { changes: changes .iter() .filter_map(|(path, _, change)| { - if !watched_paths.is_match(path) { + if !watched_paths.is_match(path.as_std_path()) { return None; } let typ = match change { @@ -10844,10 +10886,11 @@ impl LspStore { PathChange::Updated => lsp::FileChangeType::CHANGED, PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED, }; - Some(lsp::FileEvent { - uri: lsp::Uri::from_file_path(abs_path.join(path)).unwrap(), - typ, - }) + let uri = lsp::Uri::from_file_path( + worktree_handle.read(cx).absolutize(&path), + ) + .ok()?; + Some(lsp::FileEvent { uri, typ }) }) .collect(), }; @@ -10859,7 +10902,7 @@ impl LspStore { } } for (path, _, _) in changes { - if let Some(file_name) = path.file_name().and_then(|file_name| file_name.to_str()) + if let Some(file_name) = path.file_name() && local.watched_manifest_filenames.contains(file_name) { self.request_workspace_config_refresh(); @@ -10879,12 +10922,10 @@ impl LspStore { } fn serialize_symbol(symbol: &Symbol) -> proto::Symbol { - proto::Symbol { + let mut result = proto::Symbol { language_server_name: symbol.language_server_name.0.to_string(), source_worktree_id: symbol.source_worktree_id.to_proto(), language_server_id: symbol.source_language_server_id.to_proto(), - worktree_id: symbol.path.worktree_id.to_proto(), - path: symbol.path.path.as_ref().to_proto(), name: symbol.name.clone(), kind: unsafe { mem::transmute::(symbol.kind) }, start: Some(proto::PointUtf16 { @@ -10895,17 +10936,45 @@ impl LspStore { row: symbol.range.end.0.row, column: symbol.range.end.0.column, }), - signature: symbol.signature.to_vec(), + worktree_id: Default::default(), + path: Default::default(), + signature: Default::default(), + }; + match &symbol.path { + SymbolLocation::InProject(path) => { + result.worktree_id = path.worktree_id.to_proto(); + result.path = path.path.to_proto(); + } + SymbolLocation::OutsideProject { + abs_path, + signature, + } => { + result.path = abs_path.to_string_lossy().to_string(); + result.signature = signature.to_vec(); + } } + result } fn deserialize_symbol(serialized_symbol: proto::Symbol) -> Result { let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id); let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id); let kind = 
unsafe { mem::transmute::(serialized_symbol.kind) }; - let path = ProjectPath { - worktree_id, - path: Arc::::from_proto(serialized_symbol.path), + + let path = if serialized_symbol.signature.is_empty() { + SymbolLocation::InProject(ProjectPath { + worktree_id, + path: RelPath::from_proto(&serialized_symbol.path) + .context("invalid symbol path")?, + }) + } else { + SymbolLocation::OutsideProject { + abs_path: Path::new(&serialized_symbol.path).into(), + signature: serialized_symbol + .signature + .try_into() + .map_err(|_| anyhow!("invalid signature"))?, + } }; let start = serialized_symbol.start.context("invalid start")?; @@ -10921,10 +10990,6 @@ impl LspStore { range: Unclipped(PointUtf16::new(start.row, start.column)) ..Unclipped(PointUtf16::new(end.row, end.column)), kind, - signature: serialized_symbol - .signature - .try_into() - .map_err(|_| anyhow!("invalid signature"))?, }) } @@ -12484,7 +12549,7 @@ impl DiagnosticSummary { pub fn to_proto( self, language_server_id: LanguageServerId, - path: &Path, + path: &RelPath, ) -> proto::DiagnosticSummary { proto::DiagnosticSummary { path: path.to_proto(), @@ -12657,7 +12722,7 @@ pub fn language_server_settings<'a>( language_server_settings_for( SettingsLocation { worktree_id: delegate.worktree_id(), - path: delegate.worktree_root_path(), + path: RelPath::empty(), }, language, cx, @@ -12847,16 +12912,12 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { Some(dir) } - async fn read_text_file(&self, path: PathBuf) -> Result { + async fn read_text_file(&self, path: &RelPath) -> Result { let entry = self .worktree - .entry_for_path(&path) + .entry_for_path(path) .with_context(|| format!("no worktree entry for path {path:?}"))?; - let abs_path = self - .worktree - .absolutize(&entry.path) - .with_context(|| format!("cannot absolutize path {path:?}"))?; - + let abs_path = self.worktree.absolutize(&entry.path); self.fs.load(&abs_path).await } } @@ -12870,14 +12931,17 @@ async fn populate_labels_for_symbols( #[allow(clippy::mutable_key_type)] let mut symbols_by_language = HashMap::>, Vec>::default(); - let mut unknown_paths = BTreeSet::new(); + let mut unknown_paths = BTreeSet::>::new(); for symbol in symbols { + let Some(file_name) = symbol.path.file_name() else { + continue; + }; let language = language_registry - .language_for_file_path(&symbol.path.path) + .language_for_file_path(Path::new(file_name)) .await .ok() .or_else(|| { - unknown_paths.insert(symbol.path.path.clone()); + unknown_paths.insert(file_name.into()); None }); symbols_by_language @@ -12887,10 +12951,7 @@ async fn populate_labels_for_symbols( } for unknown_path in unknown_paths { - log::info!( - "no language found for symbol path {}", - unknown_path.display() - ); + log::info!("no language found for symbol in file {unknown_path:?}"); } let mut label_params = Vec::new(); @@ -12933,7 +12994,6 @@ async fn populate_labels_for_symbols( name, kind: symbol.kind, range: symbol.range, - signature: symbol.signature, }); } } diff --git a/crates/project/src/manifest_tree.rs b/crates/project/src/manifest_tree.rs index 5a3c7bd40fb11ee5bebe340ddc57ec71a112270b..ffa4872ca78e2295e18c515a03e81e4d7b63c07b 100644 --- a/crates/project/src/manifest_tree.rs +++ b/crates/project/src/manifest_tree.rs @@ -7,7 +7,7 @@ mod manifest_store; mod path_trie; mod server_tree; -use std::{borrow::Borrow, collections::hash_map::Entry, ops::ControlFlow, path::Path, sync::Arc}; +use std::{borrow::Borrow, collections::hash_map::Entry, ops::ControlFlow, sync::Arc}; use collections::HashMap; use gpui::{App, 
AppContext as _, Context, Entity, Subscription}; @@ -15,6 +15,7 @@ use language::{ManifestDelegate, ManifestName, ManifestQuery}; pub use manifest_store::ManifestProvidersStore; use path_trie::{LabelPresence, RootPathTrie, TriePath}; use settings::{SettingsStore, WorktreeId}; +use util::rel_path::RelPath; use worktree::{Event as WorktreeEvent, Snapshot, Worktree}; use crate::{ @@ -184,7 +185,7 @@ impl ManifestTree { .and_then(|manifest_name| self.root_for_path(project_path, manifest_name, delegate, cx)) .unwrap_or_else(|| ProjectPath { worktree_id, - path: Arc::from(Path::new("")), + path: RelPath::empty().into(), }) } @@ -211,7 +212,7 @@ impl ManifestQueryDelegate { } impl ManifestDelegate for ManifestQueryDelegate { - fn exists(&self, path: &Path, is_dir: Option) -> bool { + fn exists(&self, path: &RelPath, is_dir: Option) -> bool { self.worktree.entry_for_path(path).is_some_and(|entry| { is_dir.is_none_or(|is_required_to_be_dir| is_required_to_be_dir == entry.is_dir()) }) diff --git a/crates/project/src/manifest_tree/path_trie.rs b/crates/project/src/manifest_tree/path_trie.rs index 9cebfda25c69fa35b06cefe9ec744b5e6152a820..2dd301fb13a89a61086d61a55c0f53568d1f3dd0 100644 --- a/crates/project/src/manifest_tree/path_trie.rs +++ b/crates/project/src/manifest_tree/path_trie.rs @@ -1,11 +1,11 @@ use std::{ collections::{BTreeMap, btree_map::Entry}, - ffi::OsStr, ops::ControlFlow, - path::{Path, PathBuf}, sync::Arc, }; +use util::rel_path::RelPath; + /// [RootPathTrie] is a workhorse of [super::ManifestTree]. It is responsible for determining the closest known entry for a given path. /// It also determines how much of a given path is unexplored, thus letting callers fill in that gap if needed. /// Conceptually, it allows one to annotate Worktree entries with arbitrary extra metadata and run closest-ancestor searches. @@ -14,9 +14,9 @@ use std::{ /// For example, if there's a project root at path `python/project` and we query for a path `python/project/subdir/another_subdir/file.py`, there is /// a known root at `python/project` and the unexplored part is `subdir/another_subdir` - we need to run a scan on these 2 directories. pub(super) struct RootPathTrie