Created by Max Brunsfeld and Cole Miller
Consolidate the `RelPath` constructors and accessors. `RelPath::new` now takes a std `Path` plus a `PathStyle` (replacing `RelPath::from_std_path`), while `/`-delimited literals go through the new `RelPath::unix`. Accessors are renamed to match: `as_str` becomes `as_unix_str`, conversions to std paths use `as_std_path`, and shared paths are produced with `into_arc`. Tests switch to the `rel_path` helper from `util::rel_path`. See the sketch below.
Release Notes:
- N/A
---------
Co-authored-by: Cole Miller <cole@zed.dev>
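
For review context, here is a minimal sketch of the consolidated surface as it is exercised by the call sites in this diff. The exact signatures (the error types, the `Cow` returned by `RelPath::new`, the borrowing `RelPath::unix`) are inferred from usage below rather than taken from the crate's docs, so treat them as assumptions; the paths are illustrative.

```rust
use std::{borrow::Cow, path::Path, sync::Arc};
use util::paths::PathStyle;
use util::rel_path::{RelPath, rel_path};

fn sketch() {
    // `/`-delimited literals go through `RelPath::unix`, which borrows its input.
    let settings: &RelPath = RelPath::unix(".zed/settings.json").unwrap();
    assert_eq!(settings, rel_path(".zed/settings.json"));

    // Converting a std `Path` is now `RelPath::new`, which takes the path style and
    // appears to yield a copy-on-write path; `into_arc` replaces the old `.into()`
    // wherever a shared `Arc<RelPath>` is needed.
    let cow: Cow<'_, RelPath> =
        RelPath::new(Path::new("dir/file.txt"), PathStyle::local()).unwrap();
    let shared: Arc<RelPath> = cow.into_arc();

    // Accessors say what they return: a `/`-delimited string, a std `Path`,
    // or a style-aware display form.
    let _unix: &str = shared.as_unix_str();
    let _std: &Path = shared.as_std_path();
    let _shown = shared.display(PathStyle::local());
}
```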
Cargo.lock | 1
crates/agent/src/thread_store.rs | 4
crates/agent2/src/agent.rs | 4
crates/agent2/src/tools/edit_file_tool.rs | 19
crates/assistant_slash_commands/src/file_command.rs | 10
crates/assistant_tools/src/edit_file_tool.rs | 2
crates/assistant_tools/src/list_directory_tool.rs | 3
crates/client/src/telemetry.rs | 2
crates/collab/src/tests/editor_tests.rs | 24
crates/collab/src/tests/following_tests.rs | 30
crates/collab/src/tests/integration_tests.rs | 79 +
crates/collab/src/tests/random_project_collaboration_tests.rs | 6
crates/dap_adapters/src/python.rs | 4
crates/debugger_ui/src/debugger_panel.rs | 8
crates/debugger_ui/src/new_process_modal.rs | 8
crates/debugger_ui/src/session/running/breakpoint_list.rs | 5
crates/editor/src/editor_tests.rs | 5
crates/editor/src/test/editor_test_context.rs | 4
crates/eval/src/examples/add_arg_to_trait_method.rs | 4
crates/extension_host/src/wasm_host/wit/since_v0_1_0.rs | 2
crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs | 4
crates/file_finder/src/file_finder.rs | 12
crates/file_finder/src/file_finder_tests.rs | 2
crates/fs/src/fake_git_repo.rs | 2
crates/fs/src/fs.rs | 4
crates/fuzzy/src/paths.rs | 9
crates/git/src/blame.rs | 2
crates/git/src/repository.rs | 41
crates/git/src/status.rs | 2
crates/git_ui/src/commit_view.rs | 8
crates/git_ui/src/project_diff.rs | 4
crates/language_tools/src/lsp_button.rs | 2
crates/languages/src/json.rs | 4
crates/languages/src/python.rs | 4
crates/languages/src/rust.rs | 2
crates/languages/src/typescript.rs | 4
crates/languages/src/vtsls.rs | 2
crates/outline_panel/src/outline_panel.rs | 2
crates/paths/src/paths.rs | 14
crates/project/src/git_store.rs | 16
crates/project/src/git_store/git_traversal.rs | 2
crates/project/src/lsp_store.rs | 13
crates/project/src/manifest_tree/path_trie.rs | 18
crates/project/src/prettier_store.rs | 2
crates/project/src/project.rs | 28
crates/project/src/project_settings.rs | 12
crates/project/src/project_tests.rs | 10
crates/project/src/task_inventory.rs | 8
crates/project/src/toolchain_store.rs | 2
crates/project/src/worktree_store.rs | 10
crates/project/src/yarn.rs | 5
crates/project_panel/src/project_panel.rs | 28
crates/remote/src/transport/ssh.rs | 6
crates/remote/src/transport/wsl.rs | 6
crates/remote_server/src/remote_editing_tests.rs | 2
crates/remote_server/src/unix.rs | 2
crates/settings/src/settings_store.rs | 9
crates/terminal_view/src/terminal_path_like_target.rs | 6
crates/util/Cargo.toml | 1
crates/util/src/rel_path.rs | 230 ++--
crates/vim/src/command.rs | 18
crates/vim/src/state.rs | 5
crates/workspace/src/persistence.rs | 8
crates/workspace/src/workspace.rs | 10
crates/worktree/src/worktree.rs | 65
crates/zed/src/zed.rs | 5
crates/zeta/src/license_detection.rs | 4
crates/zeta/src/zeta.rs | 4
crates/zeta2_tools/src/zeta2_tools.rs | 4
crates/zeta_cli/src/main.rs | 2
70 files changed, 482 insertions(+), 407 deletions(-)
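
Most of the test churn in the diff below follows one of two patterns, sketched here under the same assumptions (the accessor and helper names come from the diff; the function signatures and paths are illustrative):

```rust
use util::rel_path::{RelPath, rel_path};

// Some assertions keep comparing strings and only switch to the renamed accessor.
fn assert_as_strings(paths: Vec<&RelPath>) {
    assert_eq!(
        paths.iter().map(|p| p.as_unix_str()).collect::<Vec<_>>(),
        ["a.txt", "b.txt"]
    );
}

// Most drop the string round-trip and compare `RelPath` values directly
// via the `rel_path` test helper.
fn assert_as_rel_paths(paths: Vec<&RelPath>) {
    assert_eq!(paths, [rel_path("a.txt"), rel_path("b.txt")]);
}
```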
@@ -17528,6 +17528,7 @@ dependencies = [
"libc",
"log",
"nix 0.29.0",
+ "pretty_assertions",
"rand 0.9.1",
"regex",
"rust-embed",
@@ -235,7 +235,7 @@ impl ThreadStore {
if items.iter().any(|(path, _, _)| {
RULES_FILE_NAMES
.iter()
- .any(|name| path.as_ref() == RelPath::new(name).unwrap())
+ .any(|name| path.as_ref() == RelPath::unix(name).unwrap())
}) {
self.enqueue_system_prompt_reload();
}
@@ -368,7 +368,7 @@ impl ThreadStore {
.into_iter()
.filter_map(|name| {
worktree
- .entry_for_path(RelPath::new(name).unwrap())
+ .entry_for_path(RelPath::unix(name).unwrap())
.filter(|entry| entry.is_file())
.map(|entry| entry.path.clone())
})
@@ -475,7 +475,7 @@ impl NativeAgent {
.into_iter()
.filter_map(|name| {
worktree
- .entry_for_path(RelPath::new(name).unwrap())
+ .entry_for_path(RelPath::unix(name).unwrap())
.filter(|entry| entry.is_file())
.map(|entry| entry.path.clone())
})
@@ -559,7 +559,7 @@ impl NativeAgent {
if items.iter().any(|(path, _, _)| {
RULES_FILE_NAMES
.iter()
- .any(|name| path.as_ref() == RelPath::new(name).unwrap())
+ .any(|name| path.as_ref() == RelPath::unix(name).unwrap())
}) {
self.project_context_needs_refresh.send(()).ok();
}
@@ -541,7 +541,7 @@ fn resolve_path(
.path
.file_name()
.and_then(|file_name| file_name.to_str())
- .and_then(|file_name| RelPath::new(file_name).ok())
+ .and_then(|file_name| RelPath::unix(file_name).ok())
.context("Can't create file: invalid filename")?;
let new_file_path = parent_project_path.map(|parent| ProjectPath {
@@ -565,7 +565,7 @@ mod tests {
use prompt_store::ProjectContext;
use serde_json::json;
use settings::SettingsStore;
- use util::path;
+ use util::{path, rel_path::rel_path};
#[gpui::test]
async fn test_edit_nonexistent_file(cx: &mut TestAppContext) {
@@ -614,13 +614,13 @@ mod tests {
let mode = &EditFileMode::Create;
let result = test_resolve_path(mode, "root/new.txt", cx);
- assert_resolved_path_eq(result.await, "new.txt");
+ assert_resolved_path_eq(result.await, rel_path("new.txt"));
let result = test_resolve_path(mode, "new.txt", cx);
- assert_resolved_path_eq(result.await, "new.txt");
+ assert_resolved_path_eq(result.await, rel_path("new.txt"));
let result = test_resolve_path(mode, "dir/new.txt", cx);
- assert_resolved_path_eq(result.await, "dir/new.txt");
+ assert_resolved_path_eq(result.await, rel_path("dir/new.txt"));
let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx);
assert_eq!(
@@ -642,10 +642,10 @@ mod tests {
let path_with_root = "root/dir/subdir/existing.txt";
let path_without_root = "dir/subdir/existing.txt";
let result = test_resolve_path(mode, path_with_root, cx);
- assert_resolved_path_eq(result.await, path_without_root);
+ assert_resolved_path_eq(result.await, rel_path(path_without_root));
let result = test_resolve_path(mode, path_without_root, cx);
- assert_resolved_path_eq(result.await, path_without_root);
+ assert_resolved_path_eq(result.await, rel_path(path_without_root));
let result = test_resolve_path(mode, "root/nonexistent.txt", cx);
assert_eq!(
@@ -691,10 +691,9 @@ mod tests {
}
#[track_caller]
- fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
+ fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &RelPath) {
let actual = path.expect("Should return valid path").path;
- let actual = actual.as_str();
- assert_eq!(actual, expected);
+ assert_eq!(actual.as_ref(), expected);
}
#[gpui::test]
@@ -290,7 +290,7 @@ fn collect_files(
folded_directory_names.join(&path_including_worktree_name);
} else {
folded_directory_names =
- folded_directory_names.join(RelPath::new(&filename).unwrap());
+ folded_directory_names.join(RelPath::unix(&filename).unwrap());
}
continue;
}
@@ -320,7 +320,7 @@ fn collect_files(
directory_stack.push(entry.path.clone());
} else {
let entry_name =
- folded_directory_names.join(RelPath::new(&filename).unwrap());
+ folded_directory_names.join(RelPath::unix(&filename).unwrap());
let entry_name = entry_name.display(path_style);
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
@@ -505,7 +505,7 @@ mod custom_path_matcher {
.iter()
.zip(self.sources_with_trailing_slash.iter())
.any(|(source, with_slash)| {
- let as_bytes = other.as_str().as_bytes();
+ let as_bytes = other.as_unix_str().as_bytes();
let with_slash = if source.ends_with('/') {
source.as_bytes()
} else {
@@ -514,12 +514,12 @@ mod custom_path_matcher {
as_bytes.starts_with(with_slash) || as_bytes.ends_with(source.as_bytes())
})
- || self.glob.is_match(other)
+ || self.glob.is_match(other.as_std_path())
|| self.check_with_end_separator(other)
}
fn check_with_end_separator(&self, path: &RelPath) -> bool {
- let path_str = path.as_str();
+ let path_str = path.as_unix_str();
let separator = "/";
if path_str.ends_with(separator) {
false
@@ -554,7 +554,7 @@ fn resolve_path(
.context("Can't create file: invalid filename")?;
let new_file_path = parent_project_path.map(|parent| ProjectPath {
- path: parent.path.join(RelPath::new(file_name).unwrap()),
+ path: parent.path.join(RelPath::unix(file_name).unwrap()),
..parent
});
@@ -86,6 +86,7 @@ impl Tool for ListDirectoryTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
+ let path_style = project.read(cx).path_style(cx);
let input = match serde_json::from_value::<ListDirectoryToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
@@ -100,7 +101,7 @@ impl Tool for ListDirectoryTool {
.filter_map(|worktree| {
worktree.read(cx).root_entry().and_then(|entry| {
if entry.is_dir() {
- Some(entry.path.as_str())
+ Some(entry.path.display(path_style))
} else {
None
}
@@ -858,7 +858,7 @@ mod tests {
.enumerate()
.filter_map(|(i, path)| {
Some((
- Arc::from(RelPath::new(path).ok()?),
+ Arc::from(RelPath::unix(path).ok()?),
ProjectEntryId::from_proto(i as u64 + 1),
PathChange::Added,
))
@@ -1408,12 +1408,12 @@ async fn test_share_project(
project_b.read_with(cx_b, |project, cx| {
let worktree = project.worktrees(cx).next().unwrap().read(cx);
assert_eq!(
- worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
+ worktree.paths().collect::<Vec<_>>(),
[
- Path::new(".gitignore"),
- Path::new("a.txt"),
- Path::new("b.txt"),
- Path::new("ignored-dir"),
+ rel_path(".gitignore"),
+ rel_path("a.txt"),
+ rel_path("b.txt"),
+ rel_path("ignored-dir"),
]
);
});
@@ -1433,14 +1433,14 @@ async fn test_share_project(
project_b.read_with(cx_b, |project, cx| {
let worktree = project.worktrees(cx).next().unwrap().read(cx);
assert_eq!(
- worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
+ worktree.paths().collect::<Vec<_>>(),
[
- Path::new(".gitignore"),
- Path::new("a.txt"),
- Path::new("b.txt"),
- Path::new("ignored-dir"),
- Path::new("ignored-dir/c.txt"),
- Path::new("ignored-dir/d.txt"),
+ rel_path(".gitignore"),
+ rel_path("a.txt"),
+ rel_path("b.txt"),
+ rel_path("ignored-dir"),
+ rel_path("ignored-dir/c.txt"),
+ rel_path("ignored-dir/d.txt"),
]
);
});
@@ -632,13 +632,16 @@ async fn test_following_tab_order(
let pane_paths = |pane: &Entity<workspace::Pane>, cx: &mut VisualTestContext| {
pane.update(cx, |pane, cx| {
pane.items()
- .map(|item| item.project_path(cx).unwrap().path.as_str().to_owned())
+ .map(|item| item.project_path(cx).unwrap().path)
.collect::<Vec<_>>()
})
};
//Verify that the tabs opened in the order we expect
- assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]);
+ assert_eq!(
+ &pane_paths(&pane_a, cx_a),
+ &[rel_path("1.txt").into(), rel_path("3.txt").into()]
+ );
//Follow client B as client A
workspace_a.update_in(cx_a, |workspace, window, cx| {
@@ -656,7 +659,14 @@ async fn test_following_tab_order(
executor.run_until_parked();
// Verify that newly opened followed file is at the end
- assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
+ assert_eq!(
+ &pane_paths(&pane_a, cx_a),
+ &[
+ rel_path("1.txt").into(),
+ rel_path("3.txt").into(),
+ rel_path("2.txt").into()
+ ]
+ );
//Open just 1 on client B
workspace_b
@@ -665,11 +675,21 @@ async fn test_following_tab_order(
})
.await
.unwrap();
- assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]);
+ assert_eq!(
+ &pane_paths(&pane_b, cx_b),
+ &[rel_path("2.txt").into(), rel_path("1.txt").into()]
+ );
executor.run_until_parked();
// Verify that following into 1 did not reorder
- assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
+ assert_eq!(
+ &pane_paths(&pane_a, cx_a),
+ &[
+ rel_path("1.txt").into(),
+ rel_path("3.txt").into(),
+ rel_path("2.txt").into()
+ ]
+ );
}
#[gpui::test(iterations = 10)]
@@ -1699,13 +1699,8 @@ async fn test_project_reconnect(
);
assert!(worktree_a3.read(cx).has_update_observer());
assert_eq!(
- worktree_a3
- .read(cx)
- .snapshot()
- .paths()
- .map(|p| p.as_str())
- .collect::<Vec<_>>(),
- vec!["w.txt", "x.txt", "y.txt"]
+ worktree_a3.read(cx).snapshot().paths().collect::<Vec<_>>(),
+ vec![rel_path("w.txt"), rel_path("x.txt"), rel_path("y.txt")]
);
});
@@ -1737,9 +1732,8 @@ async fn test_project_reconnect(
.read(cx)
.snapshot()
.paths()
- .map(|p| p.as_str())
.collect::<Vec<_>>(),
- vec!["w.txt", "x.txt", "y.txt"]
+ vec![rel_path("w.txt"), rel_path("x.txt"), rel_path("y.txt")]
);
});
@@ -1833,7 +1827,7 @@ async fn test_project_reconnect(
.read(cx)
.snapshot()
.paths()
- .map(|p| p.as_str())
+ .map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
vec!["z.txt"]
);
@@ -2471,39 +2465,39 @@ async fn test_propagate_saves_and_fs_changes(
worktree_a.read_with(cx_a, |tree, _| {
assert_eq!(
- tree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
- ["file1.js", "file3", "file4"]
+ tree.paths().collect::<Vec<_>>(),
+ [rel_path("file1.js"), rel_path("file3"), rel_path("file4")]
)
});
worktree_b.read_with(cx_b, |tree, _| {
assert_eq!(
- tree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
- ["file1.js", "file3", "file4"]
+ tree.paths().collect::<Vec<_>>(),
+ [rel_path("file1.js"), rel_path("file3"), rel_path("file4")]
)
});
worktree_c.read_with(cx_c, |tree, _| {
assert_eq!(
- tree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
- ["file1.js", "file3", "file4"]
+ tree.paths().collect::<Vec<_>>(),
+ [rel_path("file1.js"), rel_path("file3"), rel_path("file4")]
)
});
// Ensure buffer files are updated as well.
buffer_a.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js");
+ assert_eq!(buffer.file().unwrap().path().as_ref(), rel_path("file1.js"));
assert_eq!(buffer.language().unwrap().name(), "JavaScript".into());
});
buffer_b.read_with(cx_b, |buffer, _| {
- assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js");
+ assert_eq!(buffer.file().unwrap().path().as_ref(), rel_path("file1.js"));
assert_eq!(buffer.language().unwrap().name(), "JavaScript".into());
});
buffer_c.read_with(cx_c, |buffer, _| {
- assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js");
+ assert_eq!(buffer.file().unwrap().path().as_ref(), rel_path("file1.js"));
assert_eq!(buffer.language().unwrap().name(), "JavaScript".into());
});
@@ -3217,15 +3211,15 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
- ["a.txt", "b.txt", "c.txt"]
+ worktree.paths().collect::<Vec<_>>(),
+ [rel_path("a.txt"), rel_path("b.txt"), rel_path("c.txt")]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
- ["a.txt", "b.txt", "c.txt"]
+ worktree.paths().collect::<Vec<_>>(),
+ [rel_path("a.txt"), rel_path("b.txt"), rel_path("c.txt")]
);
});
@@ -3240,14 +3234,17 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
- ["a.txt", "b.txt", "d.txt"]
+ worktree.paths().collect::<Vec<_>>(),
+ [rel_path("a.txt"), rel_path("b.txt"), rel_path("d.txt")]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
+ worktree
+ .paths()
+ .map(|p| p.as_unix_str())
+ .collect::<Vec<_>>(),
["a.txt", "b.txt", "d.txt"]
);
});
@@ -3263,14 +3260,20 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
+ worktree
+ .paths()
+ .map(|p| p.as_unix_str())
+ .collect::<Vec<_>>(),
["DIR", "a.txt", "b.txt", "d.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
+ worktree
+ .paths()
+ .map(|p| p.as_unix_str())
+ .collect::<Vec<_>>(),
["DIR", "a.txt", "b.txt", "d.txt"]
);
});
@@ -3386,14 +3389,20 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
+ worktree
+ .paths()
+ .map(|p| p.as_unix_str())
+ .collect::<Vec<_>>(),
["a.txt", "b.txt", "d.txt", "f.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
+ worktree
+ .paths()
+ .map(|p| p.as_unix_str())
+ .collect::<Vec<_>>(),
["a.txt", "b.txt", "d.txt", "f.txt"]
);
});
@@ -3407,14 +3416,20 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
+ worktree
+ .paths()
+ .map(|p| p.as_unix_str())
+ .collect::<Vec<_>>(),
["a.txt", "b.txt", "f.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
- worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
+ worktree
+ .paths()
+ .map(|p| p.as_unix_str())
+ .collect::<Vec<_>>(),
["a.txt", "b.txt", "f.txt"]
);
});
@@ -973,7 +973,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let dot_git_dir = repo_path.join(".git");
let contents = contents
.iter()
- .map(|(path, contents)| (path.as_str(), contents.clone()))
+ .map(|(path, contents)| (path.as_unix_str(), contents.clone()))
.collect::<Vec<_>>();
if client.fs().metadata(&dot_git_dir).await?.is_none() {
client.fs().create_dir(&dot_git_dir).await?;
@@ -1031,7 +1031,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let statuses = statuses
.iter()
- .map(|(path, val)| (path.as_str(), *val))
+ .map(|(path, val)| (path.as_unix_str(), *val))
.collect::<Vec<_>>();
if client.fs().metadata(&dot_git_dir).await?.is_none() {
@@ -1463,7 +1463,7 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
paths
.iter()
.map(|path| {
- RelPath::from_std_path(path.strip_prefix(repo_path).unwrap(), PathStyle::local())
+ RelPath::new(path.strip_prefix(repo_path).unwrap(), PathStyle::local())
.unwrap()
.to_rel_path_buf()
})
@@ -726,7 +726,7 @@ impl DebugAdapter for PythonDebugAdapter {
.config
.get("cwd")
.and_then(|cwd| {
- RelPath::from_std_path(
+ RelPath::new(
cwd.as_str()
.map(Path::new)?
.strip_prefix(delegate.worktree_root_path())
@@ -740,7 +740,7 @@ impl DebugAdapter for PythonDebugAdapter {
.toolchain_store()
.active_toolchain(
delegate.worktree_id(),
- base_path,
+ base_path.into_arc(),
language::LanguageName::new(Self::LANGUAGE_NAME),
cx,
)
@@ -1062,10 +1062,10 @@ impl DebugPanel {
directory_in_worktree: dir,
..
} => {
- let relative_path = if dir.ends_with(RelPath::new(".vscode").unwrap()) {
- dir.join(RelPath::new("launch.json").unwrap())
+ let relative_path = if dir.ends_with(RelPath::unix(".vscode").unwrap()) {
+ dir.join(RelPath::unix("launch.json").unwrap())
} else {
- dir.join(RelPath::new("debug.json").unwrap())
+ dir.join(RelPath::unix("debug.json").unwrap())
};
ProjectPath {
worktree_id: id,
@@ -1136,7 +1136,7 @@ impl DebugPanel {
}
path.pop();
- path.push(paths::local_debug_file_relative_path());
+ path.push(paths::local_debug_file_relative_path().as_std_path());
let path = path.as_path();
if !fs.is_file(path).await {
@@ -1037,10 +1037,10 @@ impl DebugDelegate {
match path.components().next_back() {
Some(".zed") => {
- path.push(RelPath::new("debug.json").unwrap());
+ path.push(RelPath::unix("debug.json").unwrap());
}
Some(".vscode") => {
- path.push(RelPath::new("launch.json").unwrap());
+ path.push(RelPath::unix("launch.json").unwrap());
}
_ => {}
}
@@ -1133,7 +1133,7 @@ impl DebugDelegate {
id: _,
directory_in_worktree: dir,
id_base: _,
- } => dir.ends_with(RelPath::new(".zed").unwrap()),
+ } => dir.ends_with(RelPath::unix(".zed").unwrap()),
_ => false,
});
@@ -1154,7 +1154,7 @@ impl DebugDelegate {
id_base: _,
} => {
!(hide_vscode
- && dir.ends_with(RelPath::new(".vscode").unwrap()))
+ && dir.ends_with(RelPath::unix(".vscode").unwrap()))
}
_ => true,
})
@@ -682,10 +682,11 @@ impl Render for BreakpointList {
breakpoints.into_iter().filter_map(move |breakpoint| {
debug_assert_eq!(&path, &breakpoint.path);
let file_name = breakpoint.path.file_name()?;
+ let breakpoint_path = RelPath::new(&breakpoint.path, path_style).ok();
let dir = relative_worktree_path
- .clone()
- .or_else(|| RelPath::from_std_path(&breakpoint.path, path_style).ok())?
+ .as_deref()
+ .or(breakpoint_path.as_deref())?
.parent()
.map(|parent| SharedString::from(parent.display(path_style).to_string()));
let name = file_name
@@ -20905,7 +20905,10 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
for buffer in &buffers {
let snapshot = buffer.read(cx).snapshot();
multibuffer.set_excerpts_for_path(
- PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().as_str().into()),
+ PathKey::namespaced(
+ 0,
+ buffer.read(cx).file().unwrap().path().as_unix_str().into(),
+ ),
buffer.clone(),
vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
2,
@@ -296,7 +296,7 @@ impl EditorTestContext {
let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
fs.set_head_for_repo(
&Self::root_path().join(".git"),
- &[(path.as_str(), diff_base.to_string())],
+ &[(path.as_unix_str(), diff_base.to_string())],
"deadbeef",
);
self.cx.run_until_parked();
@@ -317,7 +317,7 @@ impl EditorTestContext {
let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
fs.set_index_for_repo(
&Self::root_path().join(".git"),
- &[(path.as_str(), diff_base.to_string())],
+ &[(path.as_unix_str(), diff_base.to_string())],
);
self.cx.run_until_parked();
}
@@ -67,7 +67,7 @@ impl Example for AddArgToTraitMethod {
for tool_name in add_ignored_window_paths {
let path_str = format!("crates/assistant_tools/src/{}.rs", tool_name);
- let edits = edits.get(RelPath::new(&path_str).unwrap());
+ let edits = edits.get(RelPath::unix(&path_str).unwrap());
let ignored = edits.is_some_and(|edits| {
edits.has_added_line(" _window: Option<gpui::AnyWindowHandle>,\n")
@@ -86,7 +86,7 @@ impl Example for AddArgToTraitMethod {
// Adds unignored argument to `batch_tool`
let batch_tool_edits =
- edits.get(RelPath::new("crates/assistant_tools/src/batch_tool.rs").unwrap());
+ edits.get(RelPath::unix("crates/assistant_tools/src/batch_tool.rs").unwrap());
cx.assert(
batch_tool_edits.is_some_and(|edits| {
@@ -425,7 +425,7 @@ impl ExtensionImports for WasmState {
let location = location.as_ref().and_then(|location| {
Some(::settings::SettingsLocation {
worktree_id: WorktreeId::from_proto(location.worktree_id),
- path: RelPath::new(&location.path).ok()?,
+ path: RelPath::unix(&location.path).ok()?,
})
});
@@ -564,7 +564,7 @@ impl HostWorktree for WasmState {
) -> wasmtime::Result<Result<String, String>> {
let delegate = self.table.get(&delegate)?;
Ok(delegate
- .read_text_file(RelPath::new(&path)?)
+ .read_text_file(RelPath::unix(&path)?)
.await
.map_err(|error| error.to_string()))
}
@@ -917,7 +917,7 @@ impl ExtensionImports for WasmState {
let location = location.as_ref().and_then(|location| {
Some(::settings::SettingsLocation {
worktree_id: WorktreeId::from_proto(location.worktree_id),
- path: RelPath::new(&location.path).ok()?,
+ path: RelPath::unix(&location.path).ok()?,
})
});
@@ -666,7 +666,7 @@ impl Matches {
}
if let Some(filename) = panel_match.0.path.file_name() {
- let path_str = panel_match.0.path.as_str();
+ let path_str = panel_match.0.path.as_unix_str();
if let Some(filename_pos) = path_str.rfind(filename)
&& panel_match.0.positions[0] >= filename_pos
@@ -940,7 +940,7 @@ impl FileFinderDelegate {
let path_style = self.project.read(cx).path_style(cx);
let query_path = query.raw_query.as_str();
- if let Ok(mut query_path) = RelPath::from_std_path(Path::new(query_path), path_style) {
+ if let Ok(mut query_path) = RelPath::new(Path::new(query_path), path_style) {
let available_worktree = self
.project
.read(cx)
@@ -953,7 +953,7 @@ impl FileFinderDelegate {
let worktree_root = worktree.read(cx).root_name();
if worktree_count > 1 {
if let Ok(suffix) = query_path.strip_prefix(worktree_root) {
- query_path = suffix.into();
+ query_path = Cow::Owned(suffix.to_owned());
expect_worktree = Some(worktree);
break;
}
@@ -973,7 +973,7 @@ impl FileFinderDelegate {
{
self.matches.matches.push(Match::CreateNew(ProjectPath {
worktree_id: worktree.id(),
- path: query_path,
+ path: query_path.into_arc(),
}));
}
}
@@ -1128,7 +1128,7 @@ impl FileFinderDelegate {
let mut path_positions = path_match.positions.clone();
let file_name = full_path.file_name().unwrap_or("");
- let file_name_start = full_path.as_str().len() - file_name.len();
+ let file_name_start = full_path.as_unix_str().len() - file_name.len();
let file_name_positions = path_positions
.iter()
.filter_map(|pos| {
@@ -1325,7 +1325,7 @@ impl PickerDelegate for FileFinderDelegate {
.all(|worktree| {
worktree
.read(cx)
- .entry_for_path(RelPath::new(prefix.split_at(1).0).unwrap())
+ .entry_for_path(RelPath::unix(prefix.split_at(1).0).unwrap())
.is_none_or(|entry| !entry.is_dir())
})
{
@@ -2192,7 +2192,7 @@ async fn test_nonexistent_history_items_not_shown(cx: &mut gpui::TestAppContext)
collect_search_matches(picker).history,
vec![
rel_path("test/first.rs").into(),
- rel_path("test/third.rs").into(),
+ rel_path("test/third.rs").into()
],
"Should have all opened files in the history, except the ones that do not exist on disk"
);
@@ -226,7 +226,7 @@ impl GitRepository for FakeGitRepository {
.read_file_sync(path)
.ok()
.map(|content| String::from_utf8(content).unwrap())?;
- let repo_path = RelPath::from_std_path(repo_path, PathStyle::local()).ok()?;
+ let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
Some((repo_path.into(), (content, is_ignored)))
})
.collect();
@@ -1671,10 +1671,10 @@ impl FakeFs {
for (path, content) in workdir_contents {
use util::{paths::PathStyle, rel_path::RelPath};
- let repo_path: RepoPath = RelPath::from_std_path(path.strip_prefix(&workdir_path).unwrap(), PathStyle::local()).unwrap().into();
+ let repo_path: RepoPath = RelPath::new(path.strip_prefix(&workdir_path).unwrap(), PathStyle::local()).unwrap().into();
let status = statuses
.iter()
- .find_map(|(p, status)| (*p == repo_path.as_str()).then_some(status));
+ .find_map(|(p, status)| (*p == repo_path.as_unix_str()).then_some(status));
let mut content = String::from_utf8_lossy(&content).to_string();
let mut index_content = None;
@@ -52,7 +52,7 @@ impl<'a> MatchCandidate for PathMatchCandidate<'a> {
}
fn candidate_chars(&self) -> impl Iterator<Item = char> {
- self.path.as_str().chars()
+ self.path.as_unix_str().chars()
}
}
@@ -184,8 +184,11 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
let candidates = candidate_set.candidates(start).take(end - start);
let worktree_id = candidate_set.id();
- let mut prefix =
- candidate_set.prefix().as_str().chars().collect::<Vec<_>>();
+ let mut prefix = candidate_set
+ .prefix()
+ .as_unix_str()
+ .chars()
+ .collect::<Vec<_>>();
if !candidate_set.root_is_file() && !prefix.is_empty() {
prefix.push('/');
}
@@ -77,7 +77,7 @@ async fn run_git_blame(
.arg("-w")
.arg("--contents")
.arg("-")
- .arg(path.as_str())
+ .arg(path.as_unix_str())
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
@@ -12,6 +12,7 @@ use parking_lot::Mutex;
use rope::Rope;
use schemars::JsonSchema;
use serde::Deserialize;
+use std::borrow::Cow;
use std::ffi::{OsStr, OsString};
use std::io::prelude::*;
use std::process::{ExitStatus, Stdio};
@@ -719,7 +720,7 @@ impl GitRepository for RealGitRepository {
let mut newline = [b'\0'];
for (path, status_code) in changes {
// git-show outputs `/`-delimited paths even on Windows.
- let Ok(rel_path) = RelPath::new(path) else {
+ let Some(rel_path) = RelPath::unix(path).log_err() else {
continue;
};
@@ -828,7 +829,7 @@ impl GitRepository for RealGitRepository {
.current_dir(&working_directory?)
.envs(env.iter())
.args(["checkout", &commit, "--"])
- .args(paths.iter().map(|path| path.as_str()))
+ .args(paths.iter().map(|path| path.as_unix_str()))
.output()
.await?;
anyhow::ensure!(
@@ -920,7 +921,7 @@ impl GitRepository for RealGitRepository {
.current_dir(&working_directory)
.envs(env.iter())
.args(["update-index", "--add", "--cacheinfo", "100644", sha])
- .arg(path.as_str())
+ .arg(path.as_unix_str())
.output()
.await?;
@@ -935,7 +936,7 @@ impl GitRepository for RealGitRepository {
.current_dir(&working_directory)
.envs(env.iter())
.args(["update-index", "--force-remove"])
- .arg(path.as_str())
+ .arg(path.as_unix_str())
.output()
.await?;
anyhow::ensure!(
@@ -1253,7 +1254,7 @@ impl GitRepository for RealGitRepository {
.current_dir(&working_directory?)
.envs(env.iter())
.args(["update-index", "--add", "--remove", "--"])
- .args(paths.iter().map(|p| p.as_str()))
+ .args(paths.iter().map(|p| p.as_unix_str()))
.output()
.await?;
anyhow::ensure!(
@@ -1282,7 +1283,7 @@ impl GitRepository for RealGitRepository {
.current_dir(&working_directory?)
.envs(env.iter())
.args(["reset", "--quiet", "--"])
- .args(paths.iter().map(|p| p.as_ref()))
+ .args(paths.iter().map(|p| p.as_std_path()))
.output()
.await?;
@@ -1311,7 +1312,7 @@ impl GitRepository for RealGitRepository {
.args(["stash", "push", "--quiet"])
.arg("--include-untracked");
- cmd.args(paths.iter().map(|p| p.as_ref()));
+ cmd.args(paths.iter().map(|p| p.as_unix_str()));
let output = cmd.output().await?;
@@ -1817,7 +1818,7 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec<OsString> {
if path_prefix.is_empty() {
Path::new(".").into()
} else {
- path_prefix.as_os_str().into()
+ path_prefix.as_std_path().into()
}
}));
args
@@ -2073,7 +2074,7 @@ pub struct RepoPath(pub Arc<RelPath>);
impl RepoPath {
pub fn new<S: AsRef<str> + ?Sized>(s: &S) -> Result<Self> {
- let rel_path = RelPath::new(s)?;
+ let rel_path = RelPath::unix(s.as_ref())?;
Ok(rel_path.into())
}
@@ -2083,14 +2084,14 @@ impl RepoPath {
}
pub fn from_std_path(path: &Path, path_style: PathStyle) -> Result<Self> {
- let rel_path = RelPath::from_std_path(path, path_style)?;
- Ok(rel_path.into())
+ let rel_path = RelPath::new(path, path_style)?;
+ Ok(Self(rel_path.as_ref().into()))
}
}
#[cfg(any(test, feature = "test-support"))]
pub fn repo_path<S: AsRef<str> + ?Sized>(s: &S) -> RepoPath {
- RepoPath(RelPath::new(s).unwrap().into())
+ RepoPath(RelPath::unix(s.as_ref()).unwrap().into())
}
impl From<&RelPath> for RepoPath {
@@ -2099,6 +2100,12 @@ impl From<&RelPath> for RepoPath {
}
}
+impl<'a> From<Cow<'a, RelPath>> for RepoPath {
+ fn from(value: Cow<'a, RelPath>) -> Self {
+ value.as_ref().into()
+ }
+}
+
impl From<Arc<RelPath>> for RepoPath {
fn from(value: Arc<RelPath>) -> Self {
RepoPath(value)
@@ -2119,11 +2126,11 @@ impl std::ops::Deref for RepoPath {
}
}
-impl AsRef<Path> for RepoPath {
- fn as_ref(&self) -> &Path {
- RelPath::as_ref(&self.0)
- }
-}
+// impl AsRef<Path> for RepoPath {
+// fn as_ref(&self) -> &Path {
+// RelPath::as_ref(&self.0)
+// }
+// }
#[derive(Debug)]
pub struct RepoPathDescendants<'a>(pub &'a RepoPath);
@@ -448,7 +448,7 @@ impl FromStr for GitStatus {
let status = entry.as_bytes()[0..2].try_into().unwrap();
let status = FileStatus::from_bytes(status).log_err()?;
// git-status outputs `/`-delimited repo paths, even on Windows.
- let path = RepoPath(RelPath::new(path).log_err()?.into());
+ let path = RepoPath(RelPath::unix(path).log_err()?.into());
Some((path, status))
})
.collect::<Vec<_>>();
@@ -128,7 +128,7 @@ impl CommitView {
let mut metadata_buffer_id = None;
if let Some(worktree_id) = first_worktree_id {
let file = Arc::new(CommitMetadataFile {
- title: RelPath::new(&format!("commit {}", commit.sha))
+ title: RelPath::unix(&format!("commit {}", commit.sha))
.unwrap()
.into(),
worktree_id,
@@ -145,7 +145,7 @@ impl CommitView {
});
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.set_excerpts_for_path(
- PathKey::namespaced(COMMIT_METADATA_NAMESPACE, file.title.as_str().into()),
+ PathKey::namespaced(COMMIT_METADATA_NAMESPACE, file.title.as_unix_str().into()),
buffer.clone(),
vec![Point::zero()..buffer.read(cx).max_point()],
0,
@@ -193,7 +193,7 @@ impl CommitView {
.collect::<Vec<_>>();
let path = snapshot.file().unwrap().path().clone();
let _is_newly_added = multibuffer.set_excerpts_for_path(
- PathKey::namespaced(FILE_NAMESPACE, path.as_str().into()),
+ PathKey::namespaced(FILE_NAMESPACE, path.as_unix_str().into()),
buffer,
diff_hunk_ranges,
multibuffer_context_lines(cx),
@@ -275,7 +275,7 @@ impl language::File for CommitMetadataFile {
}
fn full_path(&self, _: &App) -> PathBuf {
- PathBuf::from(self.title.as_str().to_owned())
+ PathBuf::from(self.title.as_unix_str().to_owned())
}
fn file_name<'a>(&'a self, _: &'a App) -> &'a str {
@@ -243,7 +243,7 @@ impl ProjectDiff {
TRACKED_NAMESPACE
};
- let path_key = PathKey::namespaced(namespace, entry.repo_path.as_str().into());
+ let path_key = PathKey::namespaced(namespace, entry.repo_path.as_unix_str().into());
self.move_to_path(path_key, window, cx)
}
@@ -397,7 +397,7 @@ impl ProjectDiff {
} else {
TRACKED_NAMESPACE
};
- let path_key = PathKey::namespaced(namespace, entry.repo_path.as_str().into());
+ let path_key = PathKey::namespaced(namespace, entry.repo_path.as_unix_str().into());
previous_paths.remove(&path_key);
let load_buffer = self
@@ -162,7 +162,7 @@ impl LanguageServerState {
let relative_path =
abs_path.strip_prefix(&worktree.abs_path()).ok()?;
let relative_path =
- RelPath::from_std_path(relative_path, path_style)
+ RelPath::new(relative_path, path_style)
.log_err()?;
let entry = worktree.entry_for_path(&relative_path)?;
let project_path =
@@ -55,8 +55,8 @@ impl ContextProvider for JsonTaskProvider {
let Some(file) = project::File::from_dyn(file.as_ref()).cloned() else {
return Task::ready(None);
};
- let is_package_json = file.path.ends_with(RelPath::new("package.json").unwrap());
- let is_composer_json = file.path.ends_with(RelPath::new("composer.json").unwrap());
+ let is_package_json = file.path.ends_with(RelPath::unix("package.json").unwrap());
+ let is_composer_json = file.path.ends_with(RelPath::unix("composer.json").unwrap());
if !is_package_json && !is_composer_json {
return Task::ready(None);
}
@@ -55,7 +55,7 @@ impl ManifestProvider for PyprojectTomlManifestProvider {
}: ManifestQuery,
) -> Option<Arc<RelPath>> {
for path in path.ancestors().take(depth) {
- let p = path.join(RelPath::new("pyproject.toml").unwrap());
+ let p = path.join(RelPath::unix("pyproject.toml").unwrap());
if delegate.exists(&p, Some(false)) {
return Some(path.into());
}
@@ -1030,7 +1030,7 @@ impl ToolchainLister for PythonToolchainProvider {
config.workspace_directories = Some(
subroot_relative_path
.ancestors()
- .map(|ancestor| worktree_root.join(ancestor))
+ .map(|ancestor| worktree_root.join(ancestor.as_std_path()))
.collect(),
);
for locator in locators.iter() {
@@ -92,7 +92,7 @@ impl ManifestProvider for CargoManifestProvider {
) -> Option<Arc<RelPath>> {
let mut outermost_cargo_toml = None;
for path in path.ancestors().take(depth) {
- let p = path.join(RelPath::new("Cargo.toml").unwrap());
+ let p = path.join(RelPath::unix("Cargo.toml").unwrap());
if delegate.exists(&p, Some(false)) {
outermost_cargo_toml = Some(Arc::from(path));
}
@@ -269,7 +269,7 @@ impl TypeScriptContextProvider {
) -> Task<anyhow::Result<PackageJsonData>> {
let new_json_data = file_relative_path
.ancestors()
- .map(|path| worktree_root.join(path))
+ .map(|path| worktree_root.join(path.as_std_path()))
.map(|parent_path| {
self.package_json_data(&parent_path, self.last_package_json.clone(), fs.clone(), cx)
})
@@ -533,7 +533,7 @@ impl TypeScriptLspAdapter {
}
async fn tsdk_path(&self, adapter: &Arc<dyn LspAdapterDelegate>) -> Option<&'static str> {
let is_yarn = adapter
- .read_text_file(RelPath::new(".yarn/sdks/typescript/lib/typescript.js").unwrap())
+ .read_text_file(RelPath::unix(".yarn/sdks/typescript/lib/typescript.js").unwrap())
.await
.is_ok();
@@ -36,7 +36,7 @@ impl VtslsLspAdapter {
async fn tsdk_path(&self, adapter: &Arc<dyn LspAdapterDelegate>) -> Option<&'static str> {
let is_yarn = adapter
- .read_text_file(RelPath::new(".yarn/sdks/typescript/lib/typescript.js").unwrap())
+ .read_text_file(RelPath::unix(".yarn/sdks/typescript/lib/typescript.js").unwrap())
.await
.is_ok();
@@ -6660,7 +6660,7 @@ outline: struct OutlineEntryExcerpt
.read(cx)
.root_name()
.join(&directory.entry.path)
- .as_str()
+ .as_unix_str()
.to_string()
} else {
directory
@@ -31,12 +31,12 @@ static CONFIG_DIR: OnceLock<PathBuf> = OnceLock::new();
/// Returns the relative path to the zed_server directory on the ssh host.
pub fn remote_server_dir_relative() -> &'static RelPath {
- RelPath::new(".zed_server").unwrap()
+ RelPath::unix(".zed_server").unwrap()
}
/// Returns the relative path to the zed_wsl_server directory on the wsl host.
pub fn remote_wsl_server_dir_relative() -> &'static RelPath {
- RelPath::new(".zed_wsl_server").unwrap()
+ RelPath::unix(".zed_wsl_server").unwrap()
}
/// Sets a custom directory for all user data, overriding the default data directory.
@@ -410,17 +410,17 @@ pub fn local_vscode_folder_name() -> &'static str {
/// Returns the relative path to a `settings.json` file within a project.
pub fn local_settings_file_relative_path() -> &'static RelPath {
- RelPath::new(".zed/settings.json").unwrap()
+ RelPath::unix(".zed/settings.json").unwrap()
}
/// Returns the relative path to a `tasks.json` file within a project.
pub fn local_tasks_file_relative_path() -> &'static RelPath {
- RelPath::new(".zed/tasks.json").unwrap()
+ RelPath::unix(".zed/tasks.json").unwrap()
}
/// Returns the relative path to a `.vscode/tasks.json` file within a project.
pub fn local_vscode_tasks_file_relative_path() -> &'static RelPath {
- RelPath::new(".vscode/tasks.json").unwrap()
+ RelPath::unix(".vscode/tasks.json").unwrap()
}
pub fn debug_task_file_name() -> &'static str {
@@ -434,12 +434,12 @@ pub fn task_file_name() -> &'static str {
/// Returns the relative path to a `debug.json` file within a project.
/// .zed/debug.json
pub fn local_debug_file_relative_path() -> &'static RelPath {
- RelPath::new(".zed/debug.json").unwrap()
+ RelPath::unix(".zed/debug.json").unwrap()
}
/// Returns the relative path to a `.vscode/launch.json` file within a project.
pub fn local_vscode_launch_file_relative_path() -> &'static RelPath {
- RelPath::new(".vscode/launch.json").unwrap()
+ RelPath::unix(".vscode/launch.json").unwrap()
}
pub fn user_ssh_config_file() -> PathBuf {
@@ -989,7 +989,7 @@ impl GitStore {
parse_git_remote_url(provider_registry, &origin_url)
.context("parsing Git remote URL")?;
- let path = repo_path.as_str();
+ let path = repo_path.as_unix_str();
Ok(provider.build_permalink(
remote,
@@ -1315,7 +1315,7 @@ impl GitStore {
});
if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
let recv = repo.update(cx, |repo, cx| {
- log::debug!("hunks changed for {}", path.as_str());
+ log::debug!("hunks changed for {}", path.as_unix_str());
repo.spawn_set_index_text_job(
path,
new_index_text.as_ref().map(|rope| rope.to_string()),
@@ -3118,7 +3118,7 @@ impl Repository {
let repo_path = this.abs_path_to_repo_path(&abs_path)?;
log::debug!(
"start reload diff bases for repo path {}",
- repo_path.as_str()
+ repo_path.as_unix_str()
);
diff_state.update(cx, |diff_state, _| {
let has_unstaged_diff = diff_state
@@ -4162,7 +4162,10 @@ impl Repository {
Some(GitJobKey::WriteIndex(path.clone())),
None,
move |git_repo, mut cx| async move {
- log::debug!("start updating index text for buffer {}", path.as_str());
+ log::debug!(
+ "start updating index text for buffer {}",
+ path.as_unix_str()
+ );
match git_repo {
RepositoryState::Local {
backend,
@@ -4184,7 +4187,10 @@ impl Repository {
.await?;
}
}
- log::debug!("finish updating index text for buffer {}", path.as_str());
+ log::debug!(
+ "finish updating index text for buffer {}",
+ path.as_unix_str()
+ );
if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
let project_path = this
@@ -332,7 +332,7 @@ mod tests {
let traversal = GitTraversal::new(
&repo_snapshots,
- worktree_snapshot.traverse_from_path(true, false, true, RelPath::new("x").unwrap()),
+ worktree_snapshot.traverse_from_path(true, false, true, RelPath::unix("x").unwrap()),
);
let entries = traversal
.map(|entry| (entry.path.clone(), entry.git_summary))
@@ -1850,7 +1850,7 @@ impl LocalLspStore {
if !extra_buffers.is_empty() {
extra_buffers.push_str(", ");
}
- extra_buffers.push_str(path.path.as_str());
+ extra_buffers.push_str(path.path.as_unix_str());
}
})
.ok();
@@ -3284,7 +3284,7 @@ impl LocalLspStore {
let literal_prefix = glob_literal_prefix(relative);
Some((
worktree.clone(),
- RelPath::from_std_path(&literal_prefix, path_style).ok()?,
+ RelPath::new(&literal_prefix, path_style).ok()?.into_arc(),
relative.to_string_lossy().to_string(),
))
}
@@ -3300,7 +3300,7 @@ impl LocalLspStore {
literal_prefix.push(glob_literal_prefix(Path::new(&rp.pattern)));
Some((
worktree.clone(),
- RelPath::from_std_path(&literal_prefix, path_style).ok()?,
+ RelPath::new(&literal_prefix, path_style).ok()?.into_arc(),
rp.pattern.clone(),
))
}
@@ -7936,11 +7936,8 @@ impl LspStore {
let relative_path = if let Some(known_path) = known_relative_path {
known_path
} else {
- RelPath::from_std_path(
- abs_path.strip_prefix(worktree_root)?,
- PathStyle::local(),
- )
- .context("failed to create relative path")?
+ RelPath::new(abs_path.strip_prefix(worktree_root)?, PathStyle::local())?
+ .into_arc()
};
(worktree, relative_path)
};
@@ -61,7 +61,7 @@ impl<Label: Ord + Clone> RootPathTrie<Label> {
let mut path_so_far = <Arc<RelPath>>::from(RelPath::empty());
for key in path.0.iter() {
- path_so_far = path_so_far.join(RelPath::new(key).unwrap());
+ path_so_far = path_so_far.join(RelPath::unix(key.as_ref()).unwrap());
current = match current.children.entry(key.clone()) {
Entry::Vacant(vacant_entry) => {
vacant_entry.insert(RootPathTrie::new_with_key(path_so_far.clone()))
@@ -157,7 +157,7 @@ mod tests {
trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
- assert_eq!(path.as_str(), "a/b/c");
+ assert_eq!(path.as_unix_str(), "a/b/c");
ControlFlow::Continue(())
});
// Now let's annotate a parent with "Known missing" node.
@@ -170,10 +170,10 @@ mod tests {
// Ensure that we walk from the root to the leaf.
let mut visited_paths = BTreeSet::new();
trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
- if path.as_str() == "a/b/c" {
+ if path.as_unix_str() == "a/b/c" {
assert_eq!(visited_paths, BTreeSet::from_iter([rel_path("a").into()]));
assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
- } else if path.as_str() == "a" {
+ } else if path.as_unix_str() == "a" {
assert!(visited_paths.is_empty());
assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
} else {
@@ -189,10 +189,10 @@ mod tests {
trie.walk(
&TriePath::new(rel_path("a/b/c/d/e/f/g")),
&mut |path, nodes| {
- if path.as_str() == "a/b/c" {
+ if path.as_unix_str() == "a/b/c" {
assert_eq!(visited_paths, BTreeSet::from_iter([rel_path("a").into()]));
assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
- } else if path.as_str() == "a" {
+ } else if path.as_unix_str() == "a" {
assert!(visited_paths.is_empty());
assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
} else {
@@ -207,7 +207,7 @@ mod tests {
// Test breaking from the tree-walk.
let mut visited_paths = BTreeSet::new();
trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
- if path.as_str() == "a" {
+ if path.as_unix_str() == "a" {
assert!(visited_paths.is_empty());
assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
} else {
@@ -254,8 +254,8 @@ mod tests {
let mut visited_paths = BTreeSet::new();
trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
- if path.as_str() != "a" && path.as_str() != "a/b" {
- panic!("Unexpected path: {}", path.as_str());
+ if path.as_unix_str() != "a" && path.as_unix_str() != "a/b" {
+ panic!("Unexpected path: {}", path.as_unix_str());
}
assert!(visited_paths.insert(path.clone()));
ControlFlow::Continue(())
@@ -447,7 +447,7 @@ impl PrettierStore {
) {
let prettier_config_files = Prettier::CONFIG_FILE_NAMES
.iter()
- .map(|name| RelPath::new(name).unwrap())
+ .map(|name| RelPath::unix(name).unwrap())
.collect::<HashSet<_>>();
let prettier_config_file_changed = changes
@@ -2032,7 +2032,7 @@ impl Project {
pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a str> {
self.visible_worktrees(cx)
- .map(|tree| tree.read(cx).root_name().as_str())
+ .map(|tree| tree.read(cx).root_name().as_unix_str())
}
pub fn worktree_for_id(&self, id: WorktreeId, cx: &App) -> Option<Entity<Worktree>> {
@@ -4242,20 +4242,18 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Option<ResolvedPath>> {
let mut candidates = vec![];
- if let Ok(path) = RelPath::from_std_path(Path::new(path), self.path_style(cx)) {
- candidates.push(path);
+ let path_style = self.path_style(cx);
+ if let Ok(path) = RelPath::new(path.as_ref(), path_style) {
+ candidates.push(path.into_arc());
}
if let Some(file) = buffer.read(cx).file()
&& let Some(dir) = file.path().parent()
{
- if let Some(joined) = self
- .path_style(cx)
- .join(&*dir.display(self.path_style(cx)), path)
- && let Some(joined) =
- RelPath::from_std_path(Path::new(&joined), self.path_style(cx)).ok()
+ if let Some(joined) = path_style.join(&*dir.display(path_style), path)
+ && let Some(joined) = RelPath::new(joined.as_ref(), path_style).ok()
{
- candidates.push(joined);
+ candidates.push(joined.into_arc());
}
}
@@ -4470,30 +4468,30 @@ impl Project {
let worktree_abs_path = worktree.read(cx).abs_path();
if let Ok(relative_path) = path.strip_prefix(worktree_abs_path)
- && let Ok(path) = RelPath::from_std_path(relative_path, path_style)
+ && let Ok(path) = RelPath::new(relative_path, path_style)
{
return Some(ProjectPath {
worktree_id: worktree.read(cx).id(),
- path,
+ path: path.into_arc(),
});
}
}
} else {
for worktree in worktree_store.visible_worktrees(cx) {
let worktree_root_name = worktree.read(cx).root_name();
- if let Ok(relative_path) = path.strip_prefix(worktree_root_name)
- && let Ok(path) = RelPath::from_std_path(relative_path, path_style)
+ if let Ok(relative_path) = path.strip_prefix(worktree_root_name.as_std_path())
+ && let Ok(path) = RelPath::new(relative_path, path_style)
{
return Some(ProjectPath {
worktree_id: worktree.read(cx).id(),
- path,
+ path: path.into_arc(),
});
}
}
for worktree in worktree_store.visible_worktrees(cx) {
let worktree = worktree.read(cx);
- if let Ok(path) = RelPath::from_std_path(path, path_style)
+ if let Ok(path) = RelPath::new(path, path_style)
&& let Some(entry) = worktree.entry_for_path(&path)
{
return Some(ProjectPath {
@@ -859,7 +859,7 @@ impl SettingsObserver {
.unwrap()
.into();
(settings_dir, LocalSettingsKind::Debug)
- } else if path.ends_with(RelPath::new(EDITORCONFIG_NAME).unwrap()) {
+ } else if path.ends_with(RelPath::unix(EDITORCONFIG_NAME).unwrap()) {
let Some(settings_dir) = path.parent().map(Arc::from) else {
continue;
};
@@ -881,7 +881,7 @@ impl SettingsObserver {
Some(
async move {
let content = fs.load(&abs_path).await?;
- if abs_path.ends_with(local_vscode_tasks_file_relative_path()) {
+ if abs_path.ends_with(local_vscode_tasks_file_relative_path().as_std_path()) {
let vscode_tasks =
parse_json_with_comments::<VsCodeTaskFile>(&content)
.with_context(|| {
@@ -898,7 +898,7 @@ impl SettingsObserver {
"serializing Zed tasks into JSON, file {abs_path:?}"
)
})
- } else if abs_path.ends_with(local_vscode_launch_file_relative_path()) {
+ } else if abs_path.ends_with(local_vscode_launch_file_relative_path().as_std_path()) {
let vscode_tasks =
parse_json_with_comments::<VsCodeDebugTaskFile>(&content)
.with_context(|| {
@@ -984,7 +984,7 @@ impl SettingsObserver {
Ok(()) => {
cx.emit(SettingsObserverEvent::LocalSettingsUpdated(Ok(directory
.as_std_path()
- .join(local_settings_file_relative_path()))));
+ .join(local_settings_file_relative_path().as_std_path()))));
}
}
}),
@@ -1013,7 +1013,7 @@ impl SettingsObserver {
Ok(()) => {
cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok(directory
.as_std_path()
- .join(RelPath::new(task_file_name()).unwrap()))));
+ .join(task_file_name()))));
}
}
}
@@ -1044,7 +1044,7 @@ impl SettingsObserver {
Ok(()) => {
cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok(directory
.as_std_path()
- .join(RelPath::new(task_file_name()).unwrap()))));
+ .join(task_file_name()))));
}
}
}
@@ -608,7 +608,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
}: ManifestQuery,
) -> Option<Arc<RelPath>> {
for path in path.ancestors().take(depth) {
- let p = path.join(RelPath::new("pyproject.toml").unwrap());
+ let p = path.join(rel_path("pyproject.toml"));
if delegate.exists(&p, Some(false)) {
return Some(path.into());
}
@@ -737,7 +737,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
})
.await
.expect("A toolchain to be discovered");
- assert_eq!(root_path.as_ref(), RelPath::new("project-b").unwrap());
+ assert_eq!(root_path.as_ref(), rel_path("project-b"));
assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
let currently_active_toolchain = project
.update(cx, |this, cx| {
@@ -1285,7 +1285,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
.read(cx)
.snapshot()
.entries(true, 0)
- .map(|entry| (entry.path.as_str(), entry.is_ignored))
+ .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
.collect::<Vec<_>>(),
&[
("", false),
@@ -1403,7 +1403,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
.read(cx)
.snapshot()
.entries(true, 0)
- .map(|entry| (entry.path.as_str(), entry.is_ignored))
+ .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
.collect::<Vec<_>>(),
&[
("", false),
@@ -9336,7 +9336,7 @@ fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
let mut toolchains = vec![];
for ancestor in ancestors {
- let venv_path = worktree_root.join(ancestor).join(".venv");
+ let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
if self.0.is_dir(&venv_path).await {
toolchains.push(Toolchain {
name: SharedString::new("Python Venv"),
@@ -228,7 +228,7 @@ impl TaskSourceKind {
id_base,
directory_in_worktree,
} => {
- format!("{id_base}_{id}_{}", directory_in_worktree.as_str())
+ format!("{id_base}_{id}_{}", directory_in_worktree.as_unix_str())
}
Self::Language { name } => format!("language_{name}"),
Self::Lsp {
@@ -1182,7 +1182,7 @@ mod tests {
let worktree_id = WorktreeId::from_usize(0);
let local_worktree_location = SettingsLocation {
worktree_id,
- path: RelPath::new("foo").unwrap(),
+ path: rel_path("foo"),
};
inventory.update(cx, |inventory, _| {
inventory
@@ -1476,7 +1476,7 @@ mod tests {
.update_file_based_tasks(
TaskSettingsLocation::Worktree(SettingsLocation {
worktree_id: worktree_1,
- path: RelPath::new(".zed").unwrap(),
+ path: rel_path(".zed"),
}),
Some(&mock_tasks_from_names(
worktree_1_tasks.iter().map(|(_, name)| name.as_str()),
@@ -1487,7 +1487,7 @@ mod tests {
.update_file_based_tasks(
TaskSettingsLocation::Worktree(SettingsLocation {
worktree_id: worktree_2,
- path: RelPath::new(".zed").unwrap(),
+ path: rel_path(".zed"),
}),
Some(&mock_tasks_from_names(
worktree_2_tasks.iter().map(|(_, name)| name.as_str()),
@@ -257,7 +257,7 @@ impl ToolchainStore {
envelope: TypedEnvelope<proto::ActiveToolchain>,
mut cx: AsyncApp,
) -> Result<proto::ActiveToolchainResponse> {
- let path = RelPath::new(envelope.payload.path.as_deref().unwrap_or(""))?;
+ let path = RelPath::unix(envelope.payload.path.as_deref().unwrap_or(""))?;
let toolchain = this
.update(&mut cx, |this, cx| {
let language_name = LanguageName::from_proto(envelope.payload.language_name);
@@ -161,9 +161,9 @@ impl WorktreeStore {
for tree in self.worktrees() {
let path_style = tree.read(cx).path_style();
if let Ok(relative_path) = abs_path.as_ref().strip_prefix(tree.read(cx).abs_path())
- && let Ok(relative_path) = RelPath::from_std_path(relative_path, path_style)
+ && let Ok(relative_path) = RelPath::new(relative_path, path_style)
{
- return Some((tree.clone(), relative_path));
+ return Some((tree.clone(), relative_path.into_arc()));
}
}
None
@@ -1025,9 +1025,9 @@ impl WorktreeStore {
continue;
}
let relative_path = file.strip_prefix(snapshot.abs_path())?;
- let relative_path = RelPath::from_std_path(&relative_path, snapshot.path_style())
+ let relative_path = RelPath::new(&relative_path, snapshot.path_style())
.context("getting relative path")?;
- results.push((relative_path, !metadata.is_dir))
+ results.push((relative_path.into_arc(), !metadata.is_dir))
}
results.sort_by(|(a_path, _), (b_path, _)| a_path.cmp(b_path));
for (path, is_file) in results {
@@ -1052,7 +1052,7 @@ impl WorktreeStore {
worktree_root: snapshot.abs_path().clone(),
path: ProjectPath {
worktree_id: snapshot.id(),
- path,
+ path: path.into_arc(),
},
})
.await?;
@@ -114,9 +114,8 @@ impl YarnPathStore {
};
// Rebase zip-path onto new temp path.
let as_relative =
- RelPath::from_std_path(path.strip_prefix(zip_file).ok()?, PathStyle::local())
- .ok()?;
- Some((zip_root.into(), as_relative))
+ RelPath::new(path.strip_prefix(zip_file).ok()?, PathStyle::local()).ok()?;
+ Some((zip_root.into(), as_relative.into_arc()))
})
} else {
Task::ready(None)
@@ -1425,7 +1425,7 @@ impl ProjectPanel {
}
let trimmed_filename = trimmed_filename.trim_start_matches('/');
- let Ok(filename) = RelPath::new(trimmed_filename) else {
+ let Ok(filename) = RelPath::unix(trimmed_filename) else {
edit_state.validation_state = ValidationState::Warning(
"File or directory name contains leading or trailing whitespace.".to_string(),
);
@@ -1460,7 +1460,7 @@ impl ProjectPanel {
if already_exists {
edit_state.validation_state = ValidationState::Error(format!(
"File or directory '{}' already exists at location. Please choose a different name.",
- filename.as_str()
+ filename.as_unix_str()
));
cx.notify();
return;
@@ -1495,7 +1495,7 @@ impl ProjectPanel {
} else {
filename.trim_start_matches('/')
};
- let filename = RelPath::from_std_path(filename.as_ref(), path_style).ok()?;
+ let filename = RelPath::new(filename.as_ref(), path_style).ok()?.into_arc();
edit_state.is_dir =
edit_state.is_dir || (edit_state.is_new_entry() && filename_indicates_dir);
@@ -2438,7 +2438,7 @@ impl ProjectPanel {
.path
.file_name()?
.to_string();
- new_path.push(RelPath::new(&clipboard_entry_file_name).unwrap());
+ new_path.push(RelPath::unix(&clipboard_entry_file_name).unwrap());
let extension = new_path.extension().map(|s| s.to_string());
let file_name_without_extension = new_path.file_stem()?.to_string();
let file_name_len = file_name_without_extension.len();
@@ -2466,7 +2466,7 @@ impl ProjectPanel {
new_file_name.push_str(extension);
}
- new_path.push(RelPath::new(&new_file_name).unwrap());
+ new_path.push(RelPath::unix(&new_file_name).unwrap());
disambiguation_range = Some(file_name_len..(file_name_len + disambiguation_len));
ix += 1;
@@ -2839,7 +2839,7 @@ impl ProjectPanel {
}
let mut new_path = destination_path.to_rel_path_buf();
- new_path.push(RelPath::new(source_path.path.file_name()?).unwrap());
+ new_path.push(RelPath::unix(source_path.path.file_name()?).unwrap());
if new_path.as_rel_path() != source_path.path.as_ref() {
let task = project.rename_entry(
entry_to_move,
@@ -3007,7 +3007,7 @@ impl ProjectPanel {
entry: Entry {
id: NEW_ENTRY_ID,
kind: new_entry_kind,
- path: parent_entry.path.join(RelPath::new("\0").unwrap()),
+ path: parent_entry.path.join(RelPath::unix("\0").unwrap()),
inode: 0,
mtime: parent_entry.mtime,
size: parent_entry.size,
@@ -3185,7 +3185,7 @@ impl ProjectPanel {
entry.path.strip_prefix(root_folded_entry).ok().and_then(
|suffix| {
Some(
- RelPath::new(root_folded_entry.file_name()?)
+ RelPath::unix(root_folded_entry.file_name()?)
.unwrap()
.join(suffix),
)
@@ -3196,11 +3196,11 @@ impl ProjectPanel {
entry
.path
.file_name()
- .map(|file_name| RelPath::new(file_name).unwrap().into())
+ .map(|file_name| RelPath::unix(file_name).unwrap().into())
})
.unwrap_or_else(|| entry.path.clone());
let depth = path.components().count();
- (depth, path.as_str().chars().count())
+ (depth, path.as_unix_str().chars().count())
};
let width_estimate =
item_width_estimate(depth, chars, entry.canonical_path.is_some());
@@ -3337,7 +3337,7 @@ impl ProjectPanel {
if let Some(name) = path.file_name()
&& let Some(name) = name.to_str()
{
- let target_path = target_directory.join(RelPath::new(name).unwrap());
+ let target_path = target_directory.join(RelPath::unix(name).unwrap());
if worktree.read(cx).entry_for_path(&target_path).is_some() {
paths_to_replace.push((name.to_string(), path.clone()));
}
@@ -3650,7 +3650,7 @@ impl ProjectPanel {
}
} else {
details.filename.clear();
- details.filename.push_str(processing_filename.as_str());
+ details.filename.push_str(processing_filename.as_unix_str());
}
} else {
if edit_state.is_new_entry() {
@@ -4019,7 +4019,7 @@ impl ProjectPanel {
.path_for_entry(drag_state.active_selection.entry_id, cx)
{
if let Some(parent_path) = entry_path.path.parent() {
- if !parent_path.as_os_str().is_empty() {
+ if !parent_path.is_empty() {
return true;
}
}
@@ -4784,7 +4784,7 @@ impl ProjectPanel {
.path
.file_name()
.map(|name| name.to_string())
- .unwrap_or_else(|| root_name.as_str().to_string())
+ .unwrap_or_else(|| root_name.as_unix_str().to_string())
};
let selection = SelectedEntry {
@@ -419,7 +419,7 @@ impl SshRemoteConnection {
version_str
);
let dst_path =
- paths::remote_server_dir_relative().join(RelPath::new(&binary_name).unwrap());
+ paths::remote_server_dir_relative().join(RelPath::unix(&binary_name).unwrap());
#[cfg(debug_assertions)]
if let Some(remote_server_path) =
@@ -427,7 +427,7 @@ impl SshRemoteConnection {
.await?
{
let tmp_path = paths::remote_server_dir_relative().join(
- RelPath::new(&format!(
+ RelPath::unix(&format!(
"download-{}-{}",
std::process::id(),
remote_server_path.file_name().unwrap().to_string_lossy()
@@ -462,7 +462,7 @@ impl SshRemoteConnection {
})??;
let tmp_path_gz = remote_server_dir_relative().join(
- RelPath::new(&format!(
+ RelPath::unix(&format!(
"{}-download-{}.gz",
binary_name,
std::process::id()
@@ -142,7 +142,7 @@ impl WslRemoteConnection {
);
let dst_path =
- paths::remote_wsl_server_dir_relative().join(RelPath::new(&binary_name).unwrap());
+ paths::remote_wsl_server_dir_relative().join(RelPath::unix(&binary_name).unwrap());
if let Some(parent) = dst_path.parent() {
self.run_wsl_command("mkdir", &["-p", &parent.display(PathStyle::Posix)])
@@ -155,7 +155,7 @@ impl WslRemoteConnection {
super::build_remote_server_from_source(&self.platform, delegate.as_ref(), cx).await?
{
let tmp_path = paths::remote_wsl_server_dir_relative().join(
- &RelPath::new(&format!(
+ &RelPath::unix(&format!(
"download-{}-{}",
std::process::id(),
remote_server_path.file_name().unwrap().to_string_lossy()
@@ -193,7 +193,7 @@ impl WslRemoteConnection {
dst_path.display(PathStyle::Posix),
std::process::id()
);
- let tmp_path = RelPath::new(&tmp_path).unwrap();
+ let tmp_path = RelPath::unix(&tmp_path).unwrap();
self.upload_file(&src_path, &tmp_path, delegate, cx).await?;
self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
@@ -1039,7 +1039,7 @@ async fn test_adding_then_removing_then_adding_worktrees(
assert!(worktree.is_visible());
let entries = worktree.entries(true, 0).collect::<Vec<_>>();
assert_eq!(entries.len(), 2);
- assert_eq!(entries[1].path.as_str(), "README.md")
+ assert_eq!(entries[1].path.as_unix_str(), "README.md")
})
}
@@ -953,7 +953,7 @@ fn cleanup_old_binaries() -> Result<()> {
let release_channel = release_channel::RELEASE_CHANNEL.dev_name();
let prefix = format!("zed-remote-server-{}-", release_channel);
- for entry in std::fs::read_dir(server_dir)? {
+ for entry in std::fs::read_dir(server_dir.as_std_path())? {
let path = entry?.path();
if let Some(file_name) = path.file_name()
@@ -633,7 +633,7 @@ impl SettingsStore {
return Err(InvalidSettingsError::Tasks {
message: "Attempted to submit tasks into the settings store".to_string(),
path: directory_path
- .join(RelPath::new(task_file_name()).unwrap())
+ .join(RelPath::unix(task_file_name()).unwrap())
.as_std_path()
.to_path_buf(),
});
@@ -643,7 +643,7 @@ impl SettingsStore {
message: "Attempted to submit debugger config into the settings store"
.to_string(),
path: directory_path
- .join(RelPath::new(task_file_name()).unwrap())
+ .join(RelPath::unix(task_file_name()).unwrap())
.as_std_path()
.to_path_buf(),
});
@@ -698,7 +698,8 @@ impl SettingsStore {
v.insert((editorconfig_contents.to_owned(), None));
return Err(InvalidSettingsError::Editorconfig {
message: e.to_string(),
- path: directory_path.join(RelPath::new(EDITORCONFIG_NAME).unwrap()),
+ path: directory_path
+ .join(RelPath::unix(EDITORCONFIG_NAME).unwrap()),
});
}
},
@@ -716,7 +717,7 @@ impl SettingsStore {
return Err(InvalidSettingsError::Editorconfig {
message: e.to_string(),
path: directory_path
- .join(RelPath::new(EDITORCONFIG_NAME).unwrap()),
+ .join(RelPath::unix(EDITORCONFIG_NAME).unwrap()),
});
}
}
@@ -183,7 +183,7 @@ fn possible_open_target(
let mut paths_to_check = Vec::with_capacity(potential_paths.len());
let relative_cwd = cwd
.and_then(|cwd| cwd.strip_prefix(&worktree_root).ok())
- .and_then(|cwd| RelPath::from_std_path(cwd, PathStyle::local()).ok())
+ .and_then(|cwd| RelPath::new(cwd, PathStyle::local()).ok())
.and_then(|cwd_stripped| {
(cwd_stripped.as_ref() != RelPath::empty()).then(|| {
is_cwd_in_worktree = true;
@@ -223,7 +223,7 @@ fn possible_open_target(
};
if let Ok(relative_path_to_check) =
- RelPath::from_std_path(&path_to_check.path, PathStyle::local())
+ RelPath::new(&path_to_check.path, PathStyle::local())
&& !worktree.read(cx).is_single_file()
&& let Some(entry) = relative_cwd
.clone()
@@ -369,7 +369,7 @@ fn possible_open_target(
for entry in traversal {
if let Some(path_in_worktree) =
worktree_paths_to_check.iter().find(|path_to_check| {
- RelPath::from_std_path(&path_to_check.path, PathStyle::local())
+ RelPath::new(&path_to_check.path, PathStyle::local())
.is_ok_and(|path| entry.path.ends_with(&path))
})
{
@@ -57,3 +57,4 @@ git2.workspace = true
indoc.workspace = true
rand.workspace = true
util_macros.workspace = true
+pretty_assertions.workspace = true
@@ -1,9 +1,8 @@
use crate::paths::{PathStyle, is_absolute};
-use anyhow::{Context as _, Result, anyhow, bail};
+use anyhow::{Context as _, Result, anyhow};
use serde::{Deserialize, Serialize};
use std::{
- borrow::Cow,
- ffi::OsStr,
+ borrow::{Borrow, Cow},
fmt,
ops::Deref,
path::{Path, PathBuf},
@@ -23,51 +22,71 @@ impl RelPath {
}
#[track_caller]
- pub fn new<S: AsRef<str> + ?Sized>(s: &S) -> anyhow::Result<&Self> {
- let this = unsafe { Self::new_unchecked(s) };
- if this.0.starts_with("/")
- || this.0.ends_with("/")
- || this
- .components()
- .any(|component| component == ".." || component == "." || component.is_empty())
+ pub fn new<'a>(path: &'a Path, path_style: PathStyle) -> Result<Cow<'a, Self>> {
+ let mut path = path.to_str().context("non utf-8 path")?;
+
+ let (prefixes, suffixes): (&[_], &[_]) = match path_style {
+ PathStyle::Posix => (&["./"], &['/']),
+ PathStyle::Windows => (&["./", ".\\"], &['/', '\\']),
+ };
+
+ while prefixes.iter().any(|prefix| path.starts_with(prefix)) {
+ path = &path[prefixes[0].len()..];
+ }
+ while let Some(prefix) = path.strip_suffix(suffixes)
+ && !prefix.is_empty()
{
- bail!("invalid relative path: {:?}", &this.0);
+ path = prefix;
}
- Ok(this)
- }
-
- #[track_caller]
- pub fn from_std_path(path: &Path, path_style: PathStyle) -> Result<Arc<Self>> {
- let path = path.to_str().context("non utf-8 path")?;
- let mut string = Cow::Borrowed(path);
- if is_absolute(&string, path_style) {
+ if is_absolute(&path, path_style) {
return Err(anyhow!("absolute path not allowed: {path:?}"));
}
- if path_style == PathStyle::Windows {
+ let mut string = Cow::Borrowed(path);
+ if path_style == PathStyle::Windows && path.contains('\\') {
string = Cow::Owned(string.as_ref().replace('\\', "/"))
}
- let mut this = RelPathBuf::new();
- for component in unsafe { Self::new_unchecked(string.as_ref()) }.components() {
- match component {
- "" => {}
- "." => {}
- ".." => {
- if !this.pop() {
- return Err(anyhow!("path is not relative: {string:?}"));
+ let mut result = match string {
+ Cow::Borrowed(string) => Cow::Borrowed(unsafe { Self::new_unchecked(string) }),
+ Cow::Owned(string) => Cow::Owned(RelPathBuf(string)),
+ };
+
+ if result
+ .components()
+ .any(|component| component == "" || component == "." || component == "..")
+ {
+ let mut normalized = RelPathBuf::new();
+ for component in result.components() {
+ match component {
+ "" => {}
+ "." => {}
+ ".." => {
+ if !normalized.pop() {
+ return Err(anyhow!("path is not relative: {result:?}"));
+ }
}
+ other => normalized.push(&RelPath::unix(other)?),
}
- other => this.push(RelPath::new(other)?),
}
+ result = Cow::Owned(normalized)
}
- Ok(this.into())
+ Ok(result)
}
- pub unsafe fn new_unchecked<S: AsRef<str> + ?Sized>(s: &S) -> &Self {
- unsafe { &*(s.as_ref() as *const str as *const Self) }
+ #[track_caller]
+ pub fn unix<S: AsRef<Path> + ?Sized>(path: &S) -> anyhow::Result<&Self> {
+ let path = path.as_ref();
+ match Self::new(path, PathStyle::Posix)? {
+ Cow::Borrowed(path) => Ok(path),
+ Cow::Owned(_) => Err(anyhow!("invalid relative path {path:?}")),
+ }
+ }
+
+ unsafe fn new_unchecked(s: &str) -> &Self {
+ unsafe { &*(s as *const str as *const Self) }
}
pub fn is_empty(&self) -> bool {
@@ -115,7 +134,7 @@ impl RelPath {
false
}
- pub fn strip_prefix(&self, other: &Self) -> Result<&Self> {
+ pub fn strip_prefix<'a>(&'a self, other: &Self) -> Result<&'a Self> {
if other.is_empty() {
return Ok(self);
}
@@ -146,21 +165,6 @@ impl RelPath {
}
}
- pub fn push(&self, component: &str) -> Result<Arc<Self>> {
- if component.is_empty() {
- bail!("pushed component is empty");
- } else if component.contains('/') {
- bail!("pushed component contains a separator: {component:?}");
- }
- let path = format!(
- "{}{}{}",
- &self.0,
- if self.is_empty() { "" } else { "/" },
- component
- );
- Ok(Arc::from(unsafe { Self::new_unchecked(&path) }))
- }
-
pub fn join(&self, other: &Self) -> Arc<Self> {
let result = if self.0.is_empty() {
Cow::Borrowed(&other.0)
@@ -173,7 +177,7 @@ impl RelPath {
}
pub fn to_proto(&self) -> String {
- self.0.to_owned()
+ self.as_unix_str().to_owned()
}
pub fn to_rel_path_buf(&self) -> RelPathBuf {
@@ -181,13 +185,17 @@ impl RelPath {
}
pub fn from_proto(path: &str) -> Result<Arc<Self>> {
- Ok(Arc::from(Self::new(path)?))
+ Ok(Arc::from(Self::unix(path)?))
}
- pub fn as_str(&self) -> &str {
+ pub fn as_unix_str(&self) -> &str {
&self.0
}
+ pub fn into_arc(&self) -> Arc<Self> {
+ Arc::from(self)
+ }
+
pub fn display(&self, style: PathStyle) -> Cow<'_, str> {
match style {
PathStyle::Posix => Cow::Borrowed(&self.0),
@@ -195,16 +203,22 @@ impl RelPath {
}
}
- pub fn as_bytes(&self) -> &[u8] {
- &self.0.as_bytes()
+ pub fn as_std_path(&self) -> &Path {
+ Path::new(&self.0)
}
+}
+
+impl ToOwned for RelPath {
+ type Owned = RelPathBuf;
- pub fn as_os_str(&self) -> &OsStr {
- self.0.as_ref()
+ fn to_owned(&self) -> Self::Owned {
+ self.to_rel_path_buf()
}
+}
- pub fn as_std_path(&self) -> &Path {
- Path::new(&self.0)
+impl Borrow<RelPath> for RelPathBuf {
+ fn borrow(&self) -> &RelPath {
+ self.as_rel_path()
}
}
@@ -293,9 +307,9 @@ impl Deref for RelPathBuf {
}
}
-impl AsRef<Path> for RelPath {
- fn as_ref(&self) -> &Path {
- Path::new(&self.0)
+impl<'a> From<&'a RelPath> for Cow<'a, RelPath> {
+ fn from(value: &'a RelPath) -> Self {
+ Self::Borrowed(value)
}
}
@@ -309,7 +323,7 @@ impl From<&RelPath> for Arc<RelPath> {
#[cfg(any(test, feature = "test-support"))]
#[track_caller]
pub fn rel_path(path: &str) -> &RelPath {
- RelPath::new(path).unwrap()
+ RelPath::unix(path).unwrap()
}
impl PartialEq<str> for RelPath {
@@ -384,35 +398,52 @@ impl<'a> DoubleEndedIterator for RelPathComponents<'a> {
mod tests {
use super::*;
use itertools::Itertools;
- use std::path::PathBuf;
+ use pretty_assertions::assert_matches;
#[test]
- fn test_path_construction() {
- assert!(RelPath::new("/").is_err());
- assert!(RelPath::new("/foo").is_err());
- assert!(RelPath::new("foo/").is_err());
- assert!(RelPath::new("foo//bar").is_err());
- assert!(RelPath::new("foo/../bar").is_err());
- assert!(RelPath::new("./foo/bar").is_err());
- assert!(RelPath::new("..").is_err());
-
- assert!(RelPath::from_std_path(Path::new("/"), PathStyle::local()).is_err());
- assert!(RelPath::from_std_path(Path::new("//"), PathStyle::local()).is_err());
- assert!(RelPath::from_std_path(Path::new("/foo/"), PathStyle::local()).is_err());
- assert_eq!(
- RelPath::from_std_path(&PathBuf::from_iter(["foo", ""]), PathStyle::local()).unwrap(),
- Arc::from(rel_path("foo"))
- );
- }
+ fn test_rel_path_new() {
+ assert!(RelPath::new(Path::new("/"), PathStyle::local()).is_err());
+ assert!(RelPath::new(Path::new("//"), PathStyle::local()).is_err());
+ assert!(RelPath::new(Path::new("/foo/"), PathStyle::local()).is_err());
+
+ let path = RelPath::new("foo/".as_ref(), PathStyle::local()).unwrap();
+ assert_eq!(path, rel_path("foo").into());
+ assert_matches!(path, Cow::Borrowed(_));
+
+ let path = RelPath::new("foo\\".as_ref(), PathStyle::Windows).unwrap();
+ assert_eq!(path, rel_path("foo").into());
+ assert_matches!(path, Cow::Borrowed(_));
- #[test]
- fn test_rel_path_from_std_path() {
assert_eq!(
- RelPath::from_std_path(Path::new("foo/bar/../baz/./quux/"), PathStyle::local())
+ RelPath::new("foo/bar/../baz/./quux/".as_ref(), PathStyle::local())
.unwrap()
.as_ref(),
rel_path("foo/baz/quux")
);
+
+ let path = RelPath::new("./foo/bar".as_ref(), PathStyle::Posix).unwrap();
+ assert_eq!(path.as_ref(), rel_path("foo/bar"));
+ assert_matches!(path, Cow::Borrowed(_));
+
+ let path = RelPath::new(".\\foo".as_ref(), PathStyle::Windows).unwrap();
+ assert_eq!(path, rel_path("foo").into());
+ assert_matches!(path, Cow::Borrowed(_));
+
+ let path = RelPath::new("./.\\./foo/\\/".as_ref(), PathStyle::Windows).unwrap();
+ assert_eq!(path, rel_path("foo").into());
+ assert_matches!(path, Cow::Borrowed(_));
+
+ let path = RelPath::new("foo/./bar".as_ref(), PathStyle::Posix).unwrap();
+ assert_eq!(path.as_ref(), rel_path("foo/bar"));
+ assert_matches!(path, Cow::Owned(_));
+
+ let path = RelPath::new("./foo/bar".as_ref(), PathStyle::Windows).unwrap();
+ assert_eq!(path.as_ref(), rel_path("foo/bar"));
+ assert_matches!(path, Cow::Borrowed(_));
+
+ let path = RelPath::new(".\\foo\\bar".as_ref(), PathStyle::Windows).unwrap();
+ assert_eq!(path.as_ref(), rel_path("foo/bar"));
+ assert_matches!(path, Cow::Owned(_));
}
#[test]
@@ -456,10 +487,7 @@ mod tests {
#[test]
fn test_rel_path_parent() {
- assert_eq!(
- rel_path("foo/bar/baz").parent(),
- Some(RelPath::new("foo/bar").unwrap())
- );
+ assert_eq!(rel_path("foo/bar/baz").parent(), Some(rel_path("foo/bar")));
assert_eq!(rel_path("foo").parent(), Some(RelPath::empty()));
assert_eq!(rel_path("").parent(), None);
}
@@ -470,7 +498,9 @@ mod tests {
for [lhs, rhs] in test_cases.iter().array_combinations::<2>() {
assert_eq!(
Path::new(lhs).cmp(Path::new(rhs)),
- RelPath::new(lhs).unwrap().cmp(RelPath::new(rhs).unwrap())
+ RelPath::unix(lhs)
+ .unwrap()
+ .cmp(&RelPath::unix(rhs).unwrap())
);
}
}
@@ -486,30 +516,22 @@ mod tests {
#[test]
fn test_rel_path_constructors_absolute_path() {
- assert!(RelPath::from_std_path(Path::new("/a/b"), PathStyle::Windows).is_err());
- assert!(RelPath::from_std_path(Path::new("\\a\\b"), PathStyle::Windows).is_err());
- assert!(RelPath::from_std_path(Path::new("/a/b"), PathStyle::Posix).is_err());
- assert!(RelPath::from_std_path(Path::new("C:/a/b"), PathStyle::Windows).is_err());
- assert!(RelPath::from_std_path(Path::new("C:\\a\\b"), PathStyle::Windows).is_err());
- assert!(RelPath::from_std_path(Path::new("C:/a/b"), PathStyle::Posix).is_ok());
- }
-
- #[test]
- fn test_push() {
- assert_eq!(rel_path("a/b").push("c").unwrap().as_str(), "a/b/c");
- assert_eq!(rel_path("").push("c").unwrap().as_str(), "c");
- assert!(rel_path("a/b").push("").is_err());
- assert!(rel_path("a/b").push("c/d").is_err());
+ assert!(RelPath::new(Path::new("/a/b"), PathStyle::Windows).is_err());
+ assert!(RelPath::new(Path::new("\\a\\b"), PathStyle::Windows).is_err());
+ assert!(RelPath::new(Path::new("/a/b"), PathStyle::Posix).is_err());
+ assert!(RelPath::new(Path::new("C:/a/b"), PathStyle::Windows).is_err());
+ assert!(RelPath::new(Path::new("C:\\a\\b"), PathStyle::Windows).is_err());
+ assert!(RelPath::new(Path::new("C:/a/b"), PathStyle::Posix).is_ok());
}
#[test]
fn test_pop() {
let mut path = rel_path("a/b").to_rel_path_buf();
path.pop();
- assert_eq!(path.as_rel_path().as_str(), "a");
+ assert_eq!(path.as_rel_path().as_unix_str(), "a");
path.pop();
- assert_eq!(path.as_rel_path().as_str(), "");
+ assert_eq!(path.as_rel_path().as_unix_str(), "");
path.pop();
- assert_eq!(path.as_rel_path().as_str(), "");
+ assert_eq!(path.as_rel_path().as_unix_str(), "");
}
}
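
Illustrative only, not part of the patch: a minimal sketch of how the consolidated RelPath constructors and accessors shown in the hunks above fit together. Module paths (util::rel_path::RelPath, util::paths::PathStyle) and the standalone example function are assumptions for illustration; the signatures themselves are taken from the diff.

    // Hypothetical usage sketch of the API introduced in this change.
    use std::borrow::Cow;
    use std::sync::Arc;
    use util::paths::PathStyle;   // assumed module path
    use util::rel_path::RelPath;  // assumed module path

    fn example() -> anyhow::Result<()> {
        // `RelPath::new` takes a std Path plus a PathStyle, normalizes it,
        // and returns a Cow: borrowed when no rewriting was needed,
        // owned when components like "." or ".." had to be stripped.
        let borrowed: Cow<RelPath> = RelPath::new("foo/".as_ref(), PathStyle::Posix)?;
        let owned: Cow<RelPath> = RelPath::new("foo/./bar".as_ref(), PathStyle::Posix)?;

        // `RelPath::unix` accepts only already-normalized POSIX-style input
        // and hands back a borrowed &RelPath, replacing the old `RelPath::new(&str)`.
        let name: &RelPath = RelPath::unix("README.md")?;

        // Accessors renamed or added by this change.
        assert_eq!(name.as_unix_str(), "README.md");        // was `as_str`
        let arc: Arc<RelPath> = owned.into_arc();            // new owning conversion
        let _win = borrowed.display(PathStyle::Windows);      // style-aware rendering
        let _ = arc;
        Ok(())
    }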
@@ -306,10 +306,10 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
return;
};
let path_style = worktree.read(cx).path_style();
- let Ok(project_path) = RelPath::from_std_path(Path::new(&action.filename), path_style)
- .map(|path| ProjectPath {
+ let Ok(project_path) =
+ RelPath::new(Path::new(&action.filename), path_style).map(|path| ProjectPath {
worktree_id: worktree.read(cx).id(),
- path,
+ path: path.into_arc(),
})
else {
// TODO implement save_as with absolute path
@@ -372,14 +372,12 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
return;
};
let path_style = worktree.read(cx).path_style();
- let Some(path) =
- RelPath::from_std_path(Path::new(&action.filename), path_style).log_err()
- else {
+ let Some(path) = RelPath::new(Path::new(&action.filename), path_style).log_err() else {
return;
};
let project_path = ProjectPath {
worktree_id: worktree.read(cx).id(),
- path,
+ path: path.into_arc(),
};
let direction = if action.vertical {
@@ -472,14 +470,12 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
return;
};
let path_style = worktree.read(cx).path_style();
- let Some(path) =
- RelPath::from_std_path(Path::new(&action.filename), path_style).log_err()
- else {
+ let Some(path) = RelPath::new(Path::new(&action.filename), path_style).log_err() else {
return;
};
let project_path = ProjectPath {
worktree_id: worktree.read(cx).id(),
- path,
+ path: path.into_arc(),
};
let _ = workspace.update(cx, |workspace, cx| {
@@ -344,11 +344,10 @@ impl MarksState {
.worktrees(cx)
.filter_map(|worktree| {
let relative = path.strip_prefix(worktree.read(cx).abs_path()).ok()?;
- let path = RelPath::from_std_path(relative, worktree.read(cx).path_style())
- .log_err()?;
+ let path = RelPath::new(relative, worktree.read(cx).path_style()).log_err()?;
Some(ProjectPath {
worktree_id: worktree.read(cx).id(),
- path,
+ path: path.into_arc(),
})
})
.next();
@@ -915,7 +915,7 @@ impl WorkspaceDb {
relative_worktree_path == String::default()
);
- let Some(relative_path) = RelPath::new(&relative_worktree_path).log_err() else {
+ let Some(relative_path) = RelPath::unix(&relative_worktree_path).log_err() else {
continue;
};
if worktree_id != u64::MAX && relative_worktree_path != String::default() {
@@ -1001,7 +1001,7 @@ impl WorkspaceDb {
for toolchain in toolchains {
let query = sql!(INSERT OR REPLACE INTO user_toolchains(remote_connection_id, workspace_id, worktree_id, relative_worktree_path, language_name, name, path, raw_json) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8));
let (workspace_id, worktree_id, relative_worktree_path) = match scope {
- ToolchainScope::Subproject(worktree_id, ref path) => (Some(workspace.id), Some(worktree_id), Some(path.as_str().to_owned())),
+ ToolchainScope::Subproject(worktree_id, ref path) => (Some(workspace.id), Some(worktree_id), Some(path.as_unix_str().to_owned())),
ToolchainScope::Project => (Some(workspace.id), None, None),
ToolchainScope::Global => (None, None, None),
};
@@ -1661,7 +1661,7 @@ impl WorkspaceDb {
workspace_id,
language_name.as_ref().to_string(),
worktree_id.to_usize(),
- relative_worktree_path.as_str().to_string(),
+ relative_worktree_path.as_unix_str().to_string(),
))?;
Ok(toolchain
@@ -1743,7 +1743,7 @@ impl WorkspaceDb {
insert((
workspace_id,
worktree_id.to_usize(),
- relative_worktree_path.as_str(),
+ relative_worktree_path.as_unix_str(),
toolchain.language_name.as_ref(),
toolchain.name.as_ref(),
toolchain.path.as_ref(),
@@ -2631,11 +2631,9 @@ impl Workspace {
.strip_prefix(worktree_abs_path.as_ref())
.ok()
.and_then(|relative_path| {
- let relative_path = RelPath::from_std_path(
- relative_path,
- PathStyle::local(),
- )
- .log_err()?;
+ let relative_path =
+ RelPath::new(relative_path, PathStyle::local())
+ .log_err()?;
worktree.entry_for_path(&relative_path)
})
}
@@ -10766,7 +10764,7 @@ mod tests {
.flat_map(|item| {
item.project_paths(cx)
.into_iter()
- .map(|path| path.path.as_str().to_string())
+ .map(|path| path.path.display(PathStyle::local()).into_owned())
})
.collect()
}
@@ -386,7 +386,9 @@ impl Worktree {
abs_path
.file_name()
.and_then(|f| f.to_str())
- .map_or(RelPath::empty().into(), |f| RelPath::new(f).unwrap().into()),
+ .map_or(RelPath::empty().into(), |f| {
+ RelPath::unix(f).unwrap().into()
+ }),
abs_path.clone(),
PathStyle::local(),
),
@@ -423,7 +425,7 @@ impl Worktree {
if !metadata.is_dir {
if let Some(file_name) = abs_path.file_name()
&& let Some(file_name) = file_name.to_str()
- && let Ok(path) = RelPath::new(file_name)
+ && let Ok(path) = RelPath::unix(file_name)
{
entry.is_private = !share_private_files && settings.is_path_private(path);
}
@@ -1556,11 +1558,12 @@ impl LocalWorktree {
let paths_to_refresh = paths
.iter()
.filter_map(|(_, target)| {
- RelPath::from_std_path(
+ RelPath::new(
target.strip_prefix(&worktree_path).ok()?,
PathStyle::local(),
)
.ok()
+ .map(|path| path.into_arc())
})
.collect::<Vec<_>>();
@@ -1767,7 +1770,9 @@ impl LocalWorktree {
.as_path()
.file_name()
.and_then(|f| f.to_str())
- .map_or(RelPath::empty().into(), |f| RelPath::new(f).unwrap().into());
+ .map_or(RelPath::empty().into(), |f| {
+ RelPath::unix(f).unwrap().into()
+ });
self.snapshot.update_abs_path(new_path, root_name);
}
self.restart_background_scanners(cx);
@@ -1962,7 +1967,7 @@ impl RemoteWorktree {
let Some(filename) = root_path_to_copy
.file_name()
.and_then(|name| name.to_str())
- .and_then(|filename| RelPath::new(filename).ok())
+ .and_then(|filename| RelPath::unix(filename).ok())
else {
continue;
};
@@ -1972,9 +1977,7 @@ impl RemoteWorktree {
let Some(relative_path) = abs_path
.strip_prefix(&root_path_to_copy)
.map_err(|e| anyhow::Error::from(e))
- .and_then(|relative_path| {
- RelPath::from_std_path(relative_path, PathStyle::local())
- })
+ .and_then(|relative_path| RelPath::new(relative_path, PathStyle::local()))
.log_err()
else {
continue;
@@ -2025,7 +2028,7 @@ impl Snapshot {
abs_path: SanitizedPath::from_arc(abs_path),
path_style,
root_char_bag: root_name
- .as_str()
+ .as_unix_str()
.chars()
.map(|c| c.to_ascii_lowercase())
.collect(),
@@ -2154,7 +2157,7 @@ impl Snapshot {
self.abs_path = abs_path;
if root_name != self.root_name {
self.root_char_bag = root_name
- .as_str()
+ .as_unix_str()
.chars()
.map(|c| c.to_ascii_lowercase())
.collect();
@@ -2349,7 +2352,7 @@ impl Snapshot {
}
pub fn root_name_str(&self) -> &str {
- self.root_name.as_str()
+ self.root_name.as_unix_str()
}
pub fn scan_id(&self) -> usize {
@@ -2589,7 +2592,7 @@ impl LocalSnapshot {
if git_state {
for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
- let ignore_parent_path = &RelPath::from_std_path(
+ let ignore_parent_path = &RelPath::new(
ignore_parent_abs_path
.strip_prefix(self.abs_path.as_path())
.unwrap(),
@@ -2598,8 +2601,10 @@ impl LocalSnapshot {
.unwrap();
assert!(self.entry_for_path(ignore_parent_path).is_some());
assert!(
- self.entry_for_path(&ignore_parent_path.join(RelPath::new(GITIGNORE).unwrap()))
- .is_some()
+ self.entry_for_path(
+ &ignore_parent_path.join(RelPath::unix(GITIGNORE).unwrap())
+ )
+ .is_some()
);
}
}
@@ -3038,7 +3043,7 @@ impl language::File for File {
fn file_name<'a>(&'a self, cx: &'a App) -> &'a str {
self.path
.file_name()
- .unwrap_or_else(|| self.worktree.read(cx).root_name.as_str())
+ .unwrap_or_else(|| self.worktree.read(cx).root_name_str())
}
fn worktree_id(&self, cx: &App) -> WorktreeId {
@@ -3815,9 +3820,9 @@ impl BackgroundScanner {
}
}
- let relative_path: Arc<RelPath> = if let Ok(path) =
+ let relative_path = if let Ok(path) =
abs_path.strip_prefix(&root_canonical_path)
- && let Ok(path) = RelPath::from_std_path(path, PathStyle::local())
+ && let Ok(path) = RelPath::new(path, PathStyle::local())
{
path
} else {
@@ -3864,7 +3869,7 @@ impl BackgroundScanner {
return false;
}
- relative_paths.push(relative_path);
+ relative_paths.push(relative_path.into_arc());
true
}
});
@@ -4116,7 +4121,7 @@ impl BackgroundScanner {
let child_name = child_abs_path.file_name().unwrap();
let Some(child_path) = child_name
.to_str()
- .and_then(|name| Some(job.path.join(RelPath::new(name).ok()?)))
+ .and_then(|name| Some(job.path.join(RelPath::unix(name).ok()?)))
else {
continue;
};
@@ -4231,7 +4236,7 @@ impl BackgroundScanner {
{
let relative_path = job
.path
- .join(RelPath::new(child_name.to_str().unwrap()).unwrap());
+ .join(RelPath::unix(child_name.to_str().unwrap()).unwrap());
if self.is_path_private(&relative_path) {
log::debug!("detected private file: {relative_path:?}");
child_entry.is_private = true;
@@ -4366,7 +4371,7 @@ impl BackgroundScanner {
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, is_dir) {
if state.should_scan_directory(&fs_entry)
- || (fs_entry.path.as_os_str().is_empty()
+ || (fs_entry.path.is_empty()
&& abs_path.file_name() == Some(OsStr::new(DOT_GIT)))
{
state.enqueue_scan_dir(
@@ -4489,7 +4494,7 @@ impl BackgroundScanner {
.retain(|parent_abs_path, (_, needs_update)| {
if let Ok(parent_path) = parent_abs_path.strip_prefix(abs_path.as_path())
&& let Some(parent_path) =
- RelPath::from_std_path(&parent_path, PathStyle::local()).log_err()
+ RelPath::new(&parent_path, PathStyle::local()).log_err()
{
if *needs_update {
*needs_update = false;
@@ -4498,7 +4503,7 @@ impl BackgroundScanner {
}
}
- let ignore_path = parent_path.join(RelPath::new(GITIGNORE).unwrap());
+ let ignore_path = parent_path.join(RelPath::unix(GITIGNORE).unwrap());
if snapshot.snapshot.entry_for_path(&ignore_path).is_none() {
return false;
}
@@ -4546,7 +4551,7 @@ impl BackgroundScanner {
.abs_path
.strip_prefix(snapshot.abs_path.as_path())
.unwrap();
- let Some(path) = RelPath::from_std_path(&path, PathStyle::local()).log_err() else {
+ let Some(path) = RelPath::new(&path, PathStyle::local()).log_err() else {
return;
};
@@ -4650,7 +4655,9 @@ impl BackgroundScanner {
};
affected_repo_roots.push(dot_git_dir.parent().unwrap().into());
state.insert_git_repository(
- RelPath::from_std_path(relative, PathStyle::local()).unwrap(),
+ RelPath::new(relative, PathStyle::local())
+ .unwrap()
+ .into_arc(),
self.fs.as_ref(),
self.watcher.as_ref(),
);
@@ -4675,7 +4682,7 @@ impl BackgroundScanner {
.entry_for_id(work_directory_id)
.is_some_and(|entry| {
snapshot
- .entry_for_path(&entry.path.join(RelPath::new(DOT_GIT).unwrap()))
+ .entry_for_path(&entry.path.join(RelPath::unix(DOT_GIT).unwrap()))
.is_some()
});
@@ -4908,7 +4915,7 @@ fn swap_to_front(child_paths: &mut Vec<PathBuf>, file: &str) {
fn char_bag_for_path(root_char_bag: CharBag, path: &RelPath) -> CharBag {
let mut result = root_char_bag;
- result.extend(path.as_str().chars().map(|c| c.to_ascii_lowercase()));
+ result.extend(path.as_unix_str().chars().map(|c| c.to_ascii_lowercase()));
result
}
@@ -4971,7 +4978,7 @@ impl WorktreeModelHandle for Entity<Worktree> {
let mut events = cx.events(&tree);
while events.next().await.is_some() {
if tree.read_with(cx, |tree, _| {
- tree.entry_for_path(RelPath::new(file_name).unwrap())
+ tree.entry_for_path(RelPath::unix(file_name).unwrap())
.is_some()
}) {
break;
@@ -4983,7 +4990,7 @@ impl WorktreeModelHandle for Entity<Worktree> {
.unwrap();
while events.next().await.is_some() {
if tree.read_with(cx, |tree, _| {
- tree.entry_for_path(RelPath::new(file_name).unwrap())
+ tree.entry_for_path(RelPath::unix(file_name).unwrap())
.is_none()
}) {
break;
@@ -1669,8 +1669,9 @@ fn open_local_file(
cx.spawn_in(window, async move |workspace, cx| {
// Check if the file actually exists on disk (even if it's excluded from worktree)
let file_exists = {
- let full_path = worktree
- .read_with(cx, |tree, _| tree.abs_path().join(settings_relative_path))?;
+ let full_path = worktree.read_with(cx, |tree, _| {
+ tree.abs_path().join(settings_relative_path.as_std_path())
+ })?;
let fs = project.read_with(cx, |project, _| project.fs().clone())?;
@@ -290,7 +290,7 @@ impl LicenseDetectionWatcher {
include_ignored: true,
};
for top_file in local_worktree.child_entries_with_options(RelPath::empty(), options) {
- let path_bytes = top_file.path.as_os_str().as_encoded_bytes();
+ let path_bytes = top_file.path.as_unix_str().as_bytes();
if top_file.is_created() && LICENSE_FILE_NAME_REGEX.is_match(path_bytes) {
let rel_path = top_file.path.clone();
files_to_check_tx.unbounded_send(rel_path).ok();
@@ -302,7 +302,7 @@ impl LicenseDetectionWatcher {
worktree::Event::UpdatedEntries(updated_entries) => {
for updated_entry in updated_entries.iter() {
let rel_path = &updated_entry.0;
- let path_bytes = rel_path.as_os_str().as_encoded_bytes();
+ let path_bytes = rel_path.as_unix_str().as_bytes();
if LICENSE_FILE_NAME_REGEX.is_match(path_bytes) {
files_to_check_tx.unbounded_send(rel_path.clone()).ok();
}
@@ -1181,11 +1181,11 @@ impl Event {
let old_path = old_snapshot
.file()
.map(|f| f.path().as_ref())
- .unwrap_or(RelPath::new("untitled").unwrap());
+ .unwrap_or(RelPath::unix("untitled").unwrap());
let new_path = new_snapshot
.file()
.map(|f| f.path().as_ref())
- .unwrap_or(RelPath::new("untitled").unwrap());
+ .unwrap_or(RelPath::unix("untitled").unwrap());
if old_path != new_path {
writeln!(prompt, "User renamed {:?} to {:?}\n", old_path, new_path).unwrap();
}
@@ -307,7 +307,7 @@ impl Zeta2Inspector {
let multibuffer = cx.new(|cx| {
let mut multibuffer = MultiBuffer::new(language::Capability::ReadOnly);
let excerpt_file = Arc::new(ExcerptMetadataFile {
- title: RelPath::new("Cursor Excerpt").unwrap().into(),
+ title: RelPath::unix("Cursor Excerpt").unwrap().into(),
path_style,
worktree_id,
});
@@ -341,7 +341,7 @@ impl Zeta2Inspector {
.path_for_entry(snippet.declaration.project_entry_id(), cx);
let snippet_file = Arc::new(ExcerptMetadataFile {
- title: RelPath::new(&format!(
+ title: RelPath::unix(&format!(
"{} (Score density: {})",
path.map(|p| p.path.display(path_style).to_string())
.unwrap_or_else(|| "".to_string()),
@@ -148,7 +148,7 @@ impl FromStr for CursorPosition {
));
}
- let path = RelPath::from_std_path(Path::new(&parts[0]), PathStyle::local())?;
+ let path = RelPath::new(Path::new(&parts[0]), PathStyle::local())?.into_arc();
let line: u32 = parts[1]
.parse()
.map_err(|_| anyhow!("Invalid line number: '{}'", parts[1]))?;
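
Illustrative only, not part of the patch: a sketch of the call-site migration pattern that recurs throughout the hunks above (vim commands, workspace, worktree, zeta_cli). The function name and the `worktree_root` parameter are hypothetical; the pattern of replacing `RelPath::from_std_path` with `RelPath::new(..., PathStyle)` followed by `into_arc()` is what the diff shows.

    // Hypothetical helper mirroring the migrated call sites.
    use std::path::Path;
    use std::sync::Arc;
    use util::paths::PathStyle;   // assumed module path
    use util::rel_path::RelPath;  // assumed module path

    fn worktree_relative(worktree_root: &Path, abs_path: &Path) -> anyhow::Result<Arc<RelPath>> {
        // Strip the worktree root, then normalize with the local path style;
        // `into_arc()` converts the returned Cow into the Arc<RelPath> that
        // ProjectPath-style consumers expect.
        let relative = abs_path.strip_prefix(worktree_root)?;
        Ok(RelPath::new(relative, PathStyle::local())?.into_arc())
    }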