Detailed changes
@@ -268,6 +268,19 @@
// Whether to show warnings or not by default.
"include_warnings": true
},
+ // Add files or globs of files that will be excluded by Zed entirely:
+ // they will be skipped during file system scans, and the file tree and
+ // file search will lack the corresponding file entries.
+ "file_scan_exclusions": [
+ "**/.git",
+ "**/.svn",
+ "**/.hg",
+ "**/CVS",
+ "**/.DS_Store",
+ "**/Thumbs.db",
+ "**/.classpath",
+ "**/.settings"
+ ],
// Git gutter behavior configuration.
"git": {
// Control whether the git gutter is shown. May take 2 values:
@@ -5052,7 +5052,7 @@ async fn test_project_search(
let mut results = HashMap::default();
let mut search_rx = project_b.update(cx_b, |project, cx| {
project.search(
- SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
cx,
)
});
@@ -869,7 +869,8 @@ impl RandomizedTest for ProjectCollaborationTest {
let mut search = project.update(cx, |project, cx| {
project.search(
- SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
+ .unwrap(),
cx,
)
});
@@ -4599,7 +4599,7 @@ async fn test_project_search(
let mut results = HashMap::default();
let mut search_rx = project_b.update(cx_b, |project, cx| {
project.search(
- SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
cx,
)
});
@@ -870,7 +870,8 @@ impl RandomizedTest for ProjectCollaborationTest {
let mut search = project.update(cx, |project, cx| {
project.search(
- SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
+ .unwrap(),
cx,
)
});
@@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
lazy_static! {
- static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
- "@[-_\\w]+",
- false,
- false,
- Default::default(),
- Default::default()
- )
- .unwrap();
+ static ref MENTIONS_SEARCH: SearchQuery =
+ SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
}
pub struct MessageEditor {
@@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
lazy_static! {
- static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
- "@[-_\\w]+",
- false,
- false,
- Default::default(),
- Default::default()
- )
- .unwrap();
+ static ref MENTIONS_SEARCH: SearchQuery =
+ SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
}
pub struct MessageEditor {
@@ -1,8 +1,9 @@
use collections::{CommandPaletteFilter, HashMap};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
- actions, div, prelude::*, Action, AppContext, Component, Dismiss, Div, FocusHandle, Keystroke,
- ManagedView, ParentComponent, Render, Styled, View, ViewContext, VisualContext, WeakView,
+ actions, div, prelude::*, Action, AppContext, Component, Div, EventEmitter, FocusHandle,
+ FocusableView, Keystroke, Manager, ParentComponent, Render, Styled, View, ViewContext,
+ VisualContext, WeakView,
};
use picker::{Picker, PickerDelegate};
use std::{
@@ -68,7 +69,9 @@ impl CommandPalette {
}
}
-impl ManagedView for CommandPalette {
+impl EventEmitter<Manager> for CommandPalette {}
+
+impl FocusableView for CommandPalette {
fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
self.picker.focus_handle(cx)
}
@@ -266,7 +269,7 @@ impl PickerDelegate for CommandPaletteDelegate {
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
self.command_palette
- .update(cx, |_, cx| cx.emit(Dismiss))
+ .update(cx, |_, cx| cx.emit(Manager::Dismiss))
.log_err();
}
@@ -14,7 +14,7 @@ use editor::{
use futures::future::try_join_all;
use gpui::{
actions, div, AnyElement, AnyView, AppContext, Component, Context, Div, EventEmitter,
- FocusEvent, FocusHandle, Focusable, FocusableComponent, InteractiveComponent, ManagedView,
+ FocusEvent, FocusHandle, Focusable, FocusableComponent, FocusableView, InteractiveComponent,
Model, ParentComponent, Render, SharedString, Styled, Subscription, Task, View, ViewContext,
VisualContext, WeakView,
};
@@ -640,7 +640,7 @@ impl ProjectDiagnosticsEditor {
}
}
-impl ManagedView for ProjectDiagnosticsEditor {
+impl FocusableView for ProjectDiagnosticsEditor {
fn focus_handle(&self, _: &AppContext) -> FocusHandle {
self.focus_handle.clone()
}
@@ -2,9 +2,9 @@ use collections::HashMap;
use editor::{scroll::autoscroll::Autoscroll, Bias, Editor};
use fuzzy::{CharBag, PathMatch, PathMatchCandidate};
use gpui::{
- actions, div, AppContext, Component, Dismiss, Div, FocusHandle, InteractiveComponent,
- ManagedView, Model, ParentComponent, Render, Styled, Task, View, ViewContext, VisualContext,
- WeakView,
+ actions, div, AppContext, Component, Div, EventEmitter, FocusHandle, FocusableView,
+ InteractiveComponent, Manager, Model, ParentComponent, Render, Styled, Task, View, ViewContext,
+ VisualContext, WeakView,
};
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
@@ -111,7 +111,8 @@ impl FileFinder {
}
}
-impl ManagedView for FileFinder {
+impl EventEmitter<Manager> for FileFinder {}
+impl FocusableView for FileFinder {
fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
self.picker.focus_handle(cx)
}
@@ -688,7 +689,9 @@ impl PickerDelegate for FileFinderDelegate {
.log_err();
}
}
- finder.update(&mut cx, |_, cx| cx.emit(Dismiss)).ok()?;
+ finder
+ .update(&mut cx, |_, cx| cx.emit(Manager::Dismiss))
+ .ok()?;
Some(())
})
@@ -699,7 +702,7 @@ impl PickerDelegate for FileFinderDelegate {
fn dismissed(&mut self, cx: &mut ViewContext<Picker<FileFinderDelegate>>) {
self.file_finder
- .update(cx, |_, cx| cx.emit(Dismiss))
+ .update(cx, |_, cx| cx.emit(Manager::Dismiss))
.log_err();
}
@@ -1,7 +1,8 @@
use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor};
use gpui::{
- actions, div, prelude::*, AppContext, Dismiss, Div, FocusHandle, ManagedView, ParentComponent,
- Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext, WindowContext,
+ actions, div, prelude::*, AppContext, Div, EventEmitter, FocusHandle, FocusableView, Manager,
+ ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext,
+ WindowContext,
};
use text::{Bias, Point};
use theme::ActiveTheme;
@@ -23,11 +24,12 @@ pub struct GoToLine {
_subscriptions: Vec<Subscription>,
}
-impl ManagedView for GoToLine {
+impl FocusableView for GoToLine {
fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
- self.line_editor.focus_handle(cx)
+ self.active_editor.focus_handle(cx)
}
}
+impl EventEmitter<Manager> for GoToLine {}
impl GoToLine {
fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
@@ -87,7 +89,7 @@ impl GoToLine {
) {
match event {
// todo!() this isn't working...
- editor::EditorEvent::Blurred => cx.emit(Dismiss),
+ editor::EditorEvent::Blurred => cx.emit(Manager::Dismiss),
editor::EditorEvent::BufferEdited { .. } => self.highlight_current_line(cx),
_ => {}
}
@@ -122,7 +124,7 @@ impl GoToLine {
}
fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
- cx.emit(Dismiss);
+ cx.emit(Manager::Dismiss);
}
fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
@@ -139,7 +141,7 @@ impl GoToLine {
self.prev_scroll_position.take();
}
- cx.emit(Dismiss);
+ cx.emit(Manager::Dismiss);
}
}
@@ -1,6 +1,6 @@
use crate::{
AnyView, AnyWindowHandle, AppCell, AppContext, BackgroundExecutor, Context, FocusableView,
- ForegroundExecutor, Model, ModelContext, Render, Result, Task, View, ViewContext,
+ ForegroundExecutor, Manager, Model, ModelContext, Render, Result, Task, View, ViewContext,
VisualContext, WindowContext, WindowHandle,
};
use anyhow::{anyhow, Context as _};
@@ -320,4 +320,13 @@ impl VisualContext for AsyncWindowContext {
view.read(cx).focus_handle(cx).clone().focus(cx);
})
}
+
+ fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+ where
+ V: crate::ManagedView,
+ {
+ self.window.update(self, |_, cx| {
+ view.update(cx, |_, cx| cx.emit(Manager::Dismiss))
+ })
+ }
}
@@ -71,11 +71,12 @@ impl EntityMap {
#[track_caller]
pub fn lease<'a, T>(&mut self, model: &'a Model<T>) -> Lease<'a, T> {
self.assert_valid_context(model);
- let entity = Some(
- self.entities
- .remove(model.entity_id)
- .expect("Circular entity lease. Is the entity already being updated?"),
- );
+ let entity = Some(self.entities.remove(model.entity_id).unwrap_or_else(|| {
+ panic!(
+ "Circular entity lease of {}. Is it already being updated?",
+ std::any::type_name::<T>()
+ )
+ }));
Lease {
model,
entity,
@@ -605,6 +605,17 @@ impl<'a> VisualContext for VisualTestContext<'a> {
})
.unwrap()
}
+
+ fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+ where
+ V: crate::ManagedView,
+ {
+ self.window
+ .update(self.cx, |_, cx| {
+ view.update(cx, |_, cx| cx.emit(crate::Manager::Dismiss))
+ })
+ .unwrap()
+ }
}
impl AnyWindowHandle {
@@ -1124,9 +1124,14 @@ where
}
}
}
+ // if self.hover_style.is_some() {
if bounds.contains_point(&mouse_position) {
+ // eprintln!("div hovered {bounds:?} {mouse_position:?}");
style.refine(&self.hover_style);
+ } else {
+ // eprintln!("div NOT hovered {bounds:?} {mouse_position:?}");
}
+ // }
if let Some(drag) = cx.active_drag.take() {
for (state_type, group_drag_style) in &self.group_drag_over_styles {
@@ -81,7 +81,7 @@ impl<V> Element<V> for Img<V> {
if let Some(data) = image_future
.clone()
.now_or_never()
- .and_then(ResultExt::log_err)
+ .and_then(|result| result.ok())
{
let corner_radii = corner_radii.to_pixels(bounds.size, cx.rem_size());
cx.with_z_index(1, |cx| {
@@ -90,7 +90,7 @@ impl<V> Element<V> for Img<V> {
});
} else {
cx.spawn(|_, mut cx| async move {
- if image_future.await.log_err().is_some() {
+ if image_future.await.ok().is_some() {
cx.on_next_frame(|cx| cx.notify());
}
})
@@ -141,6 +141,10 @@ pub trait VisualContext: Context {
fn focus_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
where
V: FocusableView;
+
+ fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+ where
+ V: ManagedView;
}
pub trait Entity<T>: Sealed {
@@ -2,7 +2,7 @@ use crate::{ImageData, ImageId, SharedString};
use collections::HashMap;
use futures::{
future::{BoxFuture, Shared},
- AsyncReadExt, FutureExt,
+ AsyncReadExt, FutureExt, TryFutureExt,
};
use image::ImageError;
use parking_lot::Mutex;
@@ -88,6 +88,14 @@ impl ImageCache {
Ok(Arc::new(ImageData::new(image)))
}
}
+ .map_err({
+ let uri = uri.clone();
+
+ move |error| {
+ log::log!(log::Level::Error, "{:?} {:?}", &uri, &error);
+ error
+ }
+ })
.boxed()
.shared();
@@ -1205,10 +1205,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
InputEvent::MouseMove(_) if !(is_active || lock.kind == WindowKind::PopUp) => return,
- InputEvent::MouseUp(MouseUpEvent {
- button: MouseButton::Left,
- ..
- }) => {
+ InputEvent::MouseUp(MouseUpEvent { .. }) => {
lock.synthetic_drag_counter += 1;
}
@@ -191,6 +191,10 @@ impl AnyView {
self.model.entity_type
}
+ pub fn entity_id(&self) -> EntityId {
+ self.model.entity_id()
+ }
+
pub(crate) fn draw(
&self,
origin: Point<Pixels>,
@@ -193,17 +193,12 @@ pub trait FocusableView: Render {
/// ManagedView is a view (like a Modal, Popover, Menu, etc.)
/// where the lifecycle of the view is handled by another view.
-pub trait ManagedView: Render {
- fn focus_handle(&self, cx: &AppContext) -> FocusHandle;
-}
+pub trait ManagedView: FocusableView + EventEmitter<Manager> {}
-pub struct Dismiss;
-impl<T: ManagedView> EventEmitter<Dismiss> for T {}
+impl<M: FocusableView + EventEmitter<Manager>> ManagedView for M {}
-impl<T: ManagedView> FocusableView for T {
- fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
- self.focus_handle(cx)
- }
+pub enum Manager {
+ Dismiss,
}
// Holds the state for a specific window.
@@ -1582,6 +1577,13 @@ impl VisualContext for WindowContext<'_> {
view.focus_handle(cx).clone().focus(cx);
})
}
+
+ fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+ where
+ V: ManagedView,
+ {
+ self.update_view(view, |_, cx| cx.emit(Manager::Dismiss))
+ }
}
impl<'a> std::ops::Deref for WindowContext<'a> {
@@ -2275,6 +2277,13 @@ impl<'a, V: 'static> ViewContext<'a, V> {
{
self.defer(|view, cx| view.focus_handle(cx).focus(cx))
}
+
+ pub fn dismiss_self(&mut self)
+ where
+ V: ManagedView,
+ {
+ self.defer(|_, cx| cx.emit(Manager::Dismiss))
+ }
}
impl<V> Context for ViewContext<'_, V> {
@@ -2354,6 +2363,10 @@ impl<V: 'static> VisualContext for ViewContext<'_, V> {
fn focus_view<W: FocusableView>(&mut self, view: &View<W>) -> Self::Result<()> {
self.window_cx.focus_view(view)
}
+
+ fn dismiss_view<W: ManagedView>(&mut self, view: &View<W>) -> Self::Result<()> {
+ self.window_cx.dismiss_view(view)
+ }
}
impl<'a, V> std::ops::Deref for ViewContext<'a, V> {
@@ -20,10 +20,6 @@ impl IgnoreStack {
Arc::new(Self::All)
}
- pub fn is_all(&self) -> bool {
- matches!(self, IgnoreStack::All)
- }
-
pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
match self.as_ref() {
IgnoreStack::All => self,
@@ -5548,7 +5548,16 @@ impl Project {
.collect::<Vec<_>>();
let background = cx.background().clone();
- let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+ let path_count: usize = snapshots
+ .iter()
+ .map(|s| {
+ if query.include_ignored() {
+ s.file_count()
+ } else {
+ s.visible_file_count()
+ }
+ })
+ .sum();
if path_count == 0 {
let (_, rx) = smol::channel::bounded(1024);
return rx;
@@ -5561,8 +5570,16 @@ impl Project {
.iter()
.filter_map(|(_, b)| {
let buffer = b.upgrade(cx)?;
- let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
- if let Some(path) = snapshot.file().map(|file| file.path()) {
+ let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
+ let is_ignored = buffer
+ .project_path(cx)
+ .and_then(|path| self.entry_for_path(&path, cx))
+ .map_or(false, |entry| entry.is_ignored);
+ (is_ignored, buffer.snapshot())
+ });
+ if is_ignored && !query.include_ignored() {
+ return None;
+ } else if let Some(path) = snapshot.file().map(|file| file.path()) {
Some((path.clone(), (buffer, snapshot)))
} else {
unnamed_files.push(buffer);
@@ -5735,7 +5752,12 @@ impl Project {
let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new();
for snapshot in snapshots {
- let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+ let snapshot_end_ix = snapshot_start_ix
+ + if query.include_ignored() {
+ snapshot.file_count()
+ } else {
+ snapshot.visible_file_count()
+ };
if worker_end_ix <= snapshot_start_ix {
break;
} else if worker_start_ix > snapshot_end_ix {
@@ -5748,7 +5770,7 @@ impl Project {
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
for entry in snapshot
- .files(false, start_in_snapshot)
+ .files(query.include_ignored(), start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot)
{
if matching_paths_tx.is_closed() {
@@ -10,6 +10,8 @@ pub struct ProjectSettings {
pub lsp: HashMap<Arc<str>, LspSettings>,
#[serde(default)]
pub git: GitSettings,
+ #[serde(default)]
+ pub file_scan_exclusions: Option<Vec<String>>,
}
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
@@ -3598,7 +3598,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!(
search(
&project,
- SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
@@ -3623,7 +3623,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!(
search(
&project,
- SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
@@ -3662,6 +3662,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.odd").unwrap()],
Vec::new()
)
@@ -3681,6 +3682,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.rs").unwrap()],
Vec::new()
)
@@ -3703,6 +3705,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
@@ -3727,6 +3730,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(),
@@ -3774,6 +3778,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![PathMatcher::new("*.odd").unwrap()],
)
@@ -3798,6 +3803,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![PathMatcher::new("*.rs").unwrap()],
)
@@ -3820,6 +3826,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![
PathMatcher::new("*.ts").unwrap(),
@@ -3844,6 +3851,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![
PathMatcher::new("*.rs").unwrap(),
@@ -3885,6 +3893,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.odd").unwrap()],
vec![PathMatcher::new("*.odd").unwrap()],
)
@@ -3904,6 +3913,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.ts").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
).unwrap(),
@@ -3922,6 +3932,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
@@ -3947,6 +3958,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
@@ -39,6 +39,7 @@ pub enum SearchQuery {
replacement: Option<String>,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
inner: SearchInputs,
},
@@ -48,6 +49,7 @@ pub enum SearchQuery {
multiline: bool,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
inner: SearchInputs,
},
}
@@ -57,6 +59,7 @@ impl SearchQuery {
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> {
@@ -74,6 +77,7 @@ impl SearchQuery {
replacement: None,
whole_word,
case_sensitive,
+ include_ignored,
inner,
})
}
@@ -82,6 +86,7 @@ impl SearchQuery {
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> {
@@ -111,6 +116,7 @@ impl SearchQuery {
multiline,
whole_word,
case_sensitive,
+ include_ignored,
inner,
})
}
@@ -121,6 +127,7 @@ impl SearchQuery {
message.query,
message.whole_word,
message.case_sensitive,
+ message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
)
@@ -129,6 +136,7 @@ impl SearchQuery {
message.query,
message.whole_word,
message.case_sensitive,
+ message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
)
@@ -156,6 +164,7 @@ impl SearchQuery {
regex: self.is_regex(),
whole_word: self.whole_word(),
case_sensitive: self.case_sensitive(),
+ include_ignored: self.include_ignored(),
files_to_include: self
.files_to_include()
.iter()
@@ -336,6 +345,17 @@ impl SearchQuery {
}
}
+ pub fn include_ignored(&self) -> bool {
+ match self {
+ Self::Text {
+ include_ignored, ..
+ } => *include_ignored,
+ Self::Regex {
+ include_ignored, ..
+ } => *include_ignored,
+ }
+ }
+
pub fn is_regex(&self) -> bool {
matches!(self, Self::Regex { .. })
}
@@ -1,5 +1,6 @@
use crate::{
- copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
+ copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
+ ProjectEntryId, RemoveOptions,
};
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context, Result};
@@ -21,7 +22,10 @@ use futures::{
};
use fuzzy::CharBag;
use git::{DOT_GIT, GITIGNORE};
-use gpui::{executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{
+ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
+};
+use itertools::Itertools;
use language::{
proto::{
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@@ -36,6 +40,7 @@ use postage::{
prelude::{Sink as _, Stream as _},
watch,
};
+use settings::SettingsStore;
use smol::channel::{self, Sender};
use std::{
any::Any,
@@ -55,7 +60,10 @@ use std::{
time::{Duration, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt};
+use util::{
+ paths::{PathMatcher, HOME},
+ ResultExt,
+};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);
@@ -70,7 +78,8 @@ pub struct LocalWorktree {
scan_requests_tx: channel::Sender<ScanRequest>,
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
- _background_scanner_task: Task<()>,
+ _settings_subscription: Subscription,
+ _background_scanner_tasks: Vec<Task<()>>,
share: Option<ShareState>,
diagnostics: HashMap<
Arc<Path>,
@@ -216,6 +225,7 @@ pub struct LocalSnapshot {
/// All of the git repositories in the worktree, indexed by the project entry
/// id of their parent directory.
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+ file_scan_exclusions: Vec<PathMatcher>,
}
struct BackgroundScannerState {
@@ -299,17 +309,54 @@ impl Worktree {
.await
.context("failed to stat worktree path")?;
+ let closure_fs = Arc::clone(&fs);
+ let closure_next_entry_id = Arc::clone(&next_entry_id);
+ let closure_abs_path = abs_path.to_path_buf();
Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
+ let settings_subscription = cx.observe_global::<SettingsStore, _>(move |this, cx| {
+ if let Self::Local(this) = this {
+ let new_file_scan_exclusions =
+ file_scan_exclusions(settings::get::<ProjectSettings>(cx));
+ if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
+ this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
+ log::info!(
+ "Re-scanning directories, new scan exclude files: {:?}",
+ this.snapshot
+ .file_scan_exclusions
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ );
+
+ let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+ let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
+ channel::unbounded();
+ this.scan_requests_tx = scan_requests_tx;
+ this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
+ this._background_scanner_tasks = start_background_scan_tasks(
+ &closure_abs_path,
+ this.snapshot(),
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ Arc::clone(&closure_next_entry_id),
+ Arc::clone(&closure_fs),
+ cx,
+ );
+ this.is_scanning = watch::channel_with(true);
+ }
+ }
+ });
+
let root_name = abs_path
.file_name()
.map_or(String::new(), |f| f.to_string_lossy().to_string());
-
let mut snapshot = LocalSnapshot {
+ file_scan_exclusions: file_scan_exclusions(settings::get::<ProjectSettings>(cx)),
ignores_by_parent_abs_path: Default::default(),
git_repositories: Default::default(),
snapshot: Snapshot {
id: WorktreeId::from_usize(cx.model_id()),
- abs_path: abs_path.clone(),
+ abs_path: abs_path.to_path_buf().into(),
root_name: root_name.clone(),
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
entries_by_path: Default::default(),
@@ -334,60 +381,23 @@ impl Worktree {
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
- let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
-
- cx.spawn_weak(|this, mut cx| async move {
- while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
- this.update(&mut cx, |this, cx| {
- let this = this.as_local_mut().unwrap();
- match state {
- ScanState::Started => {
- *this.is_scanning.0.borrow_mut() = true;
- }
- ScanState::Updated {
- snapshot,
- changes,
- barrier,
- scanning,
- } => {
- *this.is_scanning.0.borrow_mut() = scanning;
- this.set_snapshot(snapshot, changes, cx);
- drop(barrier);
- }
- }
- cx.notify();
- });
- }
- })
- .detach();
-
- let background_scanner_task = cx.background().spawn({
- let fs = fs.clone();
- let snapshot = snapshot.clone();
- let background = cx.background().clone();
- async move {
- let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
- BackgroundScanner::new(
- snapshot,
- next_entry_id,
- fs,
- scan_states_tx,
- background,
- scan_requests_rx,
- path_prefixes_to_scan_rx,
- )
- .run(events)
- .await;
- }
- });
-
+ let task_snapshot = snapshot.clone();
Worktree::Local(LocalWorktree {
snapshot,
is_scanning: watch::channel_with(true),
share: None,
scan_requests_tx,
path_prefixes_to_scan_tx,
- _background_scanner_task: background_scanner_task,
+ _settings_subscription: settings_subscription,
+ _background_scanner_tasks: start_background_scan_tasks(
+ &abs_path,
+ task_snapshot,
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ Arc::clone(&next_entry_id),
+ Arc::clone(&fs),
+ cx,
+ ),
diagnostics: Default::default(),
diagnostic_summaries: Default::default(),
client,
@@ -584,6 +594,76 @@ impl Worktree {
}
}
+fn start_background_scan_tasks(
+ abs_path: &Path,
+ snapshot: LocalSnapshot,
+ scan_requests_rx: channel::Receiver<ScanRequest>,
+ path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+ next_entry_id: Arc<AtomicUsize>,
+ fs: Arc<dyn Fs>,
+ cx: &mut ModelContext<'_, Worktree>,
+) -> Vec<Task<()>> {
+ let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
+ let background_scanner = cx.background().spawn({
+ let abs_path = abs_path.to_path_buf();
+ let background = cx.background().clone();
+ async move {
+ let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
+ BackgroundScanner::new(
+ snapshot,
+ next_entry_id,
+ fs,
+ scan_states_tx,
+ background,
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ )
+ .run(events)
+ .await;
+ }
+ });
+ let scan_state_updater = cx.spawn_weak(|this, mut cx| async move {
+ while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
+ this.update(&mut cx, |this, cx| {
+ let this = this.as_local_mut().unwrap();
+ match state {
+ ScanState::Started => {
+ *this.is_scanning.0.borrow_mut() = true;
+ }
+ ScanState::Updated {
+ snapshot,
+ changes,
+ barrier,
+ scanning,
+ } => {
+ *this.is_scanning.0.borrow_mut() = scanning;
+ this.set_snapshot(snapshot, changes, cx);
+ drop(barrier);
+ }
+ }
+ cx.notify();
+ });
+ }
+ });
+ vec![background_scanner, scan_state_updater]
+}
+
+fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
+ project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter()
+ .sorted()
+ .filter_map(|pattern| {
+ PathMatcher::new(pattern)
+ .map(Some)
+ .unwrap_or_else(|e| {
+ log::error!(
+ "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
+ );
+ None
+ })
+ })
+ .collect()
+}
+
impl LocalWorktree {
pub fn contains_abs_path(&self, path: &Path) -> bool {
path.starts_with(&self.abs_path)
@@ -1481,7 +1561,7 @@ impl Snapshot {
self.entries_by_id.get(&entry_id, &()).is_some()
}
- pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
+ fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
let entry = Entry::try_from((&self.root_char_bag, entry))?;
let old_entry = self.entries_by_id.insert_or_replace(
PathEntry {
@@ -2145,6 +2225,12 @@ impl LocalSnapshot {
paths.sort_by(|a, b| a.0.cmp(b.0));
paths
}
+
+ fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
+ self.file_scan_exclusions
+ .iter()
+ .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+ }
}
impl BackgroundScannerState {
@@ -2167,7 +2253,7 @@ impl BackgroundScannerState {
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
let mut containing_repository = None;
- if !ignore_stack.is_all() {
+ if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
containing_repository = Some((
@@ -2378,18 +2464,30 @@ impl BackgroundScannerState {
// Remove any git repositories whose .git entry no longer exists.
let snapshot = &mut self.snapshot;
- let mut repositories = mem::take(&mut snapshot.git_repositories);
- let mut repository_entries = mem::take(&mut snapshot.repository_entries);
- repositories.retain(|work_directory_id, _| {
- snapshot
- .entry_for_id(*work_directory_id)
+ let mut ids_to_preserve = HashSet::default();
+ for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
+ let exists_in_snapshot = snapshot
+ .entry_for_id(work_directory_id)
.map_or(false, |entry| {
snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
- })
- });
- repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
- snapshot.git_repositories = repositories;
- snapshot.repository_entries = repository_entries;
+ });
+ if exists_in_snapshot {
+ ids_to_preserve.insert(work_directory_id);
+ } else {
+ let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
+ if snapshot.is_abs_path_excluded(&git_dir_abs_path)
+ && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
+ {
+ ids_to_preserve.insert(work_directory_id);
+ }
+ }
+ }
+ snapshot
+ .git_repositories
+ .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
+ snapshot
+ .repository_entries
+ .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
}
fn build_git_repository(
@@ -3094,7 +3192,7 @@ impl BackgroundScanner {
let ignore_stack = state
.snapshot
.ignore_stack_for_abs_path(&root_abs_path, true);
- if ignore_stack.is_all() {
+ if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
root_entry.is_ignored = true;
state.insert_entry(root_entry.clone(), self.fs.as_ref());
}
@@ -3231,14 +3329,22 @@ impl BackgroundScanner {
return false;
};
- let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
- snapshot
- .entry_for_path(parent)
- .map_or(false, |entry| entry.kind == EntryKind::Dir)
- });
- if !parent_dir_is_loaded {
- log::debug!("ignoring event {relative_path:?} within unloaded directory");
- return false;
+ if !is_git_related(&abs_path) {
+ let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
+ snapshot
+ .entry_for_path(parent)
+ .map_or(false, |entry| entry.kind == EntryKind::Dir)
+ });
+ if !parent_dir_is_loaded {
+ log::debug!("ignoring event {relative_path:?} within unloaded directory");
+ return false;
+ }
+ if snapshot.is_abs_path_excluded(abs_path) {
+ log::debug!(
+ "ignoring FS event for path {relative_path:?} within excluded directory"
+ );
+ return false;
+ }
}
relative_paths.push(relative_path);
@@ -3401,18 +3507,26 @@ impl BackgroundScanner {
}
async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
- log::debug!("scan directory {:?}", job.path);
-
- let mut ignore_stack = job.ignore_stack.clone();
- let mut new_ignore = None;
- let (root_abs_path, root_char_bag, next_entry_id) = {
- let snapshot = &self.state.lock().snapshot;
- (
- snapshot.abs_path().clone(),
- snapshot.root_char_bag,
- self.next_entry_id.clone(),
- )
- };
+ let root_abs_path;
+ let mut ignore_stack;
+ let mut new_ignore;
+ let root_char_bag;
+ let next_entry_id;
+ {
+ let state = self.state.lock();
+ let snapshot = &state.snapshot;
+ root_abs_path = snapshot.abs_path().clone();
+ if snapshot.is_abs_path_excluded(&job.abs_path) {
+ log::debug!("skipping excluded directory {:?}", job.path);
+ return Ok(());
+ }
+ log::debug!("scanning directory {:?}", job.path);
+ ignore_stack = job.ignore_stack.clone();
+ new_ignore = None;
+ root_char_bag = snapshot.root_char_bag;
+ next_entry_id = self.next_entry_id.clone();
+ drop(state);
+ }
let mut dotgit_path = None;
let mut root_canonical_path = None;
@@ -3427,18 +3541,8 @@ impl BackgroundScanner {
continue;
}
};
-
let child_name = child_abs_path.file_name().unwrap();
let child_path: Arc<Path> = job.path.join(child_name).into();
- let child_metadata = match self.fs.metadata(&child_abs_path).await {
- Ok(Some(metadata)) => metadata,
- Ok(None) => continue,
- Err(err) => {
- log::error!("error processing {:?}: {:?}", child_abs_path, err);
- continue;
- }
- };
-
// If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
if child_name == *GITIGNORE {
match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@@ -3482,6 +3586,26 @@ impl BackgroundScanner {
dotgit_path = Some(child_path.clone());
}
+ {
+ let mut state = self.state.lock();
+ if state.snapshot.is_abs_path_excluded(&child_abs_path) {
+ let relative_path = job.path.join(child_name);
+ log::debug!("skipping excluded child entry {relative_path:?}");
+ state.remove_path(&relative_path);
+ continue;
+ }
+ drop(state);
+ }
+
+ let child_metadata = match self.fs.metadata(&child_abs_path).await {
+ Ok(Some(metadata)) => metadata,
+ Ok(None) => continue,
+ Err(err) => {
+ log::error!("error processing {child_abs_path:?}: {err:?}");
+ continue;
+ }
+ };
+
let mut child_entry = Entry::new(
child_path.clone(),
&child_metadata,
@@ -3662,19 +3786,16 @@ impl BackgroundScanner {
self.next_entry_id.as_ref(),
state.snapshot.root_char_bag,
);
- fs_entry.is_ignored = ignore_stack.is_all();
+ let is_dir = fs_entry.is_dir();
+ fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
- if !fs_entry.is_ignored {
- if !fs_entry.is_dir() {
- if let Some((work_dir, repo)) =
- state.snapshot.local_repo_for_path(&path)
- {
- if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
- let repo_path = RepoPath(repo_path.into());
- let repo = repo.repo_ptr.lock();
- fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
- }
+ if !is_dir && !fs_entry.is_ignored {
+ if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
+ if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
+ let repo_path = RepoPath(repo_path.into());
+ let repo = repo.repo_ptr.lock();
+ fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
}
}
}
@@ -3833,8 +3954,7 @@ impl BackgroundScanner {
ignore_stack.clone()
};
- // Scan any directories that were previously ignored and weren't
- // previously scanned.
+ // Scan any directories that were previously ignored and weren't previously scanned.
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
let state = self.state.lock();
if state.should_scan_directory(&entry) {
@@ -4010,6 +4130,12 @@ impl BackgroundScanner {
}
}
+fn is_git_related(abs_path: &Path) -> bool {
+ abs_path
+ .components()
+ .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
+}
+
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
let mut result = root_char_bag;
result.extend(
@@ -1,6 +1,7 @@
use crate::{
+ project_settings::ProjectSettings,
worktree::{Event, Snapshot, WorktreeModelHandle},
- Entry, EntryKind, PathChange, Worktree,
+ Entry, EntryKind, PathChange, Project, Worktree,
};
use anyhow::Result;
use client::Client;
@@ -12,6 +13,7 @@ use postage::stream::Stream;
use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
+use settings::SettingsStore;
use std::{
env,
fmt::Write,
@@ -23,6 +25,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
#[gpui::test]
async fn test_traversal(cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -78,6 +81,7 @@ async fn test_traversal(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_descendent_entries(cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -185,6 +189,7 @@ async fn test_descendent_entries(cx: &mut TestAppContext) {
#[gpui::test(iterations = 10)]
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -264,6 +269,7 @@ async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppCo
#[gpui::test]
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -439,6 +445,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -599,6 +606,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -722,6 +730,14 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions = Some(Vec::new());
+ });
+ });
+ });
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -827,6 +843,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
+ init_test(cx);
let dir = temp_tree(json!({
".git": {},
".gitignore": "ignored-dir\n",
@@ -877,8 +894,105 @@ async fn test_write_file(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
+ init_test(cx);
+ let dir = temp_tree(json!({
+ ".gitignore": "**/target\n/node_modules\n",
+ "target": {
+ "index": "blah2"
+ },
+ "node_modules": {
+ ".DS_Store": "",
+ "prettier": {
+ "package.json": "{}",
+ },
+ },
+ "src": {
+ ".DS_Store": "",
+ "foo": {
+ "foo.rs": "mod another;\n",
+ "another.rs": "// another",
+ },
+ "bar": {
+ "bar.rs": "// bar",
+ },
+ "lib.rs": "mod foo;\nmod bar;\n",
+ },
+ ".DS_Store": "",
+ }));
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
+ });
+ });
+ });
+
+ let tree = Worktree::local(
+ build_client(cx),
+ dir.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+ tree.read_with(cx, |tree, _| {
+ check_worktree_entries(
+ tree,
+ &[
+ "src/foo/foo.rs",
+ "src/foo/another.rs",
+ "node_modules/.DS_Store",
+ "src/.DS_Store",
+ ".DS_Store",
+ ],
+ &["target", "node_modules"],
+ &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
+ )
+ });
+
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["**/node_modules/**".to_string()]);
+ });
+ });
+ });
+ tree.flush_fs_events(cx).await;
+ cx.foreground().run_until_parked();
+ tree.read_with(cx, |tree, _| {
+ check_worktree_entries(
+ tree,
+ &[
+ "node_modules/prettier/package.json",
+ "node_modules/.DS_Store",
+ "node_modules",
+ ],
+ &["target"],
+ &[
+ ".gitignore",
+ "src/lib.rs",
+ "src/bar/bar.rs",
+ "src/foo/foo.rs",
+ "src/foo/another.rs",
+ "src/.DS_Store",
+ ".DS_Store",
+ ],
+ )
+ });
+}
+
#[gpui::test(iterations = 30)]
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -938,6 +1052,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
+ init_test(cx);
let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_fake = FakeFs::new(cx.background());
@@ -1054,6 +1169,7 @@ async fn test_random_worktree_operations_during_initial_scan(
cx: &mut TestAppContext,
mut rng: StdRng,
) {
+ init_test(cx);
let operations = env::var("OPERATIONS")
.map(|o| o.parse().unwrap())
.unwrap_or(5);
@@ -1143,6 +1259,7 @@ async fn test_random_worktree_operations_during_initial_scan(
#[gpui::test(iterations = 100)]
async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+ init_test(cx);
let operations = env::var("OPERATIONS")
.map(|o| o.parse().unwrap())
.unwrap_or(40);
@@ -1557,6 +1674,7 @@ fn random_filename(rng: &mut impl Rng) -> String {
#[gpui::test]
async fn test_rename_work_directory(cx: &mut TestAppContext) {
+ init_test(cx);
let root = temp_tree(json!({
"projects": {
"project1": {
@@ -1627,6 +1745,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+ init_test(cx);
let root = temp_tree(json!({
"c.txt": "",
"dir1": {
@@ -1747,6 +1866,15 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]);
+ });
+ });
+ });
const IGNORE_RULE: &'static str = "**/target";
let root = temp_tree(json!({
@@ -1935,6 +2063,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
#[gpui::test]
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+ init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
@@ -2139,3 +2268,44 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Sta
.map(|status| (status.path().unwrap().to_string(), status.status()))
.collect()
}
+
+#[track_caller]
+fn check_worktree_entries(
+ tree: &Worktree,
+ expected_excluded_paths: &[&str],
+ expected_ignored_paths: &[&str],
+ expected_tracked_paths: &[&str],
+) {
+ for path in expected_excluded_paths {
+ let entry = tree.entry_for_path(path);
+ assert!(
+ entry.is_none(),
+ "expected path '{path}' to be excluded, but got entry: {entry:?}",
+ );
+ }
+ for path in expected_ignored_paths {
+ let entry = tree
+ .entry_for_path(path)
+ .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
+ assert!(
+ entry.is_ignored,
+ "expected path '{path}' to be ignored, but got entry: {entry:?}",
+ );
+ }
+ for path in expected_tracked_paths {
+ let entry = tree
+ .entry_for_path(path)
+ .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
+ assert!(
+ !entry.is_ignored,
+ "expected path '{path}' to be tracked, but got entry: {entry:?}",
+ );
+ }
+}
+
+fn init_test(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| {
+ cx.set_global(SettingsStore::test(cx));
+ Project::init_settings(cx);
+ });
+}
@@ -20,10 +20,6 @@ impl IgnoreStack {
Arc::new(Self::All)
}
- pub fn is_all(&self) -> bool {
- matches!(self, IgnoreStack::All)
- }
-
pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
match self.as_ref() {
IgnoreStack::All => self,
@@ -5618,7 +5618,16 @@ impl Project {
.collect::<Vec<_>>();
let background = cx.background_executor().clone();
- let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+ let path_count: usize = snapshots
+ .iter()
+ .map(|s| {
+ if query.include_ignored() {
+ s.file_count()
+ } else {
+ s.visible_file_count()
+ }
+ })
+ .sum();
if path_count == 0 {
let (_, rx) = smol::channel::bounded(1024);
return rx;
@@ -5631,8 +5640,16 @@ impl Project {
.iter()
.filter_map(|(_, b)| {
let buffer = b.upgrade()?;
- let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
- if let Some(path) = snapshot.file().map(|file| file.path()) {
+ let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
+ let is_ignored = buffer
+ .project_path(cx)
+ .and_then(|path| self.entry_for_path(&path, cx))
+ .map_or(false, |entry| entry.is_ignored);
+ (is_ignored, buffer.snapshot())
+ });
+ if is_ignored && !query.include_ignored() {
+ return None;
+ } else if let Some(path) = snapshot.file().map(|file| file.path()) {
Some((path.clone(), (buffer, snapshot)))
} else {
unnamed_files.push(buffer);
@@ -5806,7 +5823,12 @@ impl Project {
let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new();
for snapshot in snapshots {
- let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+ let snapshot_end_ix = snapshot_start_ix
+ + if query.include_ignored() {
+ snapshot.file_count()
+ } else {
+ snapshot.visible_file_count()
+ };
if worker_end_ix <= snapshot_start_ix {
break;
} else if worker_start_ix > snapshot_end_ix {
@@ -5819,7 +5841,7 @@ impl Project {
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
for entry in snapshot
- .files(false, start_in_snapshot)
+ .files(query.include_ignored(), start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot)
{
if matching_paths_tx.is_closed() {
@@ -11,6 +11,8 @@ pub struct ProjectSettings {
pub lsp: HashMap<Arc<str>, LspSettings>,
#[serde(default)]
pub git: GitSettings,
+ #[serde(default)]
+ pub file_scan_exclusions: Option<Vec<String>>,
}
#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
@@ -3730,7 +3730,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!(
search(
&project,
- SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
@@ -3755,7 +3755,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
assert_eq!(
search(
&project,
- SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+ SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
cx
)
.await
@@ -3794,6 +3794,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.odd").unwrap()],
Vec::new()
)
@@ -3813,6 +3814,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.rs").unwrap()],
Vec::new()
)
@@ -3835,6 +3837,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap(),
@@ -3859,6 +3862,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.rs").unwrap(),
PathMatcher::new("*.ts").unwrap(),
@@ -3906,6 +3910,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![PathMatcher::new("*.odd").unwrap()],
)
@@ -3930,6 +3935,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![PathMatcher::new("*.rs").unwrap()],
)
@@ -3952,6 +3958,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![
PathMatcher::new("*.ts").unwrap(),
@@ -3976,6 +3983,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
search_query,
false,
true,
+ false,
Vec::new(),
vec![
PathMatcher::new("*.rs").unwrap(),
@@ -4017,6 +4025,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.odd").unwrap()],
vec![PathMatcher::new("*.odd").unwrap()],
)
@@ -4036,6 +4045,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![PathMatcher::new("*.ts").unwrap()],
vec![PathMatcher::new("*.ts").unwrap()],
).unwrap(),
@@ -4054,6 +4064,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
@@ -4079,6 +4090,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query,
false,
true,
+ false,
vec![
PathMatcher::new("*.ts").unwrap(),
PathMatcher::new("*.odd").unwrap()
@@ -39,6 +39,7 @@ pub enum SearchQuery {
replacement: Option<String>,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
inner: SearchInputs,
},
@@ -48,6 +49,7 @@ pub enum SearchQuery {
multiline: bool,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
inner: SearchInputs,
},
}
@@ -57,6 +59,7 @@ impl SearchQuery {
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> {
@@ -74,6 +77,7 @@ impl SearchQuery {
replacement: None,
whole_word,
case_sensitive,
+ include_ignored,
inner,
})
}
@@ -82,6 +86,7 @@ impl SearchQuery {
query: impl ToString,
whole_word: bool,
case_sensitive: bool,
+ include_ignored: bool,
files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> {
@@ -111,6 +116,7 @@ impl SearchQuery {
multiline,
whole_word,
case_sensitive,
+ include_ignored,
inner,
})
}
@@ -121,6 +127,7 @@ impl SearchQuery {
message.query,
message.whole_word,
message.case_sensitive,
+ message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
)
@@ -129,6 +136,7 @@ impl SearchQuery {
message.query,
message.whole_word,
message.case_sensitive,
+ message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
)
@@ -156,6 +164,7 @@ impl SearchQuery {
regex: self.is_regex(),
whole_word: self.whole_word(),
case_sensitive: self.case_sensitive(),
+ include_ignored: self.include_ignored(),
files_to_include: self
.files_to_include()
.iter()
@@ -336,6 +345,17 @@ impl SearchQuery {
}
}
+ pub fn include_ignored(&self) -> bool {
+ match self {
+ Self::Text {
+ include_ignored, ..
+ } => *include_ignored,
+ Self::Regex {
+ include_ignored, ..
+ } => *include_ignored,
+ }
+ }
+
pub fn is_regex(&self) -> bool {
matches!(self, Self::Regex { .. })
}
@@ -1,5 +1,6 @@
use crate::{
- copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
+ copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
+ ProjectEntryId, RemoveOptions,
};
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context as _, Result};
@@ -25,6 +26,7 @@ use gpui::{
AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext,
Task,
};
+use itertools::Itertools;
use language::{
proto::{
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@@ -39,6 +41,7 @@ use postage::{
prelude::{Sink as _, Stream as _},
watch,
};
+use settings::{Settings, SettingsStore};
use smol::channel::{self, Sender};
use std::{
any::Any,
@@ -58,7 +61,10 @@ use std::{
time::{Duration, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt};
+use util::{
+ paths::{PathMatcher, HOME},
+ ResultExt,
+};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);
@@ -73,7 +79,7 @@ pub struct LocalWorktree {
scan_requests_tx: channel::Sender<ScanRequest>,
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
- _background_scanner_task: Task<()>,
+ _background_scanner_tasks: Vec<Task<()>>,
share: Option<ShareState>,
diagnostics: HashMap<
Arc<Path>,
@@ -219,6 +225,7 @@ pub struct LocalSnapshot {
/// All of the git repositories in the worktree, indexed by the project entry
/// id of their parent directory.
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+ file_scan_exclusions: Vec<PathMatcher>,
}
struct BackgroundScannerState {
@@ -302,17 +309,56 @@ impl Worktree {
.await
.context("failed to stat worktree path")?;
+ let closure_fs = Arc::clone(&fs);
+ let closure_next_entry_id = Arc::clone(&next_entry_id);
+ let closure_abs_path = abs_path.to_path_buf();
cx.build_model(move |cx: &mut ModelContext<Worktree>| {
+ cx.observe_global::<SettingsStore>(move |this, cx| {
+ if let Self::Local(this) = this {
+ let new_file_scan_exclusions =
+ file_scan_exclusions(ProjectSettings::get_global(cx));
+ if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
+ this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
+ log::info!(
+ "Re-scanning directories, new scan exclusions: {:?}",
+ this.snapshot
+ .file_scan_exclusions
+ .iter()
+ .map(ToString::to_string)
+ .collect::<Vec<_>>()
+ );
+
+ let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+ let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
+ channel::unbounded();
+ this.scan_requests_tx = scan_requests_tx;
+ this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
+ this._background_scanner_tasks = start_background_scan_tasks(
+ &closure_abs_path,
+ this.snapshot(),
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ Arc::clone(&closure_next_entry_id),
+ Arc::clone(&closure_fs),
+ cx,
+ );
+ this.is_scanning = watch::channel_with(true);
+ }
+ }
+ })
+ .detach();
+
let root_name = abs_path
.file_name()
.map_or(String::new(), |f| f.to_string_lossy().to_string());
let mut snapshot = LocalSnapshot {
+ file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)),
ignores_by_parent_abs_path: Default::default(),
git_repositories: Default::default(),
snapshot: Snapshot {
id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize),
- abs_path: abs_path.clone(),
+ abs_path: abs_path.to_path_buf().into(),
root_name: root_name.clone(),
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
entries_by_path: Default::default(),
@@ -337,61 +383,22 @@ impl Worktree {
let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
- let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
-
- cx.spawn(|this, mut cx| async move {
- while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
- this.update(&mut cx, |this, cx| {
- let this = this.as_local_mut().unwrap();
- match state {
- ScanState::Started => {
- *this.is_scanning.0.borrow_mut() = true;
- }
- ScanState::Updated {
- snapshot,
- changes,
- barrier,
- scanning,
- } => {
- *this.is_scanning.0.borrow_mut() = scanning;
- this.set_snapshot(snapshot, changes, cx);
- drop(barrier);
- }
- }
- cx.notify();
- })
- .ok();
- }
- })
- .detach();
-
- let background_scanner_task = cx.background_executor().spawn({
- let fs = fs.clone();
- let snapshot = snapshot.clone();
- let background = cx.background_executor().clone();
- async move {
- let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
- BackgroundScanner::new(
- snapshot,
- next_entry_id,
- fs,
- scan_states_tx,
- background,
- scan_requests_rx,
- path_prefixes_to_scan_rx,
- )
- .run(events)
- .await;
- }
- });
-
+ let task_snapshot = snapshot.clone();
Worktree::Local(LocalWorktree {
snapshot,
is_scanning: watch::channel_with(true),
share: None,
scan_requests_tx,
path_prefixes_to_scan_tx,
- _background_scanner_task: background_scanner_task,
+ _background_scanner_tasks: start_background_scan_tasks(
+ &abs_path,
+ task_snapshot,
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ Arc::clone(&next_entry_id),
+ Arc::clone(&fs),
+ cx,
+ ),
diagnostics: Default::default(),
diagnostic_summaries: Default::default(),
client,
@@ -584,6 +591,77 @@ impl Worktree {
}
}
+fn start_background_scan_tasks(
+ abs_path: &Path,
+ snapshot: LocalSnapshot,
+ scan_requests_rx: channel::Receiver<ScanRequest>,
+ path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+ next_entry_id: Arc<AtomicUsize>,
+ fs: Arc<dyn Fs>,
+ cx: &mut ModelContext<'_, Worktree>,
+) -> Vec<Task<()>> {
+ let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
+ let background_scanner = cx.background_executor().spawn({
+ let abs_path = abs_path.to_path_buf();
+ let background = cx.background_executor().clone();
+ async move {
+ let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
+ BackgroundScanner::new(
+ snapshot,
+ next_entry_id,
+ fs,
+ scan_states_tx,
+ background,
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ )
+ .run(events)
+ .await;
+ }
+ });
+ let scan_state_updater = cx.spawn(|this, mut cx| async move {
+ while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
+ this.update(&mut cx, |this, cx| {
+ let this = this.as_local_mut().unwrap();
+ match state {
+ ScanState::Started => {
+ *this.is_scanning.0.borrow_mut() = true;
+ }
+ ScanState::Updated {
+ snapshot,
+ changes,
+ barrier,
+ scanning,
+ } => {
+ *this.is_scanning.0.borrow_mut() = scanning;
+ this.set_snapshot(snapshot, changes, cx);
+ drop(barrier);
+ }
+ }
+ cx.notify();
+ })
+ .ok();
+ }
+ });
+ vec![background_scanner, scan_state_updater]
+}
+
+fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
+ project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter()
+ .sorted()
+ .filter_map(|pattern| {
+ PathMatcher::new(pattern)
+ .map(Some)
+ .unwrap_or_else(|e| {
+ log::error!(
+ "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
+ );
+ None
+ })
+ })
+ .collect()
+}
+
impl LocalWorktree {
pub fn contains_abs_path(&self, path: &Path) -> bool {
path.starts_with(&self.abs_path)
@@ -1482,7 +1560,7 @@ impl Snapshot {
self.entries_by_id.get(&entry_id, &()).is_some()
}
- pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
+ fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
let entry = Entry::try_from((&self.root_char_bag, entry))?;
let old_entry = self.entries_by_id.insert_or_replace(
PathEntry {
@@ -2143,6 +2221,12 @@ impl LocalSnapshot {
paths.sort_by(|a, b| a.0.cmp(b.0));
paths
}
+
+ fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
+ self.file_scan_exclusions
+ .iter()
+ .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+ }
}
impl BackgroundScannerState {
@@ -2165,7 +2249,7 @@ impl BackgroundScannerState {
let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
let mut containing_repository = None;
- if !ignore_stack.is_all() {
+ if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
containing_repository = Some((
@@ -2376,18 +2460,30 @@ impl BackgroundScannerState {
// Remove any git repositories whose .git entry no longer exists.
let snapshot = &mut self.snapshot;
- let mut repositories = mem::take(&mut snapshot.git_repositories);
- let mut repository_entries = mem::take(&mut snapshot.repository_entries);
- repositories.retain(|work_directory_id, _| {
- snapshot
- .entry_for_id(*work_directory_id)
+ let mut ids_to_preserve = HashSet::default();
+ for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
+ let exists_in_snapshot = snapshot
+ .entry_for_id(work_directory_id)
.map_or(false, |entry| {
snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
- })
- });
- repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
- snapshot.git_repositories = repositories;
- snapshot.repository_entries = repository_entries;
+ });
+ if exists_in_snapshot {
+ ids_to_preserve.insert(work_directory_id);
+ } else {
+ let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
+ if snapshot.is_abs_path_excluded(&git_dir_abs_path)
+ && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
+ {
+ ids_to_preserve.insert(work_directory_id);
+ }
+ }
+ }
+ snapshot
+ .git_repositories
+ .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
+ snapshot
+ .repository_entries
+ .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
}
fn build_git_repository(
@@ -3085,7 +3181,7 @@ impl BackgroundScanner {
let ignore_stack = state
.snapshot
.ignore_stack_for_abs_path(&root_abs_path, true);
- if ignore_stack.is_all() {
+ if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
root_entry.is_ignored = true;
state.insert_entry(root_entry.clone(), self.fs.as_ref());
}
@@ -3222,14 +3318,22 @@ impl BackgroundScanner {
return false;
};
- let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
- snapshot
- .entry_for_path(parent)
- .map_or(false, |entry| entry.kind == EntryKind::Dir)
- });
- if !parent_dir_is_loaded {
- log::debug!("ignoring event {relative_path:?} within unloaded directory");
- return false;
+ if !is_git_related(&abs_path) {
+ let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
+ snapshot
+ .entry_for_path(parent)
+ .map_or(false, |entry| entry.kind == EntryKind::Dir)
+ });
+ if !parent_dir_is_loaded {
+ log::debug!("ignoring event {relative_path:?} within unloaded directory");
+ return false;
+ }
+ if snapshot.is_abs_path_excluded(abs_path) {
+ log::debug!(
+ "ignoring FS event for path {relative_path:?} within excluded directory"
+ );
+ return false;
+ }
}
relative_paths.push(relative_path);
@@ -3392,18 +3496,26 @@ impl BackgroundScanner {
}
async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
- log::debug!("scan directory {:?}", job.path);
-
- let mut ignore_stack = job.ignore_stack.clone();
- let mut new_ignore = None;
- let (root_abs_path, root_char_bag, next_entry_id) = {
- let snapshot = &self.state.lock().snapshot;
- (
- snapshot.abs_path().clone(),
- snapshot.root_char_bag,
- self.next_entry_id.clone(),
- )
- };
+ let root_abs_path;
+ let mut ignore_stack;
+ let mut new_ignore;
+ let root_char_bag;
+ let next_entry_id;
+ {
+ let state = self.state.lock();
+ let snapshot = &state.snapshot;
+ root_abs_path = snapshot.abs_path().clone();
+ if snapshot.is_abs_path_excluded(&job.abs_path) {
+ log::debug!("skipping excluded directory {:?}", job.path);
+ return Ok(());
+ }
+ log::debug!("scanning directory {:?}", job.path);
+ ignore_stack = job.ignore_stack.clone();
+ new_ignore = None;
+ root_char_bag = snapshot.root_char_bag;
+ next_entry_id = self.next_entry_id.clone();
+ drop(state);
+ }
let mut dotgit_path = None;
let mut root_canonical_path = None;
@@ -3418,18 +3530,8 @@ impl BackgroundScanner {
continue;
}
};
-
let child_name = child_abs_path.file_name().unwrap();
let child_path: Arc<Path> = job.path.join(child_name).into();
- let child_metadata = match self.fs.metadata(&child_abs_path).await {
- Ok(Some(metadata)) => metadata,
- Ok(None) => continue,
- Err(err) => {
- log::error!("error processing {:?}: {:?}", child_abs_path, err);
- continue;
- }
- };
-
// If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
if child_name == *GITIGNORE {
match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@@ -3473,6 +3575,26 @@ impl BackgroundScanner {
dotgit_path = Some(child_path.clone());
}
+ {
+ let mut state = self.state.lock();
+ if state.snapshot.is_abs_path_excluded(&child_abs_path) {
+ let relative_path = job.path.join(child_name);
+ log::debug!("skipping excluded child entry {relative_path:?}");
+ state.remove_path(&relative_path);
+ continue;
+ }
+ drop(state);
+ }
+
+ let child_metadata = match self.fs.metadata(&child_abs_path).await {
+ Ok(Some(metadata)) => metadata,
+ Ok(None) => continue,
+ Err(err) => {
+ log::error!("error processing {child_abs_path:?}: {err:?}");
+ continue;
+ }
+ };
+
let mut child_entry = Entry::new(
child_path.clone(),
&child_metadata,
@@ -3653,19 +3775,16 @@ impl BackgroundScanner {
self.next_entry_id.as_ref(),
state.snapshot.root_char_bag,
);
- fs_entry.is_ignored = ignore_stack.is_all();
+ let is_dir = fs_entry.is_dir();
+ fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
- if !fs_entry.is_ignored {
- if !fs_entry.is_dir() {
- if let Some((work_dir, repo)) =
- state.snapshot.local_repo_for_path(&path)
- {
- if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
- let repo_path = RepoPath(repo_path.into());
- let repo = repo.repo_ptr.lock();
- fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
- }
+ if !is_dir && !fs_entry.is_ignored {
+ if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
+ if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
+ let repo_path = RepoPath(repo_path.into());
+ let repo = repo.repo_ptr.lock();
+ fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
}
}
}
@@ -3824,8 +3943,7 @@ impl BackgroundScanner {
ignore_stack.clone()
};
- // Scan any directories that were previously ignored and weren't
- // previously scanned.
+        // Scan any directories that were previously ignored, are no longer ignored, and weren't previously scanned.
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
let state = self.state.lock();
if state.should_scan_directory(&entry) {
@@ -4001,6 +4119,12 @@ impl BackgroundScanner {
}
}
+fn is_git_related(abs_path: &Path) -> bool {
+ abs_path
+ .components()
+ .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
+}
+
fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
let mut result = root_char_bag;
result.extend(
@@ -1,2141 +1,2310 @@
-// use crate::{
-// worktree::{Event, Snapshot, WorktreeModelHandle},
-// Entry, EntryKind, PathChange, Worktree,
-// };
-// use anyhow::Result;
-// use client2::Client;
-// use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
-// use git::GITIGNORE;
-// use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
-// use parking_lot::Mutex;
-// use postage::stream::Stream;
-// use pretty_assertions::assert_eq;
-// use rand::prelude::*;
-// use serde_json::json;
-// use std::{
-// env,
-// fmt::Write,
-// mem,
-// path::{Path, PathBuf},
-// sync::Arc,
-// };
-// use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
-
-// #[gpui::test]
-// async fn test_traversal(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// ".gitignore": "a/b\n",
-// "a": {
-// "b": "",
-// "c": "",
-// }
-// }),
-// )
-// .await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// Path::new("/root"),
-// true,
-// fs,
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(false)
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![
-// Path::new(""),
-// Path::new(".gitignore"),
-// Path::new("a"),
-// Path::new("a/c"),
-// ]
-// );
-// assert_eq!(
-// tree.entries(true)
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![
-// Path::new(""),
-// Path::new(".gitignore"),
-// Path::new("a"),
-// Path::new("a/b"),
-// Path::new("a/c"),
-// ]
-// );
-// })
-// }
-
-// #[gpui::test]
-// async fn test_descendent_entries(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// "a": "",
-// "b": {
-// "c": {
-// "d": ""
-// },
-// "e": {}
-// },
-// "f": "",
-// "g": {
-// "h": {}
-// },
-// "i": {
-// "j": {
-// "k": ""
-// },
-// "l": {
-
-// }
-// },
-// ".gitignore": "i/j\n",
-// }),
-// )
-// .await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// Path::new("/root"),
-// true,
-// fs,
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.descendent_entries(false, false, Path::new("b"))
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![Path::new("b/c/d"),]
-// );
-// assert_eq!(
-// tree.descendent_entries(true, false, Path::new("b"))
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![
-// Path::new("b"),
-// Path::new("b/c"),
-// Path::new("b/c/d"),
-// Path::new("b/e"),
-// ]
-// );
-
-// assert_eq!(
-// tree.descendent_entries(false, false, Path::new("g"))
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// Vec::<PathBuf>::new()
-// );
-// assert_eq!(
-// tree.descendent_entries(true, false, Path::new("g"))
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![Path::new("g"), Path::new("g/h"),]
-// );
-// });
-
-// // Expand gitignored directory.
-// tree.read_with(cx, |tree, _| {
-// tree.as_local()
-// .unwrap()
-// .refresh_entries_for_paths(vec![Path::new("i/j").into()])
-// })
-// .recv()
-// .await;
-
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.descendent_entries(false, false, Path::new("i"))
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// Vec::<PathBuf>::new()
-// );
-// assert_eq!(
-// tree.descendent_entries(false, true, Path::new("i"))
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![Path::new("i/j/k")]
-// );
-// assert_eq!(
-// tree.descendent_entries(true, false, Path::new("i"))
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![Path::new("i"), Path::new("i/l"),]
-// );
-// })
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// "lib": {
-// "a": {
-// "a.txt": ""
-// },
-// "b": {
-// "b.txt": ""
-// }
-// }
-// }),
-// )
-// .await;
-// fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
-// fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// Path::new("/root"),
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(false)
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![
-// Path::new(""),
-// Path::new("lib"),
-// Path::new("lib/a"),
-// Path::new("lib/a/a.txt"),
-// Path::new("lib/a/lib"),
-// Path::new("lib/b"),
-// Path::new("lib/b/b.txt"),
-// Path::new("lib/b/lib"),
-// ]
-// );
-// });
-
-// fs.rename(
-// Path::new("/root/lib/a/lib"),
-// Path::new("/root/lib/a/lib-2"),
-// Default::default(),
-// )
-// .await
-// .unwrap();
-// executor.run_until_parked();
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(false)
-// .map(|entry| entry.path.as_ref())
-// .collect::<Vec<_>>(),
-// vec![
-// Path::new(""),
-// Path::new("lib"),
-// Path::new("lib/a"),
-// Path::new("lib/a/a.txt"),
-// Path::new("lib/a/lib-2"),
-// Path::new("lib/b"),
-// Path::new("lib/b/b.txt"),
-// Path::new("lib/b/lib"),
-// ]
-// );
-// });
-// }
-
-// #[gpui::test]
-// async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// "dir1": {
-// "deps": {
-// // symlinks here
-// },
-// "src": {
-// "a.rs": "",
-// "b.rs": "",
-// },
-// },
-// "dir2": {
-// "src": {
-// "c.rs": "",
-// "d.rs": "",
-// }
-// },
-// "dir3": {
-// "deps": {},
-// "src": {
-// "e.rs": "",
-// "f.rs": "",
-// },
-// }
-// }),
-// )
-// .await;
-
-// // These symlinks point to directories outside of the worktree's root, dir1.
-// fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
-// .await;
-// fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
-// .await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// Path::new("/root/dir1"),
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// let tree_updates = Arc::new(Mutex::new(Vec::new()));
-// tree.update(cx, |_, cx| {
-// let tree_updates = tree_updates.clone();
-// cx.subscribe(&tree, move |_, _, event, _| {
-// if let Event::UpdatedEntries(update) = event {
-// tree_updates.lock().extend(
-// update
-// .iter()
-// .map(|(path, _, change)| (path.clone(), *change)),
-// );
-// }
-// })
-// .detach();
-// });
-
-// // The symlinked directories are not scanned by default.
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|entry| (entry.path.as_ref(), entry.is_external))
-// .collect::<Vec<_>>(),
-// vec![
-// (Path::new(""), false),
-// (Path::new("deps"), false),
-// (Path::new("deps/dep-dir2"), true),
-// (Path::new("deps/dep-dir3"), true),
-// (Path::new("src"), false),
-// (Path::new("src/a.rs"), false),
-// (Path::new("src/b.rs"), false),
-// ]
-// );
-
-// assert_eq!(
-// tree.entry_for_path("deps/dep-dir2").unwrap().kind,
-// EntryKind::UnloadedDir
-// );
-// });
-
-// // Expand one of the symlinked directories.
-// tree.read_with(cx, |tree, _| {
-// tree.as_local()
-// .unwrap()
-// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
-// })
-// .recv()
-// .await;
-
-// // The expanded directory's contents are loaded. Subdirectories are
-// // not scanned yet.
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|entry| (entry.path.as_ref(), entry.is_external))
-// .collect::<Vec<_>>(),
-// vec![
-// (Path::new(""), false),
-// (Path::new("deps"), false),
-// (Path::new("deps/dep-dir2"), true),
-// (Path::new("deps/dep-dir3"), true),
-// (Path::new("deps/dep-dir3/deps"), true),
-// (Path::new("deps/dep-dir3/src"), true),
-// (Path::new("src"), false),
-// (Path::new("src/a.rs"), false),
-// (Path::new("src/b.rs"), false),
-// ]
-// );
-// });
-// assert_eq!(
-// mem::take(&mut *tree_updates.lock()),
-// &[
-// (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
-// (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
-// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
-// ]
-// );
-
-// // Expand a subdirectory of one of the symlinked directories.
-// tree.read_with(cx, |tree, _| {
-// tree.as_local()
-// .unwrap()
-// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
-// })
-// .recv()
-// .await;
-
-// // The expanded subdirectory's contents are loaded.
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|entry| (entry.path.as_ref(), entry.is_external))
-// .collect::<Vec<_>>(),
-// vec![
-// (Path::new(""), false),
-// (Path::new("deps"), false),
-// (Path::new("deps/dep-dir2"), true),
-// (Path::new("deps/dep-dir3"), true),
-// (Path::new("deps/dep-dir3/deps"), true),
-// (Path::new("deps/dep-dir3/src"), true),
-// (Path::new("deps/dep-dir3/src/e.rs"), true),
-// (Path::new("deps/dep-dir3/src/f.rs"), true),
-// (Path::new("src"), false),
-// (Path::new("src/a.rs"), false),
-// (Path::new("src/b.rs"), false),
-// ]
-// );
-// });
-
-// assert_eq!(
-// mem::take(&mut *tree_updates.lock()),
-// &[
-// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
-// (
-// Path::new("deps/dep-dir3/src/e.rs").into(),
-// PathChange::Loaded
-// ),
-// (
-// Path::new("deps/dep-dir3/src/f.rs").into(),
-// PathChange::Loaded
-// )
-// ]
-// );
-// }
-
-// #[gpui::test]
-// async fn test_open_gitignored_files(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// ".gitignore": "node_modules\n",
-// "one": {
-// "node_modules": {
-// "a": {
-// "a1.js": "a1",
-// "a2.js": "a2",
-// },
-// "b": {
-// "b1.js": "b1",
-// "b2.js": "b2",
-// },
-// "c": {
-// "c1.js": "c1",
-// "c2.js": "c2",
-// }
-// },
-// },
-// "two": {
-// "x.js": "",
-// "y.js": "",
-// },
-// }),
-// )
-// .await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// Path::new("/root"),
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-// .collect::<Vec<_>>(),
-// vec![
-// (Path::new(""), false),
-// (Path::new(".gitignore"), false),
-// (Path::new("one"), false),
-// (Path::new("one/node_modules"), true),
-// (Path::new("two"), false),
-// (Path::new("two/x.js"), false),
-// (Path::new("two/y.js"), false),
-// ]
-// );
-// });
-
-// // Open a file that is nested inside of a gitignored directory that
-// // has not yet been expanded.
-// let prev_read_dir_count = fs.read_dir_call_count();
-// let buffer = tree
-// .update(cx, |tree, cx| {
-// tree.as_local_mut()
-// .unwrap()
-// .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
-// })
-// .await
-// .unwrap();
-
-// tree.read_with(cx, |tree, cx| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-// .collect::<Vec<_>>(),
-// vec![
-// (Path::new(""), false),
-// (Path::new(".gitignore"), false),
-// (Path::new("one"), false),
-// (Path::new("one/node_modules"), true),
-// (Path::new("one/node_modules/a"), true),
-// (Path::new("one/node_modules/b"), true),
-// (Path::new("one/node_modules/b/b1.js"), true),
-// (Path::new("one/node_modules/b/b2.js"), true),
-// (Path::new("one/node_modules/c"), true),
-// (Path::new("two"), false),
-// (Path::new("two/x.js"), false),
-// (Path::new("two/y.js"), false),
-// ]
-// );
-
-// assert_eq!(
-// buffer.read(cx).file().unwrap().path().as_ref(),
-// Path::new("one/node_modules/b/b1.js")
-// );
-
-// // Only the newly-expanded directories are scanned.
-// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
-// });
-
-// // Open another file in a different subdirectory of the same
-// // gitignored directory.
-// let prev_read_dir_count = fs.read_dir_call_count();
-// let buffer = tree
-// .update(cx, |tree, cx| {
-// tree.as_local_mut()
-// .unwrap()
-// .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
-// })
-// .await
-// .unwrap();
-
-// tree.read_with(cx, |tree, cx| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-// .collect::<Vec<_>>(),
-// vec![
-// (Path::new(""), false),
-// (Path::new(".gitignore"), false),
-// (Path::new("one"), false),
-// (Path::new("one/node_modules"), true),
-// (Path::new("one/node_modules/a"), true),
-// (Path::new("one/node_modules/a/a1.js"), true),
-// (Path::new("one/node_modules/a/a2.js"), true),
-// (Path::new("one/node_modules/b"), true),
-// (Path::new("one/node_modules/b/b1.js"), true),
-// (Path::new("one/node_modules/b/b2.js"), true),
-// (Path::new("one/node_modules/c"), true),
-// (Path::new("two"), false),
-// (Path::new("two/x.js"), false),
-// (Path::new("two/y.js"), false),
-// ]
-// );
-
-// assert_eq!(
-// buffer.read(cx).file().unwrap().path().as_ref(),
-// Path::new("one/node_modules/a/a2.js")
-// );
-
-// // Only the newly-expanded directory is scanned.
-// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
-// });
-
-// // No work happens when files and directories change within an unloaded directory.
-// let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
-// fs.create_dir("/root/one/node_modules/c/lib".as_ref())
-// .await
-// .unwrap();
-// cx.foreground().run_until_parked();
-// assert_eq!(
-// fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
-// 0
-// );
-// }
-
-// #[gpui::test]
-// async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// ".gitignore": "node_modules\n",
-// "a": {
-// "a.js": "",
-// },
-// "b": {
-// "b.js": "",
-// },
-// "node_modules": {
-// "c": {
-// "c.js": "",
-// },
-// "d": {
-// "d.js": "",
-// "e": {
-// "e1.js": "",
-// "e2.js": "",
-// },
-// "f": {
-// "f1.js": "",
-// "f2.js": "",
-// }
-// },
-// },
-// }),
-// )
-// .await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// Path::new("/root"),
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// // Open a file within the gitignored directory, forcing some of its
-// // subdirectories to be read, but not all.
-// let read_dir_count_1 = fs.read_dir_call_count();
-// tree.read_with(cx, |tree, _| {
-// tree.as_local()
-// .unwrap()
-// .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
-// })
-// .recv()
-// .await;
-
-// // Those subdirectories are now loaded.
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|e| (e.path.as_ref(), e.is_ignored))
-// .collect::<Vec<_>>(),
-// &[
-// (Path::new(""), false),
-// (Path::new(".gitignore"), false),
-// (Path::new("a"), false),
-// (Path::new("a/a.js"), false),
-// (Path::new("b"), false),
-// (Path::new("b/b.js"), false),
-// (Path::new("node_modules"), true),
-// (Path::new("node_modules/c"), true),
-// (Path::new("node_modules/d"), true),
-// (Path::new("node_modules/d/d.js"), true),
-// (Path::new("node_modules/d/e"), true),
-// (Path::new("node_modules/d/f"), true),
-// ]
-// );
-// });
-// let read_dir_count_2 = fs.read_dir_call_count();
-// assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
-
-// // Update the gitignore so that node_modules is no longer ignored,
-// // but a subdirectory is ignored
-// fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
-// .await
-// .unwrap();
-// cx.foreground().run_until_parked();
-
-// // All of the directories that are no longer ignored are now loaded.
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(
-// tree.entries(true)
-// .map(|e| (e.path.as_ref(), e.is_ignored))
-// .collect::<Vec<_>>(),
-// &[
-// (Path::new(""), false),
-// (Path::new(".gitignore"), false),
-// (Path::new("a"), false),
-// (Path::new("a/a.js"), false),
-// (Path::new("b"), false),
-// (Path::new("b/b.js"), false),
-// // This directory is no longer ignored
-// (Path::new("node_modules"), false),
-// (Path::new("node_modules/c"), false),
-// (Path::new("node_modules/c/c.js"), false),
-// (Path::new("node_modules/d"), false),
-// (Path::new("node_modules/d/d.js"), false),
-// // This subdirectory is now ignored
-// (Path::new("node_modules/d/e"), true),
-// (Path::new("node_modules/d/f"), false),
-// (Path::new("node_modules/d/f/f1.js"), false),
-// (Path::new("node_modules/d/f/f2.js"), false),
-// ]
-// );
-// });
-
-// // Each of the newly-loaded directories is scanned only once.
-// let read_dir_count_3 = fs.read_dir_call_count();
-// assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
-// "tree": {
-// ".git": {},
-// ".gitignore": "ignored-dir\n",
-// "tracked-dir": {
-// "tracked-file1": "",
-// "ancestor-ignored-file1": "",
-// },
-// "ignored-dir": {
-// "ignored-file1": ""
-// }
-// }
-// }),
-// )
-// .await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// "/root/tree".as_ref(),
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// tree.read_with(cx, |tree, _| {
-// tree.as_local()
-// .unwrap()
-// .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
-// })
-// .recv()
-// .await;
-
-// cx.read(|cx| {
-// let tree = tree.read(cx);
-// assert!(
-// !tree
-// .entry_for_path("tracked-dir/tracked-file1")
-// .unwrap()
-// .is_ignored
-// );
-// assert!(
-// tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
-// .unwrap()
-// .is_ignored
-// );
-// assert!(
-// tree.entry_for_path("ignored-dir/ignored-file1")
-// .unwrap()
-// .is_ignored
-// );
-// });
-
-// fs.create_file(
-// "/root/tree/tracked-dir/tracked-file2".as_ref(),
-// Default::default(),
-// )
-// .await
-// .unwrap();
-// fs.create_file(
-// "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
-// Default::default(),
-// )
-// .await
-// .unwrap();
-// fs.create_file(
-// "/root/tree/ignored-dir/ignored-file2".as_ref(),
-// Default::default(),
-// )
-// .await
-// .unwrap();
-
-// cx.foreground().run_until_parked();
-// cx.read(|cx| {
-// let tree = tree.read(cx);
-// assert!(
-// !tree
-// .entry_for_path("tracked-dir/tracked-file2")
-// .unwrap()
-// .is_ignored
-// );
-// assert!(
-// tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
-// .unwrap()
-// .is_ignored
-// );
-// assert!(
-// tree.entry_for_path("ignored-dir/ignored-file2")
-// .unwrap()
-// .is_ignored
-// );
-// assert!(tree.entry_for_path(".git").unwrap().is_ignored);
-// });
-// }
-
-// #[gpui::test]
-// async fn test_write_file(cx: &mut TestAppContext) {
-// let dir = temp_tree(json!({
-// ".git": {},
-// ".gitignore": "ignored-dir\n",
-// "tracked-dir": {},
-// "ignored-dir": {}
-// }));
-
-// let tree = Worktree::local(
-// build_client(cx),
-// dir.path(),
-// true,
-// Arc::new(RealFs),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-// tree.flush_fs_events(cx).await;
-
-// tree.update(cx, |tree, cx| {
-// tree.as_local().unwrap().write_file(
-// Path::new("tracked-dir/file.txt"),
-// "hello".into(),
-// Default::default(),
-// cx,
-// )
-// })
-// .await
-// .unwrap();
-// tree.update(cx, |tree, cx| {
-// tree.as_local().unwrap().write_file(
-// Path::new("ignored-dir/file.txt"),
-// "world".into(),
-// Default::default(),
-// cx,
-// )
-// })
-// .await
-// .unwrap();
-
-// tree.read_with(cx, |tree, _| {
-// let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
-// let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
-// assert!(!tracked.is_ignored);
-// assert!(ignored.is_ignored);
-// });
-// }
-
-// #[gpui::test(iterations = 30)]
-// async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// "b": {},
-// "c": {},
-// "d": {},
-// }),
-// )
-// .await;
-
-// let tree = Worktree::local(
-// build_client(cx),
-// "/root".as_ref(),
-// true,
-// fs,
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// let snapshot1 = tree.update(cx, |tree, cx| {
-// let tree = tree.as_local_mut().unwrap();
-// let snapshot = Arc::new(Mutex::new(tree.snapshot()));
-// let _ = tree.observe_updates(0, cx, {
-// let snapshot = snapshot.clone();
-// move |update| {
-// snapshot.lock().apply_remote_update(update).unwrap();
-// async { true }
-// }
-// });
-// snapshot
-// });
-
-// let entry = tree
-// .update(cx, |tree, cx| {
-// tree.as_local_mut()
-// .unwrap()
-// .create_entry("a/e".as_ref(), true, cx)
-// })
-// .await
-// .unwrap();
-// assert!(entry.is_dir());
-
-// cx.foreground().run_until_parked();
-// tree.read_with(cx, |tree, _| {
-// assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
-// });
-
-// let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
-// assert_eq!(
-// snapshot1.lock().entries(true).collect::<Vec<_>>(),
-// snapshot2.entries(true).collect::<Vec<_>>()
-// );
-// }
-
-// #[gpui::test]
-// async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
-// let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-// let fs_fake = FakeFs::new(cx.background());
-// fs_fake
-// .insert_tree(
-// "/root",
-// json!({
-// "a": {},
-// }),
-// )
-// .await;
-
-// let tree_fake = Worktree::local(
-// client_fake,
-// "/root".as_ref(),
-// true,
-// fs_fake,
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// let entry = tree_fake
-// .update(cx, |tree, cx| {
-// tree.as_local_mut()
-// .unwrap()
-// .create_entry("a/b/c/d.txt".as_ref(), false, cx)
-// })
-// .await
-// .unwrap();
-// assert!(entry.is_file());
-
-// cx.foreground().run_until_parked();
-// tree_fake.read_with(cx, |tree, _| {
-// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
-// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
-// assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
-// });
-
-// let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-// let fs_real = Arc::new(RealFs);
-// let temp_root = temp_tree(json!({
-// "a": {}
-// }));
-
-// let tree_real = Worktree::local(
-// client_real,
-// temp_root.path(),
-// true,
-// fs_real,
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// let entry = tree_real
-// .update(cx, |tree, cx| {
-// tree.as_local_mut()
-// .unwrap()
-// .create_entry("a/b/c/d.txt".as_ref(), false, cx)
-// })
-// .await
-// .unwrap();
-// assert!(entry.is_file());
-
-// cx.foreground().run_until_parked();
-// tree_real.read_with(cx, |tree, _| {
-// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
-// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
-// assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
-// });
-
-// // Test smallest change
-// let entry = tree_real
-// .update(cx, |tree, cx| {
-// tree.as_local_mut()
-// .unwrap()
-// .create_entry("a/b/c/e.txt".as_ref(), false, cx)
-// })
-// .await
-// .unwrap();
-// assert!(entry.is_file());
-
-// cx.foreground().run_until_parked();
-// tree_real.read_with(cx, |tree, _| {
-// assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
-// });
-
-// // Test largest change
-// let entry = tree_real
-// .update(cx, |tree, cx| {
-// tree.as_local_mut()
-// .unwrap()
-// .create_entry("d/e/f/g.txt".as_ref(), false, cx)
-// })
-// .await
-// .unwrap();
-// assert!(entry.is_file());
-
-// cx.foreground().run_until_parked();
-// tree_real.read_with(cx, |tree, _| {
-// assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
-// assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
-// assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
-// assert!(tree.entry_for_path("d/").unwrap().is_dir());
-// });
-// }
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_worktree_operations_during_initial_scan(
-// cx: &mut TestAppContext,
-// mut rng: StdRng,
-// ) {
-// let operations = env::var("OPERATIONS")
-// .map(|o| o.parse().unwrap())
-// .unwrap_or(5);
-// let initial_entries = env::var("INITIAL_ENTRIES")
-// .map(|o| o.parse().unwrap())
-// .unwrap_or(20);
-
-// let root_dir = Path::new("/test");
-// let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-// fs.as_fake().insert_tree(root_dir, json!({})).await;
-// for _ in 0..initial_entries {
-// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-// }
-// log::info!("generated initial tree");
-
-// let worktree = Worktree::local(
-// build_client(cx),
-// root_dir,
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
-// let updates = Arc::new(Mutex::new(Vec::new()));
-// worktree.update(cx, |tree, cx| {
-// check_worktree_change_events(tree, cx);
-
-// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-// let updates = updates.clone();
-// move |update| {
-// updates.lock().push(update);
-// async { true }
-// }
-// });
-// });
-
-// for _ in 0..operations {
-// worktree
-// .update(cx, |worktree, cx| {
-// randomly_mutate_worktree(worktree, &mut rng, cx)
-// })
-// .await
-// .log_err();
-// worktree.read_with(cx, |tree, _| {
-// tree.as_local().unwrap().snapshot().check_invariants(true)
-// });
-
-// if rng.gen_bool(0.6) {
-// snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
-// }
-// }
-
-// worktree
-// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-// .await;
-
-// cx.foreground().run_until_parked();
-
-// let final_snapshot = worktree.read_with(cx, |tree, _| {
-// let tree = tree.as_local().unwrap();
-// let snapshot = tree.snapshot();
-// snapshot.check_invariants(true);
-// snapshot
-// });
-
-// for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
-// let mut updated_snapshot = snapshot.clone();
-// for update in updates.lock().iter() {
-// if update.scan_id >= updated_snapshot.scan_id() as u64 {
-// updated_snapshot
-// .apply_remote_update(update.clone())
-// .unwrap();
-// }
-// }
-
-// assert_eq!(
-// updated_snapshot.entries(true).collect::<Vec<_>>(),
-// final_snapshot.entries(true).collect::<Vec<_>>(),
-// "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
-// );
-// }
-// }
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
-// let operations = env::var("OPERATIONS")
-// .map(|o| o.parse().unwrap())
-// .unwrap_or(40);
-// let initial_entries = env::var("INITIAL_ENTRIES")
-// .map(|o| o.parse().unwrap())
-// .unwrap_or(20);
-
-// let root_dir = Path::new("/test");
-// let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-// fs.as_fake().insert_tree(root_dir, json!({})).await;
-// for _ in 0..initial_entries {
-// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-// }
-// log::info!("generated initial tree");
-
-// let worktree = Worktree::local(
-// build_client(cx),
-// root_dir,
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// let updates = Arc::new(Mutex::new(Vec::new()));
-// worktree.update(cx, |tree, cx| {
-// check_worktree_change_events(tree, cx);
-
-// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-// let updates = updates.clone();
-// move |update| {
-// updates.lock().push(update);
-// async { true }
-// }
-// });
-// });
-
-// worktree
-// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-// .await;
-
-// fs.as_fake().pause_events();
-// let mut snapshots = Vec::new();
-// let mut mutations_len = operations;
-// while mutations_len > 1 {
-// if rng.gen_bool(0.2) {
-// worktree
-// .update(cx, |worktree, cx| {
-// randomly_mutate_worktree(worktree, &mut rng, cx)
-// })
-// .await
-// .log_err();
-// } else {
-// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-// }
-
-// let buffered_event_count = fs.as_fake().buffered_event_count();
-// if buffered_event_count > 0 && rng.gen_bool(0.3) {
-// let len = rng.gen_range(0..=buffered_event_count);
-// log::info!("flushing {} events", len);
-// fs.as_fake().flush_events(len);
-// } else {
-// randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
-// mutations_len -= 1;
-// }
-
-// cx.foreground().run_until_parked();
-// if rng.gen_bool(0.2) {
-// log::info!("storing snapshot {}", snapshots.len());
-// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-// snapshots.push(snapshot);
-// }
-// }
-
-// log::info!("quiescing");
-// fs.as_fake().flush_events(usize::MAX);
-// cx.foreground().run_until_parked();
-
-// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-// snapshot.check_invariants(true);
-// let expanded_paths = snapshot
-// .expanded_entries()
-// .map(|e| e.path.clone())
-// .collect::<Vec<_>>();
-
-// {
-// let new_worktree = Worktree::local(
-// build_client(cx),
-// root_dir,
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-// new_worktree
-// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-// .await;
-// new_worktree
-// .update(cx, |tree, _| {
-// tree.as_local_mut()
-// .unwrap()
-// .refresh_entries_for_paths(expanded_paths)
-// })
-// .recv()
-// .await;
-// let new_snapshot =
-// new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-// assert_eq!(
-// snapshot.entries_without_ids(true),
-// new_snapshot.entries_without_ids(true)
-// );
-// }
-
-// for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
-// for update in updates.lock().iter() {
-// if update.scan_id >= prev_snapshot.scan_id() as u64 {
-// prev_snapshot.apply_remote_update(update.clone()).unwrap();
-// }
-// }
-
-// assert_eq!(
-// prev_snapshot
-// .entries(true)
-// .map(ignore_pending_dir)
-// .collect::<Vec<_>>(),
-// snapshot
-// .entries(true)
-// .map(ignore_pending_dir)
-// .collect::<Vec<_>>(),
-// "wrong updates after snapshot {i}: {updates:#?}",
-// );
-// }
-
-// fn ignore_pending_dir(entry: &Entry) -> Entry {
-// let mut entry = entry.clone();
-// if entry.kind.is_dir() {
-// entry.kind = EntryKind::Dir
-// }
-// entry
-// }
-// }
-
-// // The worktree's `UpdatedEntries` event can be used to follow along with
-// // all changes to the worktree's snapshot.
-// fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
-// let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
-// cx.subscribe(&cx.handle(), move |tree, _, event, _| {
-// if let Event::UpdatedEntries(changes) = event {
-// for (path, _, change_type) in changes.iter() {
-// let entry = tree.entry_for_path(&path).cloned();
-// let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
-// Ok(ix) | Err(ix) => ix,
-// };
-// match change_type {
-// PathChange::Added => entries.insert(ix, entry.unwrap()),
-// PathChange::Removed => drop(entries.remove(ix)),
-// PathChange::Updated => {
-// let entry = entry.unwrap();
-// let existing_entry = entries.get_mut(ix).unwrap();
-// assert_eq!(existing_entry.path, entry.path);
-// *existing_entry = entry;
-// }
-// PathChange::AddedOrUpdated | PathChange::Loaded => {
-// let entry = entry.unwrap();
-// if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
-// *entries.get_mut(ix).unwrap() = entry;
-// } else {
-// entries.insert(ix, entry);
-// }
-// }
-// }
-// }
-
-// let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
-// assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
-// }
-// })
-// .detach();
-// }
-
-// fn randomly_mutate_worktree(
-// worktree: &mut Worktree,
-// rng: &mut impl Rng,
-// cx: &mut ModelContext<Worktree>,
-// ) -> Task<Result<()>> {
-// log::info!("mutating worktree");
-// let worktree = worktree.as_local_mut().unwrap();
-// let snapshot = worktree.snapshot();
-// let entry = snapshot.entries(false).choose(rng).unwrap();
-
-// match rng.gen_range(0_u32..100) {
-// 0..=33 if entry.path.as_ref() != Path::new("") => {
-// log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
-// worktree.delete_entry(entry.id, cx).unwrap()
-// }
-// ..=66 if entry.path.as_ref() != Path::new("") => {
-// let other_entry = snapshot.entries(false).choose(rng).unwrap();
-// let new_parent_path = if other_entry.is_dir() {
-// other_entry.path.clone()
-// } else {
-// other_entry.path.parent().unwrap().into()
-// };
-// let mut new_path = new_parent_path.join(random_filename(rng));
-// if new_path.starts_with(&entry.path) {
-// new_path = random_filename(rng).into();
-// }
-
-// log::info!(
-// "renaming entry {:?} ({}) to {:?}",
-// entry.path,
-// entry.id.0,
-// new_path
-// );
-// let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
-// cx.foreground().spawn(async move {
-// task.await?;
-// Ok(())
-// })
-// }
-// _ => {
-// let task = if entry.is_dir() {
-// let child_path = entry.path.join(random_filename(rng));
-// let is_dir = rng.gen_bool(0.3);
-// log::info!(
-// "creating {} at {:?}",
-// if is_dir { "dir" } else { "file" },
-// child_path,
-// );
-// worktree.create_entry(child_path, is_dir, cx)
-// } else {
-// log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
-// worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
-// };
-// cx.foreground().spawn(async move {
-// task.await?;
-// Ok(())
-// })
-// }
-// }
-// }
-
-// async fn randomly_mutate_fs(
-// fs: &Arc<dyn Fs>,
-// root_path: &Path,
-// insertion_probability: f64,
-// rng: &mut impl Rng,
-// ) {
-// log::info!("mutating fs");
-// let mut files = Vec::new();
-// let mut dirs = Vec::new();
-// for path in fs.as_fake().paths(false) {
-// if path.starts_with(root_path) {
-// if fs.is_file(&path).await {
-// files.push(path);
-// } else {
-// dirs.push(path);
-// }
-// }
-// }
-
-// if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
-// let path = dirs.choose(rng).unwrap();
-// let new_path = path.join(random_filename(rng));
-
-// if rng.gen() {
-// log::info!(
-// "creating dir {:?}",
-// new_path.strip_prefix(root_path).unwrap()
-// );
-// fs.create_dir(&new_path).await.unwrap();
-// } else {
-// log::info!(
-// "creating file {:?}",
-// new_path.strip_prefix(root_path).unwrap()
-// );
-// fs.create_file(&new_path, Default::default()).await.unwrap();
-// }
-// } else if rng.gen_bool(0.05) {
-// let ignore_dir_path = dirs.choose(rng).unwrap();
-// let ignore_path = ignore_dir_path.join(&*GITIGNORE);
-
-// let subdirs = dirs
-// .iter()
-// .filter(|d| d.starts_with(&ignore_dir_path))
-// .cloned()
-// .collect::<Vec<_>>();
-// let subfiles = files
-// .iter()
-// .filter(|d| d.starts_with(&ignore_dir_path))
-// .cloned()
-// .collect::<Vec<_>>();
-// let files_to_ignore = {
-// let len = rng.gen_range(0..=subfiles.len());
-// subfiles.choose_multiple(rng, len)
-// };
-// let dirs_to_ignore = {
-// let len = rng.gen_range(0..subdirs.len());
-// subdirs.choose_multiple(rng, len)
-// };
-
-// let mut ignore_contents = String::new();
-// for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
-// writeln!(
-// ignore_contents,
-// "{}",
-// path_to_ignore
-// .strip_prefix(&ignore_dir_path)
-// .unwrap()
-// .to_str()
-// .unwrap()
-// )
-// .unwrap();
-// }
-// log::info!(
-// "creating gitignore {:?} with contents:\n{}",
-// ignore_path.strip_prefix(&root_path).unwrap(),
-// ignore_contents
-// );
-// fs.save(
-// &ignore_path,
-// &ignore_contents.as_str().into(),
-// Default::default(),
-// )
-// .await
-// .unwrap();
-// } else {
-// let old_path = {
-// let file_path = files.choose(rng);
-// let dir_path = dirs[1..].choose(rng);
-// file_path.into_iter().chain(dir_path).choose(rng).unwrap()
-// };
-
-// let is_rename = rng.gen();
-// if is_rename {
-// let new_path_parent = dirs
-// .iter()
-// .filter(|d| !d.starts_with(old_path))
-// .choose(rng)
-// .unwrap();
-
-// let overwrite_existing_dir =
-// !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
-// let new_path = if overwrite_existing_dir {
-// fs.remove_dir(
-// &new_path_parent,
-// RemoveOptions {
-// recursive: true,
-// ignore_if_not_exists: true,
-// },
-// )
-// .await
-// .unwrap();
-// new_path_parent.to_path_buf()
-// } else {
-// new_path_parent.join(random_filename(rng))
-// };
-
-// log::info!(
-// "renaming {:?} to {}{:?}",
-// old_path.strip_prefix(&root_path).unwrap(),
-// if overwrite_existing_dir {
-// "overwrite "
-// } else {
-// ""
-// },
-// new_path.strip_prefix(&root_path).unwrap()
-// );
-// fs.rename(
-// &old_path,
-// &new_path,
-// fs::RenameOptions {
-// overwrite: true,
-// ignore_if_exists: true,
-// },
-// )
-// .await
-// .unwrap();
-// } else if fs.is_file(&old_path).await {
-// log::info!(
-// "deleting file {:?}",
-// old_path.strip_prefix(&root_path).unwrap()
-// );
-// fs.remove_file(old_path, Default::default()).await.unwrap();
-// } else {
-// log::info!(
-// "deleting dir {:?}",
-// old_path.strip_prefix(&root_path).unwrap()
-// );
-// fs.remove_dir(
-// &old_path,
-// RemoveOptions {
-// recursive: true,
-// ignore_if_not_exists: true,
-// },
-// )
-// .await
-// .unwrap();
-// }
-// }
-// }
-
-// fn random_filename(rng: &mut impl Rng) -> String {
-// (0..6)
-// .map(|_| rng.sample(rand::distributions::Alphanumeric))
-// .map(char::from)
-// .collect()
-// }
-
-// #[gpui::test]
-// async fn test_rename_work_directory(cx: &mut TestAppContext) {
-// let root = temp_tree(json!({
-// "projects": {
-// "project1": {
-// "a": "",
-// "b": "",
-// }
-// },
-
-// }));
-// let root_path = root.path();
-
-// let tree = Worktree::local(
-// build_client(cx),
-// root_path,
-// true,
-// Arc::new(RealFs),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// let repo = git_init(&root_path.join("projects/project1"));
-// git_add("a", &repo);
-// git_commit("init", &repo);
-// std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// tree.flush_fs_events(cx).await;
-
-// cx.read(|cx| {
-// let tree = tree.read(cx);
-// let (work_dir, _) = tree.repositories().next().unwrap();
-// assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
-// assert_eq!(
-// tree.status_for_file(Path::new("projects/project1/a")),
-// Some(GitFileStatus::Modified)
-// );
-// assert_eq!(
-// tree.status_for_file(Path::new("projects/project1/b")),
-// Some(GitFileStatus::Added)
-// );
-// });
-
-// std::fs::rename(
-// root_path.join("projects/project1"),
-// root_path.join("projects/project2"),
-// )
-// .ok();
-// tree.flush_fs_events(cx).await;
-
-// cx.read(|cx| {
-// let tree = tree.read(cx);
-// let (work_dir, _) = tree.repositories().next().unwrap();
-// assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
-// assert_eq!(
-// tree.status_for_file(Path::new("projects/project2/a")),
-// Some(GitFileStatus::Modified)
-// );
-// assert_eq!(
-// tree.status_for_file(Path::new("projects/project2/b")),
-// Some(GitFileStatus::Added)
-// );
-// });
-// }
-
-// #[gpui::test]
-// async fn test_git_repository_for_path(cx: &mut TestAppContext) {
-// let root = temp_tree(json!({
-// "c.txt": "",
-// "dir1": {
-// ".git": {},
-// "deps": {
-// "dep1": {
-// ".git": {},
-// "src": {
-// "a.txt": ""
-// }
-// }
-// },
-// "src": {
-// "b.txt": ""
-// }
-// },
-// }));
-
-// let tree = Worktree::local(
-// build_client(cx),
-// root.path(),
-// true,
-// Arc::new(RealFs),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-// tree.flush_fs_events(cx).await;
-
-// tree.read_with(cx, |tree, _cx| {
-// let tree = tree.as_local().unwrap();
-
-// assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
-
-// let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
-// assert_eq!(
-// entry
-// .work_directory(tree)
-// .map(|directory| directory.as_ref().to_owned()),
-// Some(Path::new("dir1").to_owned())
-// );
-
-// let entry = tree
-// .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
-// .unwrap();
-// assert_eq!(
-// entry
-// .work_directory(tree)
-// .map(|directory| directory.as_ref().to_owned()),
-// Some(Path::new("dir1/deps/dep1").to_owned())
-// );
-
-// let entries = tree.files(false, 0);
-
-// let paths_with_repos = tree
-// .entries_with_repositories(entries)
-// .map(|(entry, repo)| {
-// (
-// entry.path.as_ref(),
-// repo.and_then(|repo| {
-// repo.work_directory(&tree)
-// .map(|work_directory| work_directory.0.to_path_buf())
-// }),
-// )
-// })
-// .collect::<Vec<_>>();
-
-// assert_eq!(
-// paths_with_repos,
-// &[
-// (Path::new("c.txt"), None),
-// (
-// Path::new("dir1/deps/dep1/src/a.txt"),
-// Some(Path::new("dir1/deps/dep1").into())
-// ),
-// (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
-// ]
-// );
-// });
-
-// let repo_update_events = Arc::new(Mutex::new(vec![]));
-// tree.update(cx, |_, cx| {
-// let repo_update_events = repo_update_events.clone();
-// cx.subscribe(&tree, move |_, _, event, _| {
-// if let Event::UpdatedGitRepositories(update) = event {
-// repo_update_events.lock().push(update.clone());
-// }
-// })
-// .detach();
-// });
-
-// std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
-// tree.flush_fs_events(cx).await;
-
-// assert_eq!(
-// repo_update_events.lock()[0]
-// .iter()
-// .map(|e| e.0.clone())
-// .collect::<Vec<Arc<Path>>>(),
-// vec![Path::new("dir1").into()]
-// );
-
-// std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
-// tree.flush_fs_events(cx).await;
-
-// tree.read_with(cx, |tree, _cx| {
-// let tree = tree.as_local().unwrap();
-
-// assert!(tree
-// .repository_for_path("dir1/src/b.txt".as_ref())
-// .is_none());
-// });
-// }
-
-// #[gpui::test]
-// async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
-// const IGNORE_RULE: &'static str = "**/target";
-
-// let root = temp_tree(json!({
-// "project": {
-// "a.txt": "a",
-// "b.txt": "bb",
-// "c": {
-// "d": {
-// "e.txt": "eee"
-// }
-// },
-// "f.txt": "ffff",
-// "target": {
-// "build_file": "???"
-// },
-// ".gitignore": IGNORE_RULE
-// },
-
-// }));
-
-// const A_TXT: &'static str = "a.txt";
-// const B_TXT: &'static str = "b.txt";
-// const E_TXT: &'static str = "c/d/e.txt";
-// const F_TXT: &'static str = "f.txt";
-// const DOTGITIGNORE: &'static str = ".gitignore";
-// const BUILD_FILE: &'static str = "target/build_file";
-// let project_path = Path::new("project");
-
-// // Set up git repository before creating the worktree.
-// let work_dir = root.path().join("project");
-// let mut repo = git_init(work_dir.as_path());
-// repo.add_ignore_rule(IGNORE_RULE).unwrap();
-// git_add(A_TXT, &repo);
-// git_add(E_TXT, &repo);
-// git_add(DOTGITIGNORE, &repo);
-// git_commit("Initial commit", &repo);
-
-// let tree = Worktree::local(
-// build_client(cx),
-// root.path(),
-// true,
-// Arc::new(RealFs),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// tree.flush_fs_events(cx).await;
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-// deterministic.run_until_parked();
-
-// // Check that the right git state is observed on startup
-// tree.read_with(cx, |tree, _cx| {
-// let snapshot = tree.snapshot();
-// assert_eq!(snapshot.repositories().count(), 1);
-// let (dir, _) = snapshot.repositories().next().unwrap();
-// assert_eq!(dir.as_ref(), Path::new("project"));
-
-// assert_eq!(
-// snapshot.status_for_file(project_path.join(B_TXT)),
-// Some(GitFileStatus::Added)
-// );
-// assert_eq!(
-// snapshot.status_for_file(project_path.join(F_TXT)),
-// Some(GitFileStatus::Added)
-// );
-// });
-
-// // Modify a file in the working copy.
-// std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
-// tree.flush_fs_events(cx).await;
-// deterministic.run_until_parked();
-
-// // The worktree detects that the file's git status has changed.
-// tree.read_with(cx, |tree, _cx| {
-// let snapshot = tree.snapshot();
-// assert_eq!(
-// snapshot.status_for_file(project_path.join(A_TXT)),
-// Some(GitFileStatus::Modified)
-// );
-// });
-
-// // Create a commit in the git repository.
-// git_add(A_TXT, &repo);
-// git_add(B_TXT, &repo);
-// git_commit("Committing modified and added", &repo);
-// tree.flush_fs_events(cx).await;
-// deterministic.run_until_parked();
-
-// // The worktree detects that the files' git status have changed.
-// tree.read_with(cx, |tree, _cx| {
-// let snapshot = tree.snapshot();
-// assert_eq!(
-// snapshot.status_for_file(project_path.join(F_TXT)),
-// Some(GitFileStatus::Added)
-// );
-// assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
-// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-// });
-
-// // Modify files in the working copy and perform git operations on other files.
-// git_reset(0, &repo);
-// git_remove_index(Path::new(B_TXT), &repo);
-// git_stash(&mut repo);
-// std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
-// std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
-// tree.flush_fs_events(cx).await;
-// deterministic.run_until_parked();
-
-// // Check that more complex repo changes are tracked
-// tree.read_with(cx, |tree, _cx| {
-// let snapshot = tree.snapshot();
-
-// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-// assert_eq!(
-// snapshot.status_for_file(project_path.join(B_TXT)),
-// Some(GitFileStatus::Added)
-// );
-// assert_eq!(
-// snapshot.status_for_file(project_path.join(E_TXT)),
-// Some(GitFileStatus::Modified)
-// );
-// });
-
-// std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
-// std::fs::remove_dir_all(work_dir.join("c")).unwrap();
-// std::fs::write(
-// work_dir.join(DOTGITIGNORE),
-// [IGNORE_RULE, "f.txt"].join("\n"),
-// )
-// .unwrap();
-
-// git_add(Path::new(DOTGITIGNORE), &repo);
-// git_commit("Committing modified git ignore", &repo);
-
-// tree.flush_fs_events(cx).await;
-// deterministic.run_until_parked();
-
-// let mut renamed_dir_name = "first_directory/second_directory";
-// const RENAMED_FILE: &'static str = "rf.txt";
-
-// std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
-// std::fs::write(
-// work_dir.join(renamed_dir_name).join(RENAMED_FILE),
-// "new-contents",
-// )
-// .unwrap();
-
-// tree.flush_fs_events(cx).await;
-// deterministic.run_until_parked();
-
-// tree.read_with(cx, |tree, _cx| {
-// let snapshot = tree.snapshot();
-// assert_eq!(
-// snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
-// Some(GitFileStatus::Added)
-// );
-// });
-
-// renamed_dir_name = "new_first_directory/second_directory";
-
-// std::fs::rename(
-// work_dir.join("first_directory"),
-// work_dir.join("new_first_directory"),
-// )
-// .unwrap();
-
-// tree.flush_fs_events(cx).await;
-// deterministic.run_until_parked();
-
-// tree.read_with(cx, |tree, _cx| {
-// let snapshot = tree.snapshot();
-
-// assert_eq!(
-// snapshot.status_for_file(
-// project_path
-// .join(Path::new(renamed_dir_name))
-// .join(RENAMED_FILE)
-// ),
-// Some(GitFileStatus::Added)
-// );
-// });
-// }
-
-// #[gpui::test]
-// async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
-// let fs = FakeFs::new(cx.background());
-// fs.insert_tree(
-// "/root",
-// json!({
-// ".git": {},
-// "a": {
-// "b": {
-// "c1.txt": "",
-// "c2.txt": "",
-// },
-// "d": {
-// "e1.txt": "",
-// "e2.txt": "",
-// "e3.txt": "",
-// }
-// },
-// "f": {
-// "no-status.txt": ""
-// },
-// "g": {
-// "h1.txt": "",
-// "h2.txt": ""
-// },
-
-// }),
-// )
-// .await;
-
-// fs.set_status_for_repo_via_git_operation(
-// &Path::new("/root/.git"),
-// &[
-// (Path::new("a/b/c1.txt"), GitFileStatus::Added),
-// (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
-// (Path::new("g/h2.txt"), GitFileStatus::Conflict),
-// ],
-// );
-
-// let tree = Worktree::local(
-// build_client(cx),
-// Path::new("/root"),
-// true,
-// fs.clone(),
-// Default::default(),
-// &mut cx.to_async(),
-// )
-// .await
-// .unwrap();
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-
-// cx.foreground().run_until_parked();
-// let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
-
-// check_propagated_statuses(
-// &snapshot,
-// &[
-// (Path::new(""), Some(GitFileStatus::Conflict)),
-// (Path::new("a"), Some(GitFileStatus::Modified)),
-// (Path::new("a/b"), Some(GitFileStatus::Added)),
-// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-// (Path::new("a/b/c2.txt"), None),
-// (Path::new("a/d"), Some(GitFileStatus::Modified)),
-// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-// (Path::new("f"), None),
-// (Path::new("f/no-status.txt"), None),
-// (Path::new("g"), Some(GitFileStatus::Conflict)),
-// (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
-// ],
-// );
-
-// check_propagated_statuses(
-// &snapshot,
-// &[
-// (Path::new("a/b"), Some(GitFileStatus::Added)),
-// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-// (Path::new("a/b/c2.txt"), None),
-// (Path::new("a/d"), Some(GitFileStatus::Modified)),
-// (Path::new("a/d/e1.txt"), None),
-// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-// (Path::new("f"), None),
-// (Path::new("f/no-status.txt"), None),
-// (Path::new("g"), Some(GitFileStatus::Conflict)),
-// ],
-// );
-
-// check_propagated_statuses(
-// &snapshot,
-// &[
-// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-// (Path::new("a/b/c2.txt"), None),
-// (Path::new("a/d/e1.txt"), None),
-// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-// (Path::new("f/no-status.txt"), None),
-// ],
-// );
-
-// #[track_caller]
-// fn check_propagated_statuses(
-// snapshot: &Snapshot,
-// expected_statuses: &[(&Path, Option<GitFileStatus>)],
-// ) {
-// let mut entries = expected_statuses
-// .iter()
-// .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
-// .collect::<Vec<_>>();
-// snapshot.propagate_git_statuses(&mut entries);
-// assert_eq!(
-// entries
-// .iter()
-// .map(|e| (e.path.as_ref(), e.git_status))
-// .collect::<Vec<_>>(),
-// expected_statuses
-// );
-// }
-// }
-
-// fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
-// let http_client = FakeHttpClient::with_404_response();
-// cx.read(|cx| Client::new(http_client, cx))
-// }
-
-// #[track_caller]
-// fn git_init(path: &Path) -> git2::Repository {
-// git2::Repository::init(path).expect("Failed to initialize git repository")
-// }
-
-// #[track_caller]
-// fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
-// let path = path.as_ref();
-// let mut index = repo.index().expect("Failed to get index");
-// index.add_path(path).expect("Failed to add a.txt");
-// index.write().expect("Failed to write index");
-// }
-
-// #[track_caller]
-// fn git_remove_index(path: &Path, repo: &git2::Repository) {
-// let mut index = repo.index().expect("Failed to get index");
-// index.remove_path(path).expect("Failed to add a.txt");
-// index.write().expect("Failed to write index");
-// }
-
-// #[track_caller]
-// fn git_commit(msg: &'static str, repo: &git2::Repository) {
-// use git2::Signature;
-
-// let signature = Signature::now("test", "test@zed.dev").unwrap();
-// let oid = repo.index().unwrap().write_tree().unwrap();
-// let tree = repo.find_tree(oid).unwrap();
-// if let Some(head) = repo.head().ok() {
-// let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
-
-// let parent_commit = parent_obj.as_commit().unwrap();
-
-// repo.commit(
-// Some("HEAD"),
-// &signature,
-// &signature,
-// msg,
-// &tree,
-// &[parent_commit],
-// )
-// .expect("Failed to commit with parent");
-// } else {
-// repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
-// .expect("Failed to commit");
-// }
-// }
-
-// #[track_caller]
-// fn git_stash(repo: &mut git2::Repository) {
-// use git2::Signature;
-
-// let signature = Signature::now("test", "test@zed.dev").unwrap();
-// repo.stash_save(&signature, "N/A", None)
-// .expect("Failed to stash");
-// }
-
-// #[track_caller]
-// fn git_reset(offset: usize, repo: &git2::Repository) {
-// let head = repo.head().expect("Couldn't get repo head");
-// let object = head.peel(git2::ObjectType::Commit).unwrap();
-// let commit = object.as_commit().unwrap();
-// let new_head = commit
-// .parents()
-// .inspect(|parnet| {
-// parnet.message();
-// })
-// .skip(offset)
-// .next()
-// .expect("Not enough history");
-// repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
-// .expect("Could not reset");
-// }
-
-// #[allow(dead_code)]
-// #[track_caller]
-// fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
-// repo.statuses(None)
-// .unwrap()
-// .iter()
-// .map(|status| (status.path().unwrap().to_string(), status.status()))
-// .collect()
-// }
+use crate::{
+ project_settings::ProjectSettings,
+ worktree::{Event, Snapshot, WorktreeModelHandle},
+ Entry, EntryKind, PathChange, Project, Worktree,
+};
+use anyhow::Result;
+use client::Client;
+use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
+use git::GITIGNORE;
+use gpui::{ModelContext, Task, TestAppContext};
+use parking_lot::Mutex;
+use postage::stream::Stream;
+use pretty_assertions::assert_eq;
+use rand::prelude::*;
+use serde_json::json;
+use settings::SettingsStore;
+use std::{
+ env,
+ fmt::Write,
+ mem,
+ path::{Path, PathBuf},
+ sync::Arc,
+};
+use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
+
+#[gpui::test]
+async fn test_traversal(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ ".gitignore": "a/b\n",
+ "a": {
+ "b": "",
+ "c": "",
+ }
+ }),
+ )
+ .await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ Path::new("/root"),
+ true,
+ fs,
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(false)
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ Path::new(""),
+ Path::new(".gitignore"),
+ Path::new("a"),
+ Path::new("a/c"),
+ ]
+ );
+ assert_eq!(
+ tree.entries(true)
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ Path::new(""),
+ Path::new(".gitignore"),
+ Path::new("a"),
+ Path::new("a/b"),
+ Path::new("a/c"),
+ ]
+ );
+ })
+}
+
+#[gpui::test]
+async fn test_descendent_entries(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "a": "",
+ "b": {
+ "c": {
+ "d": ""
+ },
+ "e": {}
+ },
+ "f": "",
+ "g": {
+ "h": {}
+ },
+ "i": {
+ "j": {
+ "k": ""
+ },
+ "l": {
+
+ }
+ },
+ ".gitignore": "i/j\n",
+ }),
+ )
+ .await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ Path::new("/root"),
+ true,
+ fs,
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.descendent_entries(false, false, Path::new("b"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("b/c/d"),]
+ );
+ assert_eq!(
+ tree.descendent_entries(true, false, Path::new("b"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ Path::new("b"),
+ Path::new("b/c"),
+ Path::new("b/c/d"),
+ Path::new("b/e"),
+ ]
+ );
+
+ assert_eq!(
+ tree.descendent_entries(false, false, Path::new("g"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ Vec::<PathBuf>::new()
+ );
+ assert_eq!(
+ tree.descendent_entries(true, false, Path::new("g"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("g"), Path::new("g/h"),]
+ );
+ });
+
+ // Expand gitignored directory.
+ tree.read_with(cx, |tree, _| {
+ tree.as_local()
+ .unwrap()
+ .refresh_entries_for_paths(vec![Path::new("i/j").into()])
+ })
+ .recv()
+ .await;
+
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.descendent_entries(false, false, Path::new("i"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ Vec::<PathBuf>::new()
+ );
+ assert_eq!(
+ tree.descendent_entries(false, true, Path::new("i"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("i/j/k")]
+ );
+ assert_eq!(
+ tree.descendent_entries(true, false, Path::new("i"))
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![Path::new("i"), Path::new("i/l"),]
+ );
+ })
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_circular_symlinks(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "lib": {
+ "a": {
+ "a.txt": ""
+ },
+ "b": {
+ "b.txt": ""
+ }
+ }
+ }),
+ )
+ .await;
+ fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
+ fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(false)
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ Path::new(""),
+ Path::new("lib"),
+ Path::new("lib/a"),
+ Path::new("lib/a/a.txt"),
+ Path::new("lib/a/lib"),
+ Path::new("lib/b"),
+ Path::new("lib/b/b.txt"),
+ Path::new("lib/b/lib"),
+ ]
+ );
+ });
+
+ fs.rename(
+ Path::new("/root/lib/a/lib"),
+ Path::new("/root/lib/a/lib-2"),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(false)
+ .map(|entry| entry.path.as_ref())
+ .collect::<Vec<_>>(),
+ vec![
+ Path::new(""),
+ Path::new("lib"),
+ Path::new("lib/a"),
+ Path::new("lib/a/a.txt"),
+ Path::new("lib/a/lib-2"),
+ Path::new("lib/b"),
+ Path::new("lib/b/b.txt"),
+ Path::new("lib/b/lib"),
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "dir1": {
+ "deps": {
+ // symlinks here
+ },
+ "src": {
+ "a.rs": "",
+ "b.rs": "",
+ },
+ },
+ "dir2": {
+ "src": {
+ "c.rs": "",
+ "d.rs": "",
+ }
+ },
+ "dir3": {
+ "deps": {},
+ "src": {
+ "e.rs": "",
+ "f.rs": "",
+ },
+ }
+ }),
+ )
+ .await;
+
+ // These symlinks point to directories outside of the worktree's root, dir1.
+ fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
+ .await;
+ fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
+ .await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ Path::new("/root/dir1"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ let tree_updates = Arc::new(Mutex::new(Vec::new()));
+ tree.update(cx, |_, cx| {
+ let tree_updates = tree_updates.clone();
+ cx.subscribe(&tree, move |_, _, event, _| {
+ if let Event::UpdatedEntries(update) = event {
+ tree_updates.lock().extend(
+ update
+ .iter()
+ .map(|(path, _, change)| (path.clone(), *change)),
+ );
+ }
+ })
+ .detach();
+ });
+
+ // The symlinked directories are not scanned by default.
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|entry| (entry.path.as_ref(), entry.is_external))
+ .collect::<Vec<_>>(),
+ vec![
+ (Path::new(""), false),
+ (Path::new("deps"), false),
+ (Path::new("deps/dep-dir2"), true),
+ (Path::new("deps/dep-dir3"), true),
+ (Path::new("src"), false),
+ (Path::new("src/a.rs"), false),
+ (Path::new("src/b.rs"), false),
+ ]
+ );
+
+ assert_eq!(
+ tree.entry_for_path("deps/dep-dir2").unwrap().kind,
+ EntryKind::UnloadedDir
+ );
+ });
+
+ // Expand one of the symlinked directories.
+ tree.read_with(cx, |tree, _| {
+ tree.as_local()
+ .unwrap()
+ .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
+ })
+ .recv()
+ .await;
+
+ // The expanded directory's contents are loaded. Subdirectories are
+ // not scanned yet.
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|entry| (entry.path.as_ref(), entry.is_external))
+ .collect::<Vec<_>>(),
+ vec![
+ (Path::new(""), false),
+ (Path::new("deps"), false),
+ (Path::new("deps/dep-dir2"), true),
+ (Path::new("deps/dep-dir3"), true),
+ (Path::new("deps/dep-dir3/deps"), true),
+ (Path::new("deps/dep-dir3/src"), true),
+ (Path::new("src"), false),
+ (Path::new("src/a.rs"), false),
+ (Path::new("src/b.rs"), false),
+ ]
+ );
+ });
+ assert_eq!(
+ mem::take(&mut *tree_updates.lock()),
+ &[
+ (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
+ (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
+ (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
+ ]
+ );
+
+ // Expand a subdirectory of one of the symlinked directories.
+ tree.read_with(cx, |tree, _| {
+ tree.as_local()
+ .unwrap()
+ .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
+ })
+ .recv()
+ .await;
+
+ // The expanded subdirectory's contents are loaded.
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|entry| (entry.path.as_ref(), entry.is_external))
+ .collect::<Vec<_>>(),
+ vec![
+ (Path::new(""), false),
+ (Path::new("deps"), false),
+ (Path::new("deps/dep-dir2"), true),
+ (Path::new("deps/dep-dir3"), true),
+ (Path::new("deps/dep-dir3/deps"), true),
+ (Path::new("deps/dep-dir3/src"), true),
+ (Path::new("deps/dep-dir3/src/e.rs"), true),
+ (Path::new("deps/dep-dir3/src/f.rs"), true),
+ (Path::new("src"), false),
+ (Path::new("src/a.rs"), false),
+ (Path::new("src/b.rs"), false),
+ ]
+ );
+ });
+
+ assert_eq!(
+ mem::take(&mut *tree_updates.lock()),
+ &[
+ (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
+ (
+ Path::new("deps/dep-dir3/src/e.rs").into(),
+ PathChange::Loaded
+ ),
+ (
+ Path::new("deps/dep-dir3/src/f.rs").into(),
+ PathChange::Loaded
+ )
+ ]
+ );
+}
+
+#[gpui::test]
+async fn test_open_gitignored_files(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ ".gitignore": "node_modules\n",
+ "one": {
+ "node_modules": {
+ "a": {
+ "a1.js": "a1",
+ "a2.js": "a2",
+ },
+ "b": {
+ "b1.js": "b1",
+ "b2.js": "b2",
+ },
+ "c": {
+ "c1.js": "c1",
+ "c2.js": "c2",
+ }
+ },
+ },
+ "two": {
+ "x.js": "",
+ "y.js": "",
+ },
+ }),
+ )
+ .await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+ .collect::<Vec<_>>(),
+ vec![
+ (Path::new(""), false),
+ (Path::new(".gitignore"), false),
+ (Path::new("one"), false),
+ (Path::new("one/node_modules"), true),
+ (Path::new("two"), false),
+ (Path::new("two/x.js"), false),
+ (Path::new("two/y.js"), false),
+ ]
+ );
+ });
+
+ // Open a file that is nested inside of a gitignored directory that
+ // has not yet been expanded.
+ let prev_read_dir_count = fs.read_dir_call_count();
+ let buffer = tree
+ .update(cx, |tree, cx| {
+ tree.as_local_mut()
+ .unwrap()
+ .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
+ })
+ .await
+ .unwrap();
+
+ tree.read_with(cx, |tree, cx| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+ .collect::<Vec<_>>(),
+ vec![
+ (Path::new(""), false),
+ (Path::new(".gitignore"), false),
+ (Path::new("one"), false),
+ (Path::new("one/node_modules"), true),
+ (Path::new("one/node_modules/a"), true),
+ (Path::new("one/node_modules/b"), true),
+ (Path::new("one/node_modules/b/b1.js"), true),
+ (Path::new("one/node_modules/b/b2.js"), true),
+ (Path::new("one/node_modules/c"), true),
+ (Path::new("two"), false),
+ (Path::new("two/x.js"), false),
+ (Path::new("two/y.js"), false),
+ ]
+ );
+
+ assert_eq!(
+ buffer.read(cx).file().unwrap().path().as_ref(),
+ Path::new("one/node_modules/b/b1.js")
+ );
+
+ // Only the newly-expanded directories are scanned.
+ assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
+ });
+
+ // Open another file in a different subdirectory of the same
+ // gitignored directory.
+ let prev_read_dir_count = fs.read_dir_call_count();
+ let buffer = tree
+ .update(cx, |tree, cx| {
+ tree.as_local_mut()
+ .unwrap()
+ .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
+ })
+ .await
+ .unwrap();
+
+ tree.read_with(cx, |tree, cx| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+ .collect::<Vec<_>>(),
+ vec![
+ (Path::new(""), false),
+ (Path::new(".gitignore"), false),
+ (Path::new("one"), false),
+ (Path::new("one/node_modules"), true),
+ (Path::new("one/node_modules/a"), true),
+ (Path::new("one/node_modules/a/a1.js"), true),
+ (Path::new("one/node_modules/a/a2.js"), true),
+ (Path::new("one/node_modules/b"), true),
+ (Path::new("one/node_modules/b/b1.js"), true),
+ (Path::new("one/node_modules/b/b2.js"), true),
+ (Path::new("one/node_modules/c"), true),
+ (Path::new("two"), false),
+ (Path::new("two/x.js"), false),
+ (Path::new("two/y.js"), false),
+ ]
+ );
+
+ assert_eq!(
+ buffer.read(cx).file().unwrap().path().as_ref(),
+ Path::new("one/node_modules/a/a2.js")
+ );
+
+ // Only the newly-expanded directory is scanned.
+ assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
+ });
+
+ // No work happens when files and directories change within an unloaded directory.
+ let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
+ fs.create_dir("/root/one/node_modules/c/lib".as_ref())
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+ assert_eq!(
+ fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
+ 0
+ );
+}
+
+#[gpui::test]
+async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ ".gitignore": "node_modules\n",
+ "a": {
+ "a.js": "",
+ },
+ "b": {
+ "b.js": "",
+ },
+ "node_modules": {
+ "c": {
+ "c.js": "",
+ },
+ "d": {
+ "d.js": "",
+ "e": {
+ "e1.js": "",
+ "e2.js": "",
+ },
+ "f": {
+ "f1.js": "",
+ "f2.js": "",
+ }
+ },
+ },
+ }),
+ )
+ .await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ // Open a file within the gitignored directory, forcing some of its
+ // subdirectories to be read, but not all.
+ let read_dir_count_1 = fs.read_dir_call_count();
+ tree.read_with(cx, |tree, _| {
+ tree.as_local()
+ .unwrap()
+ .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
+ })
+ .recv()
+ .await;
+
+ // Those subdirectories are now loaded.
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|e| (e.path.as_ref(), e.is_ignored))
+ .collect::<Vec<_>>(),
+ &[
+ (Path::new(""), false),
+ (Path::new(".gitignore"), false),
+ (Path::new("a"), false),
+ (Path::new("a/a.js"), false),
+ (Path::new("b"), false),
+ (Path::new("b/b.js"), false),
+ (Path::new("node_modules"), true),
+ (Path::new("node_modules/c"), true),
+ (Path::new("node_modules/d"), true),
+ (Path::new("node_modules/d/d.js"), true),
+ (Path::new("node_modules/d/e"), true),
+ (Path::new("node_modules/d/f"), true),
+ ]
+ );
+ });
+ let read_dir_count_2 = fs.read_dir_call_count();
+ assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
+
+    // Update the gitignore so that node_modules is no longer ignored,
+    // but one of its subdirectories (node_modules/d/e) becomes ignored.
+ fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+
+ // All of the directories that are no longer ignored are now loaded.
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.entries(true)
+ .map(|e| (e.path.as_ref(), e.is_ignored))
+ .collect::<Vec<_>>(),
+ &[
+ (Path::new(""), false),
+ (Path::new(".gitignore"), false),
+ (Path::new("a"), false),
+ (Path::new("a/a.js"), false),
+ (Path::new("b"), false),
+ (Path::new("b/b.js"), false),
+ // This directory is no longer ignored
+ (Path::new("node_modules"), false),
+ (Path::new("node_modules/c"), false),
+ (Path::new("node_modules/c/c.js"), false),
+ (Path::new("node_modules/d"), false),
+ (Path::new("node_modules/d/d.js"), false),
+ // This subdirectory is now ignored
+ (Path::new("node_modules/d/e"), true),
+ (Path::new("node_modules/d/f"), false),
+ (Path::new("node_modules/d/f/f1.js"), false),
+ (Path::new("node_modules/d/f/f2.js"), false),
+ ]
+ );
+ });
+
+ // Each of the newly-loaded directories is scanned only once.
+ let read_dir_count_3 = fs.read_dir_call_count();
+ assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions = Some(Vec::new());
+ });
+ });
+ });
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
+ "tree": {
+ ".git": {},
+ ".gitignore": "ignored-dir\n",
+ "tracked-dir": {
+ "tracked-file1": "",
+ "ancestor-ignored-file1": "",
+ },
+ "ignored-dir": {
+ "ignored-file1": ""
+ }
+ }
+ }),
+ )
+ .await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ "/root/tree".as_ref(),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.read_with(cx, |tree, _| {
+ tree.as_local()
+ .unwrap()
+ .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
+ })
+ .recv()
+ .await;
+
+ cx.read(|cx| {
+ let tree = tree.read(cx);
+ assert!(
+ !tree
+ .entry_for_path("tracked-dir/tracked-file1")
+ .unwrap()
+ .is_ignored
+ );
+ assert!(
+ tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
+ .unwrap()
+ .is_ignored
+ );
+ assert!(
+ tree.entry_for_path("ignored-dir/ignored-file1")
+ .unwrap()
+ .is_ignored
+ );
+ });
+
+ fs.create_file(
+ "/root/tree/tracked-dir/tracked-file2".as_ref(),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ fs.create_file(
+ "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ fs.create_file(
+ "/root/tree/ignored-dir/ignored-file2".as_ref(),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+
+ cx.executor().run_until_parked();
+ cx.read(|cx| {
+ let tree = tree.read(cx);
+ assert!(
+ !tree
+ .entry_for_path("tracked-dir/tracked-file2")
+ .unwrap()
+ .is_ignored
+ );
+ assert!(
+ tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
+ .unwrap()
+ .is_ignored
+ );
+ assert!(
+ tree.entry_for_path("ignored-dir/ignored-file2")
+ .unwrap()
+ .is_ignored
+ );
+ assert!(tree.entry_for_path(".git").unwrap().is_ignored);
+ });
+}
+
+#[gpui::test]
+async fn test_write_file(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.executor().allow_parking();
+ let dir = temp_tree(json!({
+ ".git": {},
+ ".gitignore": "ignored-dir\n",
+ "tracked-dir": {},
+ "ignored-dir": {}
+ }));
+
+ let tree = Worktree::local(
+ build_client(cx),
+ dir.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+
+ tree.update(cx, |tree, cx| {
+ tree.as_local().unwrap().write_file(
+ Path::new("tracked-dir/file.txt"),
+ "hello".into(),
+ Default::default(),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+ tree.update(cx, |tree, cx| {
+ tree.as_local().unwrap().write_file(
+ Path::new("ignored-dir/file.txt"),
+ "world".into(),
+ Default::default(),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ tree.read_with(cx, |tree, _| {
+ let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
+ let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
+ assert!(!tracked.is_ignored);
+ assert!(ignored.is_ignored);
+ });
+}
+
+#[gpui::test]
+async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.executor().allow_parking();
+ let dir = temp_tree(json!({
+ ".gitignore": "**/target\n/node_modules\n",
+ "target": {
+ "index": "blah2"
+ },
+ "node_modules": {
+ ".DS_Store": "",
+ "prettier": {
+ "package.json": "{}",
+ },
+ },
+ "src": {
+ ".DS_Store": "",
+ "foo": {
+ "foo.rs": "mod another;\n",
+ "another.rs": "// another",
+ },
+ "bar": {
+ "bar.rs": "// bar",
+ },
+ "lib.rs": "mod foo;\nmod bar;\n",
+ },
+ ".DS_Store": "",
+ }));
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
+ });
+ });
+ });
+
+ let tree = Worktree::local(
+ build_client(cx),
+ dir.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+ tree.read_with(cx, |tree, _| {
+ check_worktree_entries(
+ tree,
+ &[
+ "src/foo/foo.rs",
+ "src/foo/another.rs",
+ "node_modules/.DS_Store",
+ "src/.DS_Store",
+ ".DS_Store",
+ ],
+ &["target", "node_modules"],
+ &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
+ )
+ });
+
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["**/node_modules/**".to_string()]);
+ });
+ });
+ });
+ tree.flush_fs_events(cx).await;
+ cx.executor().run_until_parked();
+ tree.read_with(cx, |tree, _| {
+ check_worktree_entries(
+ tree,
+ &[
+ "node_modules/prettier/package.json",
+ "node_modules/.DS_Store",
+ "node_modules",
+ ],
+ &["target"],
+ &[
+ ".gitignore",
+ "src/lib.rs",
+ "src/bar/bar.rs",
+ "src/foo/foo.rs",
+ "src/foo/another.rs",
+ "src/.DS_Store",
+ ".DS_Store",
+ ],
+ )
+ });
+}
+
+#[gpui::test(iterations = 30)]
+async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "b": {},
+ "c": {},
+ "d": {},
+ }),
+ )
+ .await;
+
+ let tree = Worktree::local(
+ build_client(cx),
+ "/root".as_ref(),
+ true,
+ fs,
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let snapshot1 = tree.update(cx, |tree, cx| {
+ let tree = tree.as_local_mut().unwrap();
+ let snapshot = Arc::new(Mutex::new(tree.snapshot()));
+ let _ = tree.observe_updates(0, cx, {
+ let snapshot = snapshot.clone();
+ move |update| {
+ snapshot.lock().apply_remote_update(update).unwrap();
+ async { true }
+ }
+ });
+ snapshot
+ });
+
+ let entry = tree
+ .update(cx, |tree, cx| {
+ tree.as_local_mut()
+ .unwrap()
+ .create_entry("a/e".as_ref(), true, cx)
+ })
+ .await
+ .unwrap();
+ assert!(entry.is_dir());
+
+ cx.executor().run_until_parked();
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
+ });
+
+ let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
+ assert_eq!(
+ snapshot1.lock().entries(true).collect::<Vec<_>>(),
+ snapshot2.entries(true).collect::<Vec<_>>()
+ );
+}
+
+#[gpui::test]
+async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.executor().allow_parking();
+ let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+ let fs_fake = FakeFs::new(cx.background_executor.clone());
+ fs_fake
+ .insert_tree(
+ "/root",
+ json!({
+ "a": {},
+ }),
+ )
+ .await;
+
+ let tree_fake = Worktree::local(
+ client_fake,
+ "/root".as_ref(),
+ true,
+ fs_fake,
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let entry = tree_fake
+ .update(cx, |tree, cx| {
+ tree.as_local_mut()
+ .unwrap()
+ .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+ })
+ .await
+ .unwrap();
+ assert!(entry.is_file());
+
+ cx.executor().run_until_parked();
+ tree_fake.read_with(cx, |tree, _| {
+ assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+ assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+ assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+ });
+
+ let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+ let fs_real = Arc::new(RealFs);
+ let temp_root = temp_tree(json!({
+ "a": {}
+ }));
+
+ let tree_real = Worktree::local(
+ client_real,
+ temp_root.path(),
+ true,
+ fs_real,
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let entry = tree_real
+ .update(cx, |tree, cx| {
+ tree.as_local_mut()
+ .unwrap()
+ .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+ })
+ .await
+ .unwrap();
+ assert!(entry.is_file());
+
+ cx.executor().run_until_parked();
+ tree_real.read_with(cx, |tree, _| {
+ assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+ assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+ assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+ });
+
+    // Smallest change: create a file in a directory that already exists and has been scanned.
+ let entry = tree_real
+ .update(cx, |tree, cx| {
+ tree.as_local_mut()
+ .unwrap()
+ .create_entry("a/b/c/e.txt".as_ref(), false, cx)
+ })
+ .await
+ .unwrap();
+ assert!(entry.is_file());
+
+ cx.executor().run_until_parked();
+ tree_real.read_with(cx, |tree, _| {
+ assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
+ });
+
+    // Largest change: create a file nested under several directories that don't exist yet.
+ let entry = tree_real
+ .update(cx, |tree, cx| {
+ tree.as_local_mut()
+ .unwrap()
+ .create_entry("d/e/f/g.txt".as_ref(), false, cx)
+ })
+ .await
+ .unwrap();
+ assert!(entry.is_file());
+
+ cx.executor().run_until_parked();
+ tree_real.read_with(cx, |tree, _| {
+ assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
+ assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
+ assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
+ assert!(tree.entry_for_path("d/").unwrap().is_dir());
+ });
+}
+
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_operations_during_initial_scan(
+ cx: &mut TestAppContext,
+ mut rng: StdRng,
+) {
+ init_test(cx);
+ let operations = env::var("OPERATIONS")
+ .map(|o| o.parse().unwrap())
+ .unwrap_or(5);
+ let initial_entries = env::var("INITIAL_ENTRIES")
+ .map(|o| o.parse().unwrap())
+ .unwrap_or(20);
+
+ let root_dir = Path::new("/test");
+ let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
+ fs.as_fake().insert_tree(root_dir, json!({})).await;
+ for _ in 0..initial_entries {
+ randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+ }
+ log::info!("generated initial tree");
+
+ let worktree = Worktree::local(
+ build_client(cx),
+ root_dir,
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
+ let updates = Arc::new(Mutex::new(Vec::new()));
+ worktree.update(cx, |tree, cx| {
+ check_worktree_change_events(tree, cx);
+
+ let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+ let updates = updates.clone();
+ move |update| {
+ updates.lock().push(update);
+ async { true }
+ }
+ });
+ });
+
+ for _ in 0..operations {
+ worktree
+ .update(cx, |worktree, cx| {
+ randomly_mutate_worktree(worktree, &mut rng, cx)
+ })
+ .await
+ .log_err();
+ worktree.read_with(cx, |tree, _| {
+ tree.as_local().unwrap().snapshot().check_invariants(true)
+ });
+
+ if rng.gen_bool(0.6) {
+ snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
+ }
+ }
+
+ worktree
+ .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+ .await;
+
+ cx.executor().run_until_parked();
+
+ let final_snapshot = worktree.read_with(cx, |tree, _| {
+ let tree = tree.as_local().unwrap();
+ let snapshot = tree.snapshot();
+ snapshot.check_invariants(true);
+ snapshot
+ });
+
+ for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
+ let mut updated_snapshot = snapshot.clone();
+ for update in updates.lock().iter() {
+ if update.scan_id >= updated_snapshot.scan_id() as u64 {
+ updated_snapshot
+ .apply_remote_update(update.clone())
+ .unwrap();
+ }
+ }
+
+ assert_eq!(
+ updated_snapshot.entries(true).collect::<Vec<_>>(),
+ final_snapshot.entries(true).collect::<Vec<_>>(),
+ "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
+ );
+ }
+}
+
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+ init_test(cx);
+ let operations = env::var("OPERATIONS")
+ .map(|o| o.parse().unwrap())
+ .unwrap_or(40);
+ let initial_entries = env::var("INITIAL_ENTRIES")
+ .map(|o| o.parse().unwrap())
+ .unwrap_or(20);
+
+ let root_dir = Path::new("/test");
+ let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
+ fs.as_fake().insert_tree(root_dir, json!({})).await;
+ for _ in 0..initial_entries {
+ randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+ }
+ log::info!("generated initial tree");
+
+ let worktree = Worktree::local(
+ build_client(cx),
+ root_dir,
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let updates = Arc::new(Mutex::new(Vec::new()));
+ worktree.update(cx, |tree, cx| {
+ check_worktree_change_events(tree, cx);
+
+ let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+ let updates = updates.clone();
+ move |update| {
+ updates.lock().push(update);
+ async { true }
+ }
+ });
+ });
+
+ worktree
+ .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+ .await;
+
+ fs.as_fake().pause_events();
+ let mut snapshots = Vec::new();
+ let mut mutations_len = operations;
+ while mutations_len > 1 {
+ if rng.gen_bool(0.2) {
+ worktree
+ .update(cx, |worktree, cx| {
+ randomly_mutate_worktree(worktree, &mut rng, cx)
+ })
+ .await
+ .log_err();
+ } else {
+ randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+ }
+
+ let buffered_event_count = fs.as_fake().buffered_event_count();
+ if buffered_event_count > 0 && rng.gen_bool(0.3) {
+ let len = rng.gen_range(0..=buffered_event_count);
+ log::info!("flushing {} events", len);
+ fs.as_fake().flush_events(len);
+ } else {
+ randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
+ mutations_len -= 1;
+ }
+
+ cx.executor().run_until_parked();
+ if rng.gen_bool(0.2) {
+ log::info!("storing snapshot {}", snapshots.len());
+ let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+ snapshots.push(snapshot);
+ }
+ }
+
+ log::info!("quiescing");
+ fs.as_fake().flush_events(usize::MAX);
+ cx.executor().run_until_parked();
+
+ let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+ snapshot.check_invariants(true);
+ let expanded_paths = snapshot
+ .expanded_entries()
+ .map(|e| e.path.clone())
+ .collect::<Vec<_>>();
+
+ {
+ let new_worktree = Worktree::local(
+ build_client(cx),
+ root_dir,
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ new_worktree
+ .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+ .await;
+ new_worktree
+ .update(cx, |tree, _| {
+ tree.as_local_mut()
+ .unwrap()
+ .refresh_entries_for_paths(expanded_paths)
+ })
+ .recv()
+ .await;
+ let new_snapshot =
+ new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+ assert_eq!(
+ snapshot.entries_without_ids(true),
+ new_snapshot.entries_without_ids(true)
+ );
+ }
+
+ for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
+ for update in updates.lock().iter() {
+ if update.scan_id >= prev_snapshot.scan_id() as u64 {
+ prev_snapshot.apply_remote_update(update.clone()).unwrap();
+ }
+ }
+
+ assert_eq!(
+ prev_snapshot
+ .entries(true)
+ .map(ignore_pending_dir)
+ .collect::<Vec<_>>(),
+ snapshot
+ .entries(true)
+ .map(ignore_pending_dir)
+ .collect::<Vec<_>>(),
+ "wrong updates after snapshot {i}: {updates:#?}",
+ );
+ }
+
+ fn ignore_pending_dir(entry: &Entry) -> Entry {
+ let mut entry = entry.clone();
+ if entry.kind.is_dir() {
+ entry.kind = EntryKind::Dir
+ }
+ entry
+ }
+}
+
+// The worktree's `UpdatedEntries` event can be used to follow along with
+// all changes to the worktree's snapshot.
+fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
+ let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
+ cx.subscribe(&cx.handle(), move |tree, _, event, _| {
+ if let Event::UpdatedEntries(changes) = event {
+ for (path, _, change_type) in changes.iter() {
+ let entry = tree.entry_for_path(&path).cloned();
+ let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
+ Ok(ix) | Err(ix) => ix,
+ };
+ match change_type {
+ PathChange::Added => entries.insert(ix, entry.unwrap()),
+ PathChange::Removed => drop(entries.remove(ix)),
+ PathChange::Updated => {
+ let entry = entry.unwrap();
+ let existing_entry = entries.get_mut(ix).unwrap();
+ assert_eq!(existing_entry.path, entry.path);
+ *existing_entry = entry;
+ }
+ PathChange::AddedOrUpdated | PathChange::Loaded => {
+ let entry = entry.unwrap();
+ if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
+ *entries.get_mut(ix).unwrap() = entry;
+ } else {
+ entries.insert(ix, entry);
+ }
+ }
+ }
+ }
+
+ let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
+ assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
+ }
+ })
+ .detach();
+}
+
+fn randomly_mutate_worktree(
+ worktree: &mut Worktree,
+ rng: &mut impl Rng,
+ cx: &mut ModelContext<Worktree>,
+) -> Task<Result<()>> {
+ log::info!("mutating worktree");
+ let worktree = worktree.as_local_mut().unwrap();
+ let snapshot = worktree.snapshot();
+ let entry = snapshot.entries(false).choose(rng).unwrap();
+
+ match rng.gen_range(0_u32..100) {
+ 0..=33 if entry.path.as_ref() != Path::new("") => {
+ log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
+ worktree.delete_entry(entry.id, cx).unwrap()
+ }
+ ..=66 if entry.path.as_ref() != Path::new("") => {
+ let other_entry = snapshot.entries(false).choose(rng).unwrap();
+ let new_parent_path = if other_entry.is_dir() {
+ other_entry.path.clone()
+ } else {
+ other_entry.path.parent().unwrap().into()
+ };
+ let mut new_path = new_parent_path.join(random_filename(rng));
+ if new_path.starts_with(&entry.path) {
+ new_path = random_filename(rng).into();
+ }
+
+ log::info!(
+ "renaming entry {:?} ({}) to {:?}",
+ entry.path,
+ entry.id.0,
+ new_path
+ );
+ let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
+ cx.background_executor().spawn(async move {
+ task.await?;
+ Ok(())
+ })
+ }
+ _ => {
+ let task = if entry.is_dir() {
+ let child_path = entry.path.join(random_filename(rng));
+ let is_dir = rng.gen_bool(0.3);
+ log::info!(
+ "creating {} at {:?}",
+ if is_dir { "dir" } else { "file" },
+ child_path,
+ );
+ worktree.create_entry(child_path, is_dir, cx)
+ } else {
+ log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
+ worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
+ };
+ cx.background_executor().spawn(async move {
+ task.await?;
+ Ok(())
+ })
+ }
+ }
+}
+
+async fn randomly_mutate_fs(
+ fs: &Arc<dyn Fs>,
+ root_path: &Path,
+ insertion_probability: f64,
+ rng: &mut impl Rng,
+) {
+ log::info!("mutating fs");
+ let mut files = Vec::new();
+ let mut dirs = Vec::new();
+ for path in fs.as_fake().paths(false) {
+ if path.starts_with(root_path) {
+ if fs.is_file(&path).await {
+ files.push(path);
+ } else {
+ dirs.push(path);
+ }
+ }
+ }
+
+ if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
+ let path = dirs.choose(rng).unwrap();
+ let new_path = path.join(random_filename(rng));
+
+ if rng.gen() {
+ log::info!(
+ "creating dir {:?}",
+ new_path.strip_prefix(root_path).unwrap()
+ );
+ fs.create_dir(&new_path).await.unwrap();
+ } else {
+ log::info!(
+ "creating file {:?}",
+ new_path.strip_prefix(root_path).unwrap()
+ );
+ fs.create_file(&new_path, Default::default()).await.unwrap();
+ }
+ } else if rng.gen_bool(0.05) {
+ let ignore_dir_path = dirs.choose(rng).unwrap();
+ let ignore_path = ignore_dir_path.join(&*GITIGNORE);
+
+ let subdirs = dirs
+ .iter()
+ .filter(|d| d.starts_with(&ignore_dir_path))
+ .cloned()
+ .collect::<Vec<_>>();
+ let subfiles = files
+ .iter()
+ .filter(|d| d.starts_with(&ignore_dir_path))
+ .cloned()
+ .collect::<Vec<_>>();
+ let files_to_ignore = {
+ let len = rng.gen_range(0..=subfiles.len());
+ subfiles.choose_multiple(rng, len)
+ };
+ let dirs_to_ignore = {
+ let len = rng.gen_range(0..subdirs.len());
+ subdirs.choose_multiple(rng, len)
+ };
+
+ let mut ignore_contents = String::new();
+ for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
+ writeln!(
+ ignore_contents,
+ "{}",
+ path_to_ignore
+ .strip_prefix(&ignore_dir_path)
+ .unwrap()
+ .to_str()
+ .unwrap()
+ )
+ .unwrap();
+ }
+ log::info!(
+ "creating gitignore {:?} with contents:\n{}",
+ ignore_path.strip_prefix(&root_path).unwrap(),
+ ignore_contents
+ );
+ fs.save(
+ &ignore_path,
+ &ignore_contents.as_str().into(),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ } else {
+ let old_path = {
+ let file_path = files.choose(rng);
+ let dir_path = dirs[1..].choose(rng);
+ file_path.into_iter().chain(dir_path).choose(rng).unwrap()
+ };
+
+ let is_rename = rng.gen();
+ if is_rename {
+ let new_path_parent = dirs
+ .iter()
+ .filter(|d| !d.starts_with(old_path))
+ .choose(rng)
+ .unwrap();
+
+ let overwrite_existing_dir =
+ !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
+ let new_path = if overwrite_existing_dir {
+ fs.remove_dir(
+ &new_path_parent,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: true,
+ },
+ )
+ .await
+ .unwrap();
+ new_path_parent.to_path_buf()
+ } else {
+ new_path_parent.join(random_filename(rng))
+ };
+
+ log::info!(
+ "renaming {:?} to {}{:?}",
+ old_path.strip_prefix(&root_path).unwrap(),
+ if overwrite_existing_dir {
+ "overwrite "
+ } else {
+ ""
+ },
+ new_path.strip_prefix(&root_path).unwrap()
+ );
+ fs.rename(
+ &old_path,
+ &new_path,
+ fs::RenameOptions {
+ overwrite: true,
+ ignore_if_exists: true,
+ },
+ )
+ .await
+ .unwrap();
+ } else if fs.is_file(&old_path).await {
+ log::info!(
+ "deleting file {:?}",
+ old_path.strip_prefix(&root_path).unwrap()
+ );
+ fs.remove_file(old_path, Default::default()).await.unwrap();
+ } else {
+ log::info!(
+ "deleting dir {:?}",
+ old_path.strip_prefix(&root_path).unwrap()
+ );
+ fs.remove_dir(
+ &old_path,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: true,
+ },
+ )
+ .await
+ .unwrap();
+ }
+ }
+}
+
+fn random_filename(rng: &mut impl Rng) -> String {
+ (0..6)
+ .map(|_| rng.sample(rand::distributions::Alphanumeric))
+ .map(char::from)
+ .collect()
+}
+
+#[gpui::test]
+async fn test_rename_work_directory(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.executor().allow_parking();
+ let root = temp_tree(json!({
+ "projects": {
+ "project1": {
+ "a": "",
+ "b": "",
+ }
+ },
+
+ }));
+ let root_path = root.path();
+
+ let tree = Worktree::local(
+ build_client(cx),
+ root_path,
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let repo = git_init(&root_path.join("projects/project1"));
+ git_add("a", &repo);
+ git_commit("init", &repo);
+ std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.flush_fs_events(cx).await;
+
+ cx.read(|cx| {
+ let tree = tree.read(cx);
+ let (work_dir, _) = tree.repositories().next().unwrap();
+ assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
+ assert_eq!(
+ tree.status_for_file(Path::new("projects/project1/a")),
+ Some(GitFileStatus::Modified)
+ );
+ assert_eq!(
+ tree.status_for_file(Path::new("projects/project1/b")),
+ Some(GitFileStatus::Added)
+ );
+ });
+
+ std::fs::rename(
+ root_path.join("projects/project1"),
+ root_path.join("projects/project2"),
+ )
+ .ok();
+ tree.flush_fs_events(cx).await;
+
+ cx.read(|cx| {
+ let tree = tree.read(cx);
+ let (work_dir, _) = tree.repositories().next().unwrap();
+ assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
+ assert_eq!(
+ tree.status_for_file(Path::new("projects/project2/a")),
+ Some(GitFileStatus::Modified)
+ );
+ assert_eq!(
+ tree.status_for_file(Path::new("projects/project2/b")),
+ Some(GitFileStatus::Added)
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.executor().allow_parking();
+ let root = temp_tree(json!({
+ "c.txt": "",
+ "dir1": {
+ ".git": {},
+ "deps": {
+ "dep1": {
+ ".git": {},
+ "src": {
+ "a.txt": ""
+ }
+ }
+ },
+ "src": {
+ "b.txt": ""
+ }
+ },
+ }));
+
+ let tree = Worktree::local(
+ build_client(cx),
+ root.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+
+ assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
+
+ let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
+ assert_eq!(
+ entry
+ .work_directory(tree)
+ .map(|directory| directory.as_ref().to_owned()),
+ Some(Path::new("dir1").to_owned())
+ );
+
+ let entry = tree
+ .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
+ .unwrap();
+ assert_eq!(
+ entry
+ .work_directory(tree)
+ .map(|directory| directory.as_ref().to_owned()),
+ Some(Path::new("dir1/deps/dep1").to_owned())
+ );
+
+ let entries = tree.files(false, 0);
+
+ let paths_with_repos = tree
+ .entries_with_repositories(entries)
+ .map(|(entry, repo)| {
+ (
+ entry.path.as_ref(),
+ repo.and_then(|repo| {
+ repo.work_directory(&tree)
+ .map(|work_directory| work_directory.0.to_path_buf())
+ }),
+ )
+ })
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ paths_with_repos,
+ &[
+ (Path::new("c.txt"), None),
+ (
+ Path::new("dir1/deps/dep1/src/a.txt"),
+ Some(Path::new("dir1/deps/dep1").into())
+ ),
+ (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
+ ]
+ );
+ });
+
+ let repo_update_events = Arc::new(Mutex::new(vec![]));
+ tree.update(cx, |_, cx| {
+ let repo_update_events = repo_update_events.clone();
+ cx.subscribe(&tree, move |_, _, event, _| {
+ if let Event::UpdatedGitRepositories(update) = event {
+ repo_update_events.lock().push(update.clone());
+ }
+ })
+ .detach();
+ });
+
+ std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
+ tree.flush_fs_events(cx).await;
+
+ assert_eq!(
+ repo_update_events.lock()[0]
+ .iter()
+ .map(|e| e.0.clone())
+ .collect::<Vec<Arc<Path>>>(),
+ vec![Path::new("dir1").into()]
+ );
+
+ std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+
+ assert!(tree
+ .repository_for_path("dir1/src/b.txt".as_ref())
+ .is_none());
+ });
+}
+
+#[gpui::test]
+async fn test_git_status(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.executor().allow_parking();
+ const IGNORE_RULE: &'static str = "**/target";
+
+ let root = temp_tree(json!({
+ "project": {
+ "a.txt": "a",
+ "b.txt": "bb",
+ "c": {
+ "d": {
+ "e.txt": "eee"
+ }
+ },
+ "f.txt": "ffff",
+ "target": {
+ "build_file": "???"
+ },
+ ".gitignore": IGNORE_RULE
+ },
+
+ }));
+
+ const A_TXT: &'static str = "a.txt";
+ const B_TXT: &'static str = "b.txt";
+ const E_TXT: &'static str = "c/d/e.txt";
+ const F_TXT: &'static str = "f.txt";
+ const DOTGITIGNORE: &'static str = ".gitignore";
+ const BUILD_FILE: &'static str = "target/build_file";
+ let project_path = Path::new("project");
+
+ // Set up git repository before creating the worktree.
+ let work_dir = root.path().join("project");
+ let mut repo = git_init(work_dir.as_path());
+ repo.add_ignore_rule(IGNORE_RULE).unwrap();
+ git_add(A_TXT, &repo);
+ git_add(E_TXT, &repo);
+ git_add(DOTGITIGNORE, &repo);
+ git_commit("Initial commit", &repo);
+
+ let tree = Worktree::local(
+ build_client(cx),
+ root.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ cx.executor().run_until_parked();
+
+ // Check that the right git state is observed on startup
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ assert_eq!(snapshot.repositories().count(), 1);
+ let (dir, _) = snapshot.repositories().next().unwrap();
+ assert_eq!(dir.as_ref(), Path::new("project"));
+
+ assert_eq!(
+ snapshot.status_for_file(project_path.join(B_TXT)),
+ Some(GitFileStatus::Added)
+ );
+ assert_eq!(
+ snapshot.status_for_file(project_path.join(F_TXT)),
+ Some(GitFileStatus::Added)
+ );
+ });
+
+ // Modify a file in the working copy.
+ std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
+ tree.flush_fs_events(cx).await;
+ cx.executor().run_until_parked();
+
+ // The worktree detects that the file's git status has changed.
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ assert_eq!(
+ snapshot.status_for_file(project_path.join(A_TXT)),
+ Some(GitFileStatus::Modified)
+ );
+ });
+
+ // Create a commit in the git repository.
+ git_add(A_TXT, &repo);
+ git_add(B_TXT, &repo);
+ git_commit("Committing modified and added", &repo);
+ tree.flush_fs_events(cx).await;
+ cx.executor().run_until_parked();
+
+ // The worktree detects that the files' git status have changed.
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ assert_eq!(
+ snapshot.status_for_file(project_path.join(F_TXT)),
+ Some(GitFileStatus::Added)
+ );
+ assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
+ assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+ });
+
+ // Modify files in the working copy and perform git operations on other files.
+ git_reset(0, &repo);
+ git_remove_index(Path::new(B_TXT), &repo);
+ git_stash(&mut repo);
+ std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
+ std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
+ tree.flush_fs_events(cx).await;
+ cx.executor().run_until_parked();
+
+ // Check that more complex repo changes are tracked
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+
+ assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+ assert_eq!(
+ snapshot.status_for_file(project_path.join(B_TXT)),
+ Some(GitFileStatus::Added)
+ );
+ assert_eq!(
+ snapshot.status_for_file(project_path.join(E_TXT)),
+ Some(GitFileStatus::Modified)
+ );
+ });
+
+ std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
+ std::fs::remove_dir_all(work_dir.join("c")).unwrap();
+ std::fs::write(
+ work_dir.join(DOTGITIGNORE),
+ [IGNORE_RULE, "f.txt"].join("\n"),
+ )
+ .unwrap();
+
+ git_add(Path::new(DOTGITIGNORE), &repo);
+ git_commit("Committing modified git ignore", &repo);
+
+ tree.flush_fs_events(cx).await;
+ cx.executor().run_until_parked();
+
+ let mut renamed_dir_name = "first_directory/second_directory";
+ const RENAMED_FILE: &'static str = "rf.txt";
+
+ std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
+ std::fs::write(
+ work_dir.join(renamed_dir_name).join(RENAMED_FILE),
+ "new-contents",
+ )
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+ cx.executor().run_until_parked();
+
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ assert_eq!(
+ snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
+ Some(GitFileStatus::Added)
+ );
+ });
+
+ renamed_dir_name = "new_first_directory/second_directory";
+
+ std::fs::rename(
+ work_dir.join("first_directory"),
+ work_dir.join("new_first_directory"),
+ )
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+ cx.executor().run_until_parked();
+
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+
+ assert_eq!(
+ snapshot.status_for_file(
+ project_path
+ .join(Path::new(renamed_dir_name))
+ .join(RENAMED_FILE)
+ ),
+ Some(GitFileStatus::Added)
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ ".git": {},
+ "a": {
+ "b": {
+ "c1.txt": "",
+ "c2.txt": "",
+ },
+ "d": {
+ "e1.txt": "",
+ "e2.txt": "",
+ "e3.txt": "",
+ }
+ },
+ "f": {
+ "no-status.txt": ""
+ },
+ "g": {
+ "h1.txt": "",
+ "h2.txt": ""
+ },
+
+ }),
+ )
+ .await;
+
+ fs.set_status_for_repo_via_git_operation(
+ &Path::new("/root/.git"),
+ &[
+ (Path::new("a/b/c1.txt"), GitFileStatus::Added),
+ (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
+ (Path::new("g/h2.txt"), GitFileStatus::Conflict),
+ ],
+ );
+
+ let tree = Worktree::local(
+ build_client(cx),
+ Path::new("/root"),
+ true,
+ fs.clone(),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ cx.executor().run_until_parked();
+ let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
+ check_propagated_statuses(
+ &snapshot,
+ &[
+ (Path::new(""), Some(GitFileStatus::Conflict)),
+ (Path::new("a"), Some(GitFileStatus::Modified)),
+ (Path::new("a/b"), Some(GitFileStatus::Added)),
+ (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+ (Path::new("a/b/c2.txt"), None),
+ (Path::new("a/d"), Some(GitFileStatus::Modified)),
+ (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+ (Path::new("f"), None),
+ (Path::new("f/no-status.txt"), None),
+ (Path::new("g"), Some(GitFileStatus::Conflict)),
+ (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
+ ],
+ );
+
+ check_propagated_statuses(
+ &snapshot,
+ &[
+ (Path::new("a/b"), Some(GitFileStatus::Added)),
+ (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+ (Path::new("a/b/c2.txt"), None),
+ (Path::new("a/d"), Some(GitFileStatus::Modified)),
+ (Path::new("a/d/e1.txt"), None),
+ (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+ (Path::new("f"), None),
+ (Path::new("f/no-status.txt"), None),
+ (Path::new("g"), Some(GitFileStatus::Conflict)),
+ ],
+ );
+
+ check_propagated_statuses(
+ &snapshot,
+ &[
+ (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+ (Path::new("a/b/c2.txt"), None),
+ (Path::new("a/d/e1.txt"), None),
+ (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+ (Path::new("f/no-status.txt"), None),
+ ],
+ );
+
+ #[track_caller]
+ fn check_propagated_statuses(
+ snapshot: &Snapshot,
+ expected_statuses: &[(&Path, Option<GitFileStatus>)],
+ ) {
+ let mut entries = expected_statuses
+ .iter()
+ .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
+ .collect::<Vec<_>>();
+ snapshot.propagate_git_statuses(&mut entries);
+ assert_eq!(
+ entries
+ .iter()
+ .map(|e| (e.path.as_ref(), e.git_status))
+ .collect::<Vec<_>>(),
+ expected_statuses
+ );
+ }
+}
+
+fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
+ let http_client = FakeHttpClient::with_404_response();
+ cx.read(|cx| Client::new(http_client, cx))
+}
+
+#[track_caller]
+fn git_init(path: &Path) -> git2::Repository {
+ git2::Repository::init(path).expect("Failed to initialize git repository")
+}
+
+#[track_caller]
+fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
+ let path = path.as_ref();
+ let mut index = repo.index().expect("Failed to get index");
+    index.add_path(path).expect("Failed to add path to index");
+ index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_remove_index(path: &Path, repo: &git2::Repository) {
+ let mut index = repo.index().expect("Failed to get index");
+    index.remove_path(path).expect("Failed to remove path from index");
+ index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_commit(msg: &'static str, repo: &git2::Repository) {
+ use git2::Signature;
+
+ let signature = Signature::now("test", "test@zed.dev").unwrap();
+ let oid = repo.index().unwrap().write_tree().unwrap();
+ let tree = repo.find_tree(oid).unwrap();
+    if let Ok(head) = repo.head() {
+ let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
+
+ let parent_commit = parent_obj.as_commit().unwrap();
+
+ repo.commit(
+ Some("HEAD"),
+ &signature,
+ &signature,
+ msg,
+ &tree,
+ &[parent_commit],
+ )
+ .expect("Failed to commit with parent");
+ } else {
+ repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
+ .expect("Failed to commit");
+ }
+}
+
+#[track_caller]
+fn git_stash(repo: &mut git2::Repository) {
+ use git2::Signature;
+
+ let signature = Signature::now("test", "test@zed.dev").unwrap();
+ repo.stash_save(&signature, "N/A", None)
+ .expect("Failed to stash");
+}
+
+#[track_caller]
+fn git_reset(offset: usize, repo: &git2::Repository) {
+ let head = repo.head().expect("Couldn't get repo head");
+ let object = head.peel(git2::ObjectType::Commit).unwrap();
+ let commit = object.as_commit().unwrap();
+ let new_head = commit
+ .parents()
+        .inspect(|parent| {
+            parent.message();
+ })
+ .skip(offset)
+ .next()
+ .expect("Not enough history");
+ repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
+ .expect("Could not reset");
+}
+
+#[allow(dead_code)]
+#[track_caller]
+fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
+ repo.statuses(None)
+ .unwrap()
+ .iter()
+ .map(|status| (status.path().unwrap().to_string(), status.status()))
+ .collect()
+}
+
+#[track_caller]
+fn check_worktree_entries(
+ tree: &Worktree,
+ expected_excluded_paths: &[&str],
+ expected_ignored_paths: &[&str],
+ expected_tracked_paths: &[&str],
+) {
+ for path in expected_excluded_paths {
+ let entry = tree.entry_for_path(path);
+ assert!(
+ entry.is_none(),
+ "expected path '{path}' to be excluded, but got entry: {entry:?}",
+ );
+ }
+ for path in expected_ignored_paths {
+ let entry = tree
+ .entry_for_path(path)
+ .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
+ assert!(
+ entry.is_ignored,
+ "expected path '{path}' to be ignored, but got entry: {entry:?}",
+ );
+ }
+ for path in expected_tracked_paths {
+ let entry = tree
+ .entry_for_path(path)
+ .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
+ assert!(
+ !entry.is_ignored,
+ "expected path '{path}' to be tracked, but got entry: {entry:?}",
+ );
+ }
+}
+
+fn init_test(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ Project::init_settings(cx);
+ });
+}
@@ -1732,7 +1732,7 @@ mod tests {
use super::*;
use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle};
use pretty_assertions::assert_eq;
- use project::FakeFs;
+ use project::{project_settings::ProjectSettings, FakeFs};
use serde_json::json;
use settings::SettingsStore;
use std::{
@@ -1832,6 +1832,123 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
+ });
+ });
+ });
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/root1",
+ json!({
+ ".dockerignore": "",
+ ".git": {
+ "HEAD": "",
+ },
+ "a": {
+ "0": { "q": "", "r": "", "s": "" },
+ "1": { "t": "", "u": "" },
+ "2": { "v": "", "w": "", "x": "", "y": "" },
+ },
+ "b": {
+ "3": { "Q": "" },
+ "4": { "R": "", "S": "", "T": "", "U": "" },
+ },
+ "C": {
+ "5": {},
+ "6": { "V": "", "W": "" },
+ "7": { "X": "" },
+ "8": { "Y": {}, "Z": "" }
+ }
+ }),
+ )
+ .await;
+ fs.insert_tree(
+ "/root2",
+ json!({
+ "d": {
+ "4": ""
+ },
+ "e": {}
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
+ let workspace = cx
+ .add_window(|cx| Workspace::test_new(project.clone(), cx))
+ .root(cx);
+ let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " > b",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " > d",
+ " > e",
+ ]
+ );
+
+ toggle_expand_dir(&panel, "root1/b", cx);
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " v b <== selected",
+ " > 3",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " > d",
+ " > e",
+ ]
+ );
+
+ toggle_expand_dir(&panel, "root2/d", cx);
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " v b",
+ " > 3",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " v d <== selected",
+ " > e",
+ ]
+ );
+
+ toggle_expand_dir(&panel, "root2/e", cx);
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " v b",
+ " > 3",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " v d",
+ " v e <== selected",
+ ]
+ );
+ }
+
#[gpui::test(iterations = 30)]
async fn test_editing_files(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -2929,6 +3046,12 @@ mod tests {
workspace::init_settings(cx);
client::init_settings(cx);
Project::init_settings(cx);
+
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions = Some(Vec::new());
+ });
+ });
});
}
@@ -1,6 +1,6 @@
pub mod file_associations;
mod project_panel_settings;
-use settings::Settings;
+use settings::{Settings, SettingsStore};
use db::kvp::KEY_VALUE_STORE;
use editor::{scroll::autoscroll::Autoscroll, Cancel, Editor};
@@ -34,7 +34,7 @@ use ui::{h_stack, v_stack, IconElement, Label};
use unicase::UniCase;
use util::{maybe, ResultExt, TryFutureExt};
use workspace::{
- dock::{DockPosition, PanelEvent},
+ dock::{DockPosition, Panel, PanelEvent},
Workspace,
};
@@ -148,7 +148,6 @@ pub enum Event {
SplitEntry {
entry_id: ProjectEntryId,
},
- DockPositionChanged,
Focus,
NewSearchInDirectory {
dir_entry: Entry,
@@ -245,16 +244,17 @@ impl ProjectPanel {
this.update_visible_entries(None, cx);
// Update the dock position when the setting changes.
- // todo!()
- // let mut old_dock_position = this.position(cx);
- // cx.observe_global::<SettingsStore, _>(move |this, cx| {
- // let new_dock_position = this.position(cx);
- // if new_dock_position != old_dock_position {
- // old_dock_position = new_dock_position;
- // cx.emit(Event::DockPositionChanged);
- // }
- // })
- // .detach();
+ let mut old_dock_position = this.position(cx);
+ ProjectPanelSettings::register(cx);
+ cx.observe_global::<SettingsStore>(move |this, cx| {
+            // Settings changed; check whether the dock position moved.
+ let new_dock_position = this.position(cx);
+ if new_dock_position != old_dock_position {
+ old_dock_position = new_dock_position;
+ cx.emit(PanelEvent::ChangePosition);
+ }
+ })
+ .detach();
this
});
@@ -1486,7 +1486,7 @@ impl EventEmitter<Event> for ProjectPanel {}
impl EventEmitter<PanelEvent> for ProjectPanel {}
-impl workspace::dock::Panel for ProjectPanel {
+impl Panel for ProjectPanel {
fn position(&self, cx: &WindowContext) -> DockPosition {
match ProjectPanelSettings::get_global(cx).dock {
ProjectPanelDockPosition::Left => DockPosition::Left,
@@ -1572,7 +1572,7 @@ mod tests {
use super::*;
use gpui::{TestAppContext, View, VisualTestContext, WindowHandle};
use pretty_assertions::assert_eq;
- use project::FakeFs;
+ use project::{project_settings::ProjectSettings, FakeFs};
use serde_json::json;
use settings::SettingsStore;
use std::{
@@ -1673,6 +1673,124 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
+ });
+ });
+ });
+
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/root1",
+ json!({
+ ".dockerignore": "",
+ ".git": {
+ "HEAD": "",
+ },
+ "a": {
+ "0": { "q": "", "r": "", "s": "" },
+ "1": { "t": "", "u": "" },
+ "2": { "v": "", "w": "", "x": "", "y": "" },
+ },
+ "b": {
+ "3": { "Q": "" },
+ "4": { "R": "", "S": "", "T": "", "U": "" },
+ },
+ "C": {
+ "5": {},
+ "6": { "V": "", "W": "" },
+ "7": { "X": "" },
+ "8": { "Y": {}, "Z": "" }
+ }
+ }),
+ )
+ .await;
+ fs.insert_tree(
+ "/root2",
+ json!({
+ "d": {
+ "4": ""
+ },
+ "e": {}
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
+ let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
+ let cx = &mut VisualTestContext::from_window(*workspace, cx);
+ let panel = workspace
+ .update(cx, |workspace, cx| ProjectPanel::new(workspace, cx))
+ .unwrap();
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " > b",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " > d",
+ " > e",
+ ]
+ );
+
+ toggle_expand_dir(&panel, "root1/b", cx);
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " v b <== selected",
+ " > 3",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " > d",
+ " > e",
+ ]
+ );
+
+ toggle_expand_dir(&panel, "root2/d", cx);
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " v b",
+ " > 3",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " v d <== selected",
+ " > e",
+ ]
+ );
+
+ toggle_expand_dir(&panel, "root2/e", cx);
+ assert_eq!(
+ visible_entries_as_strings(&panel, 0..50, cx),
+ &[
+ "v root1",
+ " > a",
+ " v b",
+ " > 3",
+ " > C",
+ " .dockerignore",
+ "v root2",
+ " v d",
+ " v e <== selected",
+ ]
+ );
+ }
+
#[gpui::test(iterations = 30)]
async fn test_editing_files(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -2793,6 +2911,12 @@ mod tests {
workspace::init_settings(cx);
client::init_settings(cx);
Project::init_settings(cx);
+
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions = Some(Vec::new());
+ });
+ });
});
}
@@ -884,6 +884,7 @@ message SearchProject {
bool case_sensitive = 5;
string files_to_include = 6;
string files_to_exclude = 7;
+ bool include_ignored = 8;
}
message SearchProjectResponse {
@@ -884,6 +884,7 @@ message SearchProject {
bool case_sensitive = 5;
string files_to_include = 6;
string files_to_exclude = 7;
+ bool include_ignored = 8;
}
message SearchProjectResponse {
@@ -805,6 +805,7 @@ impl BufferSearchBar {
query,
self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+ false,
Vec::new(),
Vec::new(),
) {
@@ -820,6 +821,7 @@ impl BufferSearchBar {
query,
self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+ false,
Vec::new(),
Vec::new(),
) {
@@ -4,7 +4,7 @@ use crate::{
search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button},
ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery,
PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch,
- ToggleCaseSensitive, ToggleReplace, ToggleWholeWord,
+ ToggleCaseSensitive, ToggleIncludeIgnored, ToggleReplace, ToggleWholeWord,
};
use anyhow::{Context, Result};
use collections::HashMap;
@@ -85,6 +85,7 @@ pub fn init(cx: &mut AppContext) {
cx.capture_action(ProjectSearchView::replace_next);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
+ add_toggle_option_action::<ToggleIncludeIgnored>(SearchOptions::INCLUDE_IGNORED, cx);
add_toggle_filters_action::<ToggleFilters>(cx);
}
@@ -1192,6 +1193,7 @@ impl ProjectSearchView {
text,
self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+ self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
included_files,
excluded_files,
) {
@@ -1210,6 +1212,7 @@ impl ProjectSearchView {
text,
self.search_options.contains(SearchOptions::WHOLE_WORD),
self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+ self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
included_files,
excluded_files,
) {
@@ -1764,6 +1767,17 @@ impl View for ProjectSearchBar {
render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx)
});
+ let mut include_ignored = is_semantic_disabled.then(|| {
+ render_option_button_icon(
+ // TODO proper icon
+ "icons/case_insensitive.svg",
+ SearchOptions::INCLUDE_IGNORED,
+ cx,
+ )
+ });
+ // TODO not implemented yet
+ let _ = include_ignored.take();
+
let search_button_for_mode = |mode, side, cx: &mut ViewContext<ProjectSearchBar>| {
let is_active = if let Some(search) = self.active_project_search.as_ref() {
let search = search.read(cx);
@@ -1879,7 +1893,15 @@ impl View for ProjectSearchBar {
.with_children(search.filters_enabled.then(|| {
Flex::row()
.with_child(
- ChildView::new(&search.included_files_editor, cx)
+ Flex::row()
+ .with_child(
+ ChildView::new(&search.included_files_editor, cx)
+ .contained()
+ .constrained()
+ .with_height(theme.search.search_bar_row_height)
+ .flex(1., true),
+ )
+ .with_children(include_ignored)
.contained()
.with_style(include_container_style)
.constrained()
@@ -29,6 +29,7 @@ actions!(
CycleMode,
ToggleWholeWord,
ToggleCaseSensitive,
+ ToggleIncludeIgnored,
ToggleReplace,
SelectNextMatch,
SelectPrevMatch,
@@ -49,31 +50,35 @@ bitflags! {
const NONE = 0b000;
const WHOLE_WORD = 0b001;
const CASE_SENSITIVE = 0b010;
+ const INCLUDE_IGNORED = 0b100;
}
}
impl SearchOptions {
pub fn label(&self) -> &'static str {
match *self {
- SearchOptions::WHOLE_WORD => "Match Whole Word",
- SearchOptions::CASE_SENSITIVE => "Match Case",
- _ => panic!("{:?} is not a named SearchOption", self),
+ Self::WHOLE_WORD => "Match Whole Word",
+ Self::CASE_SENSITIVE => "Match Case",
+ Self::INCLUDE_IGNORED => "Include Ignored",
+ _ => panic!("{self:?} is not a named SearchOption"),
}
}
pub fn icon(&self) -> &'static str {
match *self {
- SearchOptions::WHOLE_WORD => "icons/word_search.svg",
- SearchOptions::CASE_SENSITIVE => "icons/case_insensitive.svg",
- _ => panic!("{:?} is not a named SearchOption", self),
+ Self::WHOLE_WORD => "icons/word_search.svg",
+ Self::CASE_SENSITIVE => "icons/case_insensitive.svg",
+ Self::INCLUDE_IGNORED => "icons/case_insensitive.svg",
+ _ => panic!("{self:?} is not a named SearchOption"),
}
}
pub fn to_toggle_action(&self) -> Box<dyn Action> {
match *self {
- SearchOptions::WHOLE_WORD => Box::new(ToggleWholeWord),
- SearchOptions::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
- _ => panic!("{:?} is not a named SearchOption", self),
+ Self::WHOLE_WORD => Box::new(ToggleWholeWord),
+ Self::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
+ Self::INCLUDE_IGNORED => Box::new(ToggleIncludeIgnored),
+ _ => panic!("{self:?} is not a named SearchOption"),
}
}
@@ -85,6 +90,7 @@ impl SearchOptions {
let mut options = SearchOptions::NONE;
options.set(SearchOptions::WHOLE_WORD, query.whole_word());
options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive());
+ options.set(SearchOptions::INCLUDE_IGNORED, query.include_ignored());
options
}
@@ -77,6 +77,7 @@ pub fn handle_settings_file_changes(
});
cx.spawn(move |mut cx| async move {
while let Some(user_settings_content) = user_settings_file_rx.next().await {
+            // Settings file changed on disk; re-parse and apply the new contents below.
let result = cx.update_global(|store: &mut SettingsStore, cx| {
store
.set_user_settings(&user_settings_content, cx)
@@ -9,11 +9,10 @@ pub mod terminal_panel;
// use crate::terminal_element::TerminalElement;
use editor::{scroll::autoscroll::Autoscroll, Editor};
use gpui::{
- actions, div, img, red, Action, AnyElement, AppContext, Component, DispatchPhase, Div,
- EventEmitter, FocusEvent, FocusHandle, Focusable, FocusableComponent, FocusableView,
- InputHandler, InteractiveComponent, KeyDownEvent, Keystroke, Model, MouseButton,
- ParentComponent, Pixels, Render, SharedString, Styled, Task, View, ViewContext, VisualContext,
- WeakView,
+ actions, div, Action, AnyElement, AppContext, Component, DispatchPhase, Div, EventEmitter,
+ FocusEvent, FocusHandle, Focusable, FocusableComponent, FocusableView, InputHandler,
+ InteractiveComponent, KeyDownEvent, Keystroke, Model, MouseButton, ParentComponent, Pixels,
+ Render, SharedString, Styled, Task, View, ViewContext, VisualContext, WeakView,
};
use language::Bias;
use persistence::TERMINAL_DB;
@@ -32,7 +31,7 @@ use workspace::{
notifications::NotifyResultExt,
register_deserializable_item,
searchable::{SearchEvent, SearchOptions, SearchableItem},
- ui::{ContextMenu, Label},
+ ui::{ContextMenu, Icon, IconElement, Label, ListEntry},
CloseActiveItem, NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId,
};
@@ -85,7 +84,7 @@ pub struct TerminalView {
has_new_content: bool,
//Currently using iTerm bell, show bell emoji in tab until input is received
has_bell: bool,
- context_menu: Option<View<ContextMenu>>,
+ context_menu: Option<View<ContextMenu<Self>>>,
blink_state: bool,
blinking_on: bool,
blinking_paused: bool,
@@ -300,11 +299,10 @@ impl TerminalView {
position: gpui::Point<Pixels>,
cx: &mut ViewContext<Self>,
) {
- self.context_menu = Some(cx.build_view(|cx| {
- ContextMenu::new(cx)
- .entry(Label::new("Clear"), Box::new(Clear))
- .entry(
- Label::new("Close"),
+ self.context_menu = Some(ContextMenu::build(cx, |menu, _| {
+ menu.action(ListEntry::new(Label::new("Clear")), Box::new(Clear))
+ .action(
+ ListEntry::new(Label::new("Close")),
Box::new(CloseActiveItem { save_intent: None }),
)
}));
@@ -756,7 +754,7 @@ impl Item for TerminalView {
let title = self.terminal().read(cx).title();
div()
- .child(img().uri("icons/terminal.svg").bg(red()))
+ .child(IconElement::new(Icon::Terminal))
.child(title)
.render()
}
@@ -4,57 +4,91 @@ use std::rc::Rc;
use crate::prelude::*;
use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHeader};
use gpui::{
- overlay, px, Action, AnchorCorner, AnyElement, Bounds, Dismiss, DispatchPhase, Div,
- FocusHandle, LayoutId, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render, View,
+ overlay, px, Action, AnchorCorner, AnyElement, AppContext, Bounds, DispatchPhase, Div,
+ EventEmitter, FocusHandle, FocusableView, LayoutId, ManagedView, Manager, MouseButton,
+ MouseDownEvent, Pixels, Point, Render, View, VisualContext, WeakView,
};
-pub struct ContextMenu {
- items: Vec<ListItem>,
+pub enum ContextMenuItem<V> {
+ Separator(ListSeparator),
+ Header(ListSubHeader),
+ Entry(
+ ListEntry<ContextMenu<V>>,
+ Rc<dyn Fn(&mut V, &mut ViewContext<V>)>,
+ ),
+}
+
+pub struct ContextMenu<V> {
+ items: Vec<ContextMenuItem<V>>,
focus_handle: FocusHandle,
+ handle: WeakView<V>,
}
-impl ManagedView for ContextMenu {
- fn focus_handle(&self, cx: &gpui::AppContext) -> FocusHandle {
+impl<V: Render> FocusableView for ContextMenu<V> {
+ fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
self.focus_handle.clone()
}
}
-impl ContextMenu {
- pub fn new(cx: &mut WindowContext) -> Self {
- Self {
- items: Default::default(),
- focus_handle: cx.focus_handle(),
- }
+impl<V: Render> EventEmitter<Manager> for ContextMenu<V> {}
+
+impl<V: Render> ContextMenu<V> {
+ pub fn build(
+ cx: &mut ViewContext<V>,
+ f: impl FnOnce(Self, &mut ViewContext<Self>) -> Self,
+ ) -> View<Self> {
+ let handle = cx.view().downgrade();
+ cx.build_view(|cx| {
+ f(
+ Self {
+ handle,
+ items: Default::default(),
+ focus_handle: cx.focus_handle(),
+ },
+ cx,
+ )
+ })
}
pub fn header(mut self, title: impl Into<SharedString>) -> Self {
- self.items.push(ListItem::Header(ListSubHeader::new(title)));
+ self.items
+ .push(ContextMenuItem::Header(ListSubHeader::new(title)));
self
}
pub fn separator(mut self) -> Self {
- self.items.push(ListItem::Separator(ListSeparator));
+ self.items.push(ContextMenuItem::Separator(ListSeparator));
self
}
- pub fn entry(mut self, label: Label, action: Box<dyn Action>) -> Self {
- self.items.push(ListEntry::new(label).action(action).into());
+ pub fn entry(
+ mut self,
+ view: ListEntry<Self>,
+ on_click: impl Fn(&mut V, &mut ViewContext<V>) + 'static,
+ ) -> Self {
+ self.items
+ .push(ContextMenuItem::Entry(view, Rc::new(on_click)));
self
}
+ pub fn action(self, view: ListEntry<Self>, action: Box<dyn Action>) -> Self {
+ // todo: add the keybindings to the list entry
+ self.entry(view, move |_, cx| cx.dispatch_action(action.boxed_clone()))
+ }
+
pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
// todo!()
- cx.emit(Dismiss);
+ cx.emit(Manager::Dismiss);
}
pub fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
- cx.emit(Dismiss);
+ cx.emit(Manager::Dismiss);
}
}
-impl Render for ContextMenu {
+impl<V: Render> Render for ContextMenu<V> {
type Element = Div<Self>;
- // todo!()
+
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
div().elevation_2(cx).flex().flex_row().child(
v_stack()
@@ -71,7 +105,25 @@ impl Render for ContextMenu {
// .bg(cx.theme().colors().elevated_surface_background)
// .border()
// .border_color(cx.theme().colors().border)
- .child(List::new(self.items.clone())),
+ .child(List::new(
+ self.items
+ .iter()
+ .map(|item| match item {
+ ContextMenuItem::Separator(separator) => {
+ ListItem::Separator(separator.clone())
+ }
+ ContextMenuItem::Header(header) => ListItem::Header(header.clone()),
+ ContextMenuItem::Entry(entry, callback) => {
+ let callback = callback.clone();
+ let handle = self.handle.clone();
+ ListItem::Entry(entry.clone().on_click(move |this, cx| {
+ handle.update(cx, |view, cx| callback(view, cx)).ok();
+ cx.emit(Manager::Dismiss);
+ }))
+ }
+ })
+ .collect(),
+ )),
)
}
}
@@ -226,12 +278,13 @@ impl<V: 'static, M: ManagedView> Element<V> for MenuHandle<V, M> {
let new_menu = (builder)(view_state, cx);
let menu2 = menu.clone();
cx.subscribe(&new_menu, move |this, modal, e, cx| match e {
- &Dismiss => {
+ &Manager::Dismiss => {
*menu2.borrow_mut() = None;
cx.notify();
}
})
.detach();
+ cx.focus_view(&new_menu);
*menu.borrow_mut() = Some(new_menu);
*position.borrow_mut() = if attach.is_some() && child_layout_id.is_some() {
@@ -260,16 +313,25 @@ pub use stories::*;
mod stories {
use super::*;
use crate::story::Story;
- use gpui::{actions, Div, Render, VisualContext};
-
- actions!(PrintCurrentDate);
-
- fn build_menu(cx: &mut WindowContext, header: impl Into<SharedString>) -> View<ContextMenu> {
- cx.build_view(|cx| {
- ContextMenu::new(cx).header(header).separator().entry(
- Label::new("Print current time"),
- PrintCurrentDate.boxed_clone(),
- )
+ use gpui::{actions, Div, Render};
+
+ actions!(PrintCurrentDate, PrintBestFood);
+
+ fn build_menu<V: Render>(
+ cx: &mut ViewContext<V>,
+ header: impl Into<SharedString>,
+ ) -> View<ContextMenu<V>> {
+ let handle = cx.view().clone();
+ ContextMenu::build(cx, |menu, _| {
+ menu.header(header)
+ .separator()
+ .entry(ListEntry::new(Label::new("Print current time")), |v, cx| {
+ println!("dispatching PrintCurrentTime action");
+ cx.dispatch_action(PrintCurrentDate.boxed_clone())
+ })
+ .entry(ListEntry::new(Label::new("Print best food")), |v, cx| {
+ cx.dispatch_action(PrintBestFood.boxed_clone())
+ })
})
}
@@ -281,10 +343,14 @@ mod stories {
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
Story::container(cx)
.on_action(|_, _: &PrintCurrentDate, _| {
+ println!("printing unix time!");
if let Ok(unix_time) = std::time::UNIX_EPOCH.elapsed() {
println!("Current Unix time is {:?}", unix_time.as_secs());
}
})
+ .on_action(|_, _: &PrintBestFood, _| {
+ println!("burrito");
+ })
.flex()
.flex_row()
.justify_between()
@@ -1,4 +1,6 @@
-use gpui::{div, Action};
+use std::rc::Rc;
+
+use gpui::{div, Div, Stateful, StatefulInteractiveComponent};
use crate::settings::user_settings;
use crate::{
@@ -172,35 +174,35 @@ pub enum ListEntrySize {
Medium,
}
-#[derive(Component, Clone)]
-pub enum ListItem {
- Entry(ListEntry),
+#[derive(Clone)]
+pub enum ListItem<V: 'static> {
+ Entry(ListEntry<V>),
Separator(ListSeparator),
Header(ListSubHeader),
}
-impl From<ListEntry> for ListItem {
- fn from(entry: ListEntry) -> Self {
+impl<V: 'static> From<ListEntry<V>> for ListItem<V> {
+ fn from(entry: ListEntry<V>) -> Self {
Self::Entry(entry)
}
}
-impl From<ListSeparator> for ListItem {
+impl<V: 'static> From<ListSeparator> for ListItem<V> {
fn from(entry: ListSeparator) -> Self {
Self::Separator(entry)
}
}
-impl From<ListSubHeader> for ListItem {
+impl<V: 'static> From<ListSubHeader> for ListItem<V> {
fn from(entry: ListSubHeader) -> Self {
Self::Header(entry)
}
}
-impl ListItem {
- fn render<V: 'static>(self, view: &mut V, cx: &mut ViewContext<V>) -> impl Component<V> {
+impl<V: 'static> ListItem<V> {
+ fn render(self, view: &mut V, ix: usize, cx: &mut ViewContext<V>) -> impl Component<V> {
match self {
- ListItem::Entry(entry) => div().child(entry.render(view, cx)),
+ ListItem::Entry(entry) => div().child(entry.render(ix, cx)),
ListItem::Separator(separator) => div().child(separator.render(view, cx)),
ListItem::Header(header) => div().child(header.render(view, cx)),
}
@@ -210,7 +212,7 @@ impl ListItem {
Self::Entry(ListEntry::new(label))
}
- pub fn as_entry(&mut self) -> Option<&mut ListEntry> {
+ pub fn as_entry(&mut self) -> Option<&mut ListEntry<V>> {
if let Self::Entry(entry) = self {
Some(entry)
} else {
@@ -219,8 +221,7 @@ impl ListItem {
}
}
-#[derive(Component)]
-pub struct ListEntry {
+pub struct ListEntry<V> {
disabled: bool,
// TODO: Reintroduce this
// disclosure_control_style: DisclosureControlVisibility,
@@ -231,15 +232,13 @@ pub struct ListEntry {
size: ListEntrySize,
toggle: Toggle,
variant: ListItemVariant,
- on_click: Option<Box<dyn Action>>,
+ on_click: Option<Rc<dyn Fn(&mut V, &mut ViewContext<V>) + 'static>>,
}
-impl Clone for ListEntry {
+impl<V> Clone for ListEntry<V> {
fn clone(&self) -> Self {
Self {
disabled: self.disabled,
- // TODO: Reintroduce this
- // disclosure_control_style: DisclosureControlVisibility,
indent_level: self.indent_level,
label: self.label.clone(),
left_slot: self.left_slot.clone(),
@@ -247,12 +246,12 @@ impl Clone for ListEntry {
size: self.size,
toggle: self.toggle,
variant: self.variant,
- on_click: self.on_click.as_ref().map(|opt| opt.boxed_clone()),
+ on_click: self.on_click.clone(),
}
}
}
-impl ListEntry {
+impl<V: 'static> ListEntry<V> {
pub fn new(label: Label) -> Self {
Self {
disabled: false,
@@ -267,8 +266,8 @@ impl ListEntry {
}
}
- pub fn action(mut self, action: impl Into<Box<dyn Action>>) -> Self {
- self.on_click = Some(action.into());
+ pub fn on_click(mut self, handler: impl Fn(&mut V, &mut ViewContext<V>) + 'static) -> Self {
+ self.on_click = Some(Rc::new(handler));
self
}
@@ -307,7 +306,7 @@ impl ListEntry {
self
}
- fn render<V: 'static>(self, _view: &mut V, cx: &mut ViewContext<V>) -> impl Component<V> {
+ fn render(self, ix: usize, cx: &mut ViewContext<V>) -> Stateful<V, Div<V>> {
let settings = user_settings(cx);
let left_content = match self.left_slot.clone() {
@@ -328,21 +327,21 @@ impl ListEntry {
ListEntrySize::Medium => div().h_7(),
};
div()
+ .id(ix)
.relative()
.hover(|mut style| {
style.background = Some(cx.theme().colors().editor_background.into());
style
})
- .on_mouse_down(gpui::MouseButton::Left, {
- let action = self.on_click.map(|action| action.boxed_clone());
+ .on_click({
+ let on_click = self.on_click.clone();
- move |entry: &mut V, event, cx| {
- if let Some(action) = action.as_ref() {
- cx.dispatch_action(action.boxed_clone());
+ move |view: &mut V, event, cx| {
+ if let Some(on_click) = &on_click {
+ (on_click)(view, cx)
}
}
})
- .group("")
.bg(cx.theme().colors().surface_background)
// TODO: Add focus state
// .when(self.state == InteractionState::Focused, |this| {
@@ -391,8 +390,8 @@ impl ListSeparator {
}
#[derive(Component)]
-pub struct List {
- items: Vec<ListItem>,
+pub struct List<V: 'static> {
+ items: Vec<ListItem<V>>,
/// Message to display when the list is empty
/// Defaults to "No items"
empty_message: SharedString,
@@ -400,8 +399,8 @@ pub struct List {
toggle: Toggle,
}
-impl List {
- pub fn new(items: Vec<ListItem>) -> Self {
+impl<V: 'static> List<V> {
+ pub fn new(items: Vec<ListItem<V>>) -> Self {
Self {
items,
empty_message: "No items".into(),
@@ -425,9 +424,14 @@ impl List {
self
}
- fn render<V: 'static>(self, _view: &mut V, cx: &mut ViewContext<V>) -> impl Component<V> {
+ fn render(self, view: &mut V, cx: &mut ViewContext<V>) -> impl Component<V> {
let list_content = match (self.items.is_empty(), self.toggle) {
- (false, _) => div().children(self.items),
+ (false, _) => div().children(
+ self.items
+ .into_iter()
+ .enumerate()
+ .map(|(ix, item)| item.render(view, ix, cx)),
+ ),
(true, Toggle::Toggled(false)) => div(),
(true, _) => {
div().child(Label::new(self.empty_message.clone()).color(TextColor::Muted))
@@ -478,7 +478,7 @@ pub fn static_new_notification_items_2<V: 'static>() -> Vec<Notification<V>> {
]
}
-pub fn static_project_panel_project_items() -> Vec<ListItem> {
+pub fn static_project_panel_project_items<V>() -> Vec<ListItem<V>> {
vec![
ListEntry::new(Label::new("zed"))
.left_icon(Icon::FolderOpen.into())
@@ -605,7 +605,7 @@ pub fn static_project_panel_project_items() -> Vec<ListItem> {
.collect()
}
-pub fn static_project_panel_single_items() -> Vec<ListItem> {
+pub fn static_project_panel_single_items<V>() -> Vec<ListItem<V>> {
vec![
ListEntry::new(Label::new("todo.md"))
.left_icon(Icon::FileDoc.into())
@@ -622,7 +622,7 @@ pub fn static_project_panel_single_items() -> Vec<ListItem> {
.collect()
}
-pub fn static_collab_panel_current_call() -> Vec<ListItem> {
+pub fn static_collab_panel_current_call<V>() -> Vec<ListItem<V>> {
vec![
ListEntry::new(Label::new("as-cii")).left_avatar("http://github.com/as-cii.png?s=50"),
ListEntry::new(Label::new("nathansobo"))
@@ -635,7 +635,7 @@ pub fn static_collab_panel_current_call() -> Vec<ListItem> {
.collect()
}
-pub fn static_collab_panel_channels() -> Vec<ListItem> {
+pub fn static_collab_panel_channels<V>() -> Vec<ListItem<V>> {
vec![
ListEntry::new(Label::new("zed"))
.left_icon(Icon::Hash.into())
@@ -202,6 +202,14 @@ impl std::fmt::Display for PathMatcher {
}
}
+impl PartialEq for PathMatcher {
+ fn eq(&self, other: &Self) -> bool {
+ self.maybe_path.eq(&other.maybe_path)
+ }
+}
+
+impl Eq for PathMatcher {}
+
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
@@ -211,7 +219,19 @@ impl PathMatcher {
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
- other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
+ other.as_ref().starts_with(&self.maybe_path)
+ || self.glob.is_match(&other)
+ || self.check_with_end_separator(other.as_ref())
+ }
+
+ fn check_with_end_separator(&self, path: &Path) -> bool {
+ let path_str = path.to_string_lossy();
+ let separator = std::path::MAIN_SEPARATOR_STR;
+ if path_str.ends_with(separator) {
+ self.glob.is_match(path)
+ } else {
+ self.glob.is_match(path_str.to_string() + separator)
+ }
}
}
@@ -388,4 +408,14 @@ mod tests {
let path = Path::new("/a/b/c/.eslintrc.js");
assert_eq!(path.extension_or_hidden_file_name(), Some("js"));
}
+
+ #[test]
+ fn edge_of_glob() {
+ let path = Path::new("/work/node_modules");
+ let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
+ assert!(
+ path_matcher.is_match(&path),
+ "Path matcher {path_matcher} should match {path:?}"
+ );
+ }
}
@@ -8,7 +8,9 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use theme2::ActiveTheme;
-use ui::{h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Tooltip};
+use ui::{
+ h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Label, ListEntry, Tooltip,
+};
pub enum PanelEvent {
ChangePosition,
@@ -40,7 +42,7 @@ pub trait Panel: FocusableView + EventEmitter<PanelEvent> {
}
pub trait PanelHandle: Send + Sync {
- fn id(&self) -> EntityId;
+ fn entity_id(&self) -> EntityId;
fn persistent_name(&self) -> &'static str;
fn position(&self, cx: &WindowContext) -> DockPosition;
fn position_is_valid(&self, position: DockPosition, cx: &WindowContext) -> bool;
@@ -62,8 +64,8 @@ impl<T> PanelHandle for View<T>
where
T: Panel,
{
- fn id(&self) -> EntityId {
- self.entity_id()
+ fn entity_id(&self) -> EntityId {
+ Entity::entity_id(self)
}
fn persistent_name(&self) -> &'static str {
@@ -254,20 +256,19 @@ impl Dock {
}
}
- // todo!()
- // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
- // for entry in &mut self.panel_entries {
- // if entry.panel.as_any() == panel {
- // if zoomed != entry.panel.is_zoomed(cx) {
- // entry.panel.set_zoomed(zoomed, cx);
- // }
- // } else if entry.panel.is_zoomed(cx) {
- // entry.panel.set_zoomed(false, cx);
- // }
- // }
+ pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
+ for entry in &mut self.panel_entries {
+ if entry.panel.entity_id() == panel.entity_id() {
+ if zoomed != entry.panel.is_zoomed(cx) {
+ entry.panel.set_zoomed(zoomed, cx);
+ }
+ } else if entry.panel.is_zoomed(cx) {
+ entry.panel.set_zoomed(false, cx);
+ }
+ }
- // cx.notify();
- // }
+ cx.notify();
+ }
pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
for entry in &mut self.panel_entries {
@@ -277,42 +278,91 @@ impl Dock {
}
}
- pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) {
+ pub(crate) fn add_panel<T: Panel>(
+ &mut self,
+ panel: View<T>,
+ workspace: WeakView<Workspace>,
+ cx: &mut ViewContext<Self>,
+ ) {
let subscriptions = [
cx.observe(&panel, |_, _, cx| cx.notify()),
- cx.subscribe(&panel, |this, panel, event, cx| {
- match event {
- PanelEvent::ChangePosition => {
- //todo!()
- // see: Workspace::add_panel_with_extra_event_handler
- }
- PanelEvent::ZoomIn => {
- //todo!()
- // see: Workspace::add_panel_with_extra_event_handler
- }
- PanelEvent::ZoomOut => {
- // todo!()
- // // see: Workspace::add_panel_with_extra_event_handler
- }
- PanelEvent::Activate => {
- if let Some(ix) = this
- .panel_entries
- .iter()
- .position(|entry| entry.panel.id() == panel.id())
- {
- this.set_open(true, cx);
- this.activate_panel(ix, cx);
- //` todo!()
- // cx.focus(&panel);
+ cx.subscribe(&panel, move |this, panel, event, cx| match event {
+ PanelEvent::ChangePosition => {
+ let new_position = panel.read(cx).position(cx);
+
+ let Ok(new_dock) = workspace.update(cx, |workspace, cx| {
+ if panel.is_zoomed(cx) {
+ workspace.zoomed_position = Some(new_position);
}
- }
- PanelEvent::Close => {
- if this.visible_panel().map_or(false, |p| p.id() == panel.id()) {
- this.set_open(false, cx);
+ match new_position {
+ DockPosition::Left => &workspace.left_dock,
+ DockPosition::Bottom => &workspace.bottom_dock,
+ DockPosition::Right => &workspace.right_dock,
+ }
+ .clone()
+ }) else {
+ return;
+ };
+
+ let was_visible = this.is_open()
+ && this.visible_panel().map_or(false, |active_panel| {
+ active_panel.entity_id() == Entity::entity_id(&panel)
+ });
+
+ this.remove_panel(&panel, cx);
+
+ new_dock.update(cx, |new_dock, cx| {
+ new_dock.add_panel(panel.clone(), workspace.clone(), cx);
+ if was_visible {
+ new_dock.set_open(true, cx);
+ new_dock.activate_panel(this.panels_len() - 1, cx);
}
+ });
+ }
+ PanelEvent::ZoomIn => {
+ this.set_panel_zoomed(&panel.to_any(), true, cx);
+ if !panel.has_focus(cx) {
+ cx.focus_view(&panel);
+ }
+ workspace
+ .update(cx, |workspace, cx| {
+ workspace.zoomed = Some(panel.downgrade().into());
+ workspace.zoomed_position = Some(panel.read(cx).position(cx));
+ })
+ .ok();
+ }
+ PanelEvent::ZoomOut => {
+ this.set_panel_zoomed(&panel.to_any(), false, cx);
+ workspace
+ .update(cx, |workspace, cx| {
+ if workspace.zoomed_position == Some(this.position) {
+ workspace.zoomed = None;
+ workspace.zoomed_position = None;
+ }
+ cx.notify();
+ })
+ .ok();
+ }
+ PanelEvent::Activate => {
+ if let Some(ix) = this
+ .panel_entries
+ .iter()
+ .position(|entry| entry.panel.entity_id() == Entity::entity_id(&panel))
+ {
+ this.set_open(true, cx);
+ this.activate_panel(ix, cx);
+ cx.focus_view(&panel);
+ }
+ }
+ PanelEvent::Close => {
+ if this
+ .visible_panel()
+ .map_or(false, |p| p.entity_id() == Entity::entity_id(&panel))
+ {
+ this.set_open(false, cx);
}
- PanelEvent::Focus => todo!(),
}
+ PanelEvent::Focus => todo!(),
}),
];
@@ -335,7 +385,7 @@ impl Dock {
if let Some(panel_ix) = self
.panel_entries
.iter()
- .position(|entry| entry.panel.id() == panel.id())
+ .position(|entry| entry.panel.entity_id() == Entity::entity_id(panel))
{
if panel_ix == self.active_panel_index {
self.active_panel_index = 0;
@@ -396,7 +446,7 @@ impl Dock {
pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> {
self.panel_entries
.iter()
- .find(|entry| entry.panel.id() == panel.id())
+ .find(|entry| entry.panel.entity_id() == panel.entity_id())
.map(|entry| entry.panel.size(cx))
}
@@ -620,6 +670,7 @@ impl Render for PanelButtons {
let dock = self.dock.read(cx);
let active_index = dock.active_panel_index;
let is_open = dock.is_open;
+ let dock_position = dock.position;
let (menu_anchor, menu_attach) = match dock.position {
DockPosition::Left => (AnchorCorner::BottomLeft, AnchorCorner::TopLeft),
@@ -632,9 +683,10 @@ impl Render for PanelButtons {
.panel_entries
.iter()
.enumerate()
- .filter_map(|(i, panel)| {
- let icon = panel.panel.icon(cx)?;
- let name = panel.panel.persistent_name();
+ .filter_map(|(i, entry)| {
+ let icon = entry.panel.icon(cx)?;
+ let name = entry.panel.persistent_name();
+ let panel = entry.panel.clone();
let mut button: IconButton<Self> = if i == active_index && is_open {
let action = dock.toggle_action();
@@ -645,7 +697,7 @@ impl Render for PanelButtons {
.action(action.boxed_clone())
.tooltip(move |_, cx| Tooltip::for_action(tooltip.clone(), &*action, cx))
} else {
- let action = panel.panel.toggle_action(cx);
+ let action = entry.panel.toggle_action(cx);
IconButton::new(name, icon)
.action(action.boxed_clone())
@@ -656,7 +708,30 @@ impl Render for PanelButtons {
menu_handle()
.id(name)
.menu(move |_, cx| {
- cx.build_view(|cx| ContextMenu::new(cx).header("SECTION"))
+ const POSITIONS: [DockPosition; 3] = [
+ DockPosition::Left,
+ DockPosition::Right,
+ DockPosition::Bottom,
+ ];
+ ContextMenu::build(cx, |mut menu, cx| {
+ for position in POSITIONS {
+ if position != dock_position
+ && panel.position_is_valid(position, cx)
+ {
+ let panel = panel.clone();
+ menu = menu.entry(
+ ListEntry::new(Label::new(format!(
+ "Dock {}",
+ position.to_label()
+ ))),
+ move |_, cx| {
+ panel.set_position(position, cx);
+ },
+ )
+ }
+ }
+ menu
+ })
})
.anchor(menu_anchor)
.attach(menu_attach)
@@ -813,7 +813,9 @@ impl Workspace {
DockPosition::Right => &self.right_dock,
};
- dock.update(cx, |dock, cx| dock.add_panel(panel, cx));
+ dock.update(cx, |dock, cx| {
+ dock.add_panel(panel, self.weak_self.clone(), cx)
+ });
}
pub fn status_bar(&self) -> &View<StatusBar> {