From 6680e1e9feda6d7cc1bd1bc2c8745515c7917d3a Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 17 Nov 2023 11:46:25 +0200 Subject: [PATCH 01/75] Create new Zed release channel: nightly --- .github/workflows/ci.yml | 6 +++- crates/auto_update/src/auto_update.rs | 41 ++++++++++++++------------- crates/client/src/client.rs | 24 ++++++++++------ crates/client/src/telemetry.rs | 2 +- crates/client2/src/client2.rs | 22 ++++++++------ crates/client2/src/telemetry.rs | 2 +- crates/util/src/channel.rs | 19 +++++++++++++ crates/zed/Cargo.toml | 10 ++++++- crates/zed/src/only_instance.rs | 2 ++ crates/zed2/Cargo.toml | 9 ++++++ crates/zed2/src/only_instance.rs | 2 ++ script/bump-zed-minor-versions | 5 ++-- script/bump-zed-patch-version | 5 +++- script/deploy | 1 + script/deploy-migration | 3 +- script/what-is-deployed | 1 + 16 files changed, 111 insertions(+), 43 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 60dc4c1f528a34ae2e74fc237650ab7a21516fb8..39b49da8ed038e388392ba289ad4ef4cd95a5af4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -130,6 +130,8 @@ jobs: expected_tag_name="v${version}";; preview) expected_tag_name="v${version}-pre";; + nightly) + expected_tag_name="v${version}-nightly";; *) echo "can't publish a release on channel ${channel}" exit 1;; @@ -154,7 +156,9 @@ jobs: - uses: softprops/action-gh-release@v1 name: Upload app bundle to release - if: ${{ env.RELEASE_CHANNEL }} + # TODO kb seems that zed.dev relies on GitHub releases for release version tracking. + # Find alternatives for `nightly` or just go on with more releases? 
+ if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }} with: draft: true prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 0d537b882a85fe5e7ce54f1270c8d7b28de1f9c4..36690bcd2555ce9497b85ee6fd7f0396eef7c08b 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -118,14 +118,20 @@ fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) { let auto_updater = auto_updater.read(cx); let server_url = &auto_updater.server_url; let current_version = auto_updater.current_version; - let latest_release_url = if cx.has_global::() - && *cx.global::() == ReleaseChannel::Preview - { - format!("{server_url}/releases/preview/{current_version}") - } else { - format!("{server_url}/releases/stable/{current_version}") - }; - cx.platform().open_url(&latest_release_url); + if cx.has_global::() { + match cx.global::() { + ReleaseChannel::Dev => {} + ReleaseChannel::Nightly => cx + .platform() + .open_url(&format!("{server_url}/releases/nightly/{current_version}")), + ReleaseChannel::Preview => cx + .platform() + .open_url(&format!("{server_url}/releases/preview/{current_version}")), + ReleaseChannel::Stable => cx + .platform() + .open_url(&format!("{server_url}/releases/stable/{current_version}")), + } + } } } @@ -224,22 +230,19 @@ impl AutoUpdater { ) }); - let preview_param = cx.read(|cx| { + let mut url_string = format!( + "{server_url}/api/releases/latest?token={ZED_SECRET_CLIENT_TOKEN}&asset=Zed.dmg" + ); + cx.read(|cx| { if cx.has_global::() { - if *cx.global::() == ReleaseChannel::Preview { - return "&preview=1"; + if let Some(param) = cx.global::().release_query_param() { + url_string += "&"; + url_string += param; } } - "" }); - let mut response = client - .get( - &format!("{server_url}/api/releases/latest?token={ZED_SECRET_CLIENT_TOKEN}&asset=Zed.dmg{preview_param}"), - Default::default(), - true, - ) - 
.await?; + let mut response = client.get(&url_string, Default::default(), true).await?; let mut body = Vec::new(); response diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 9f63d0e2bed327fd306692a0c28952ea18c854fd..a14088cc50066283771b50bd3a33d92f750ab8fc 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -987,9 +987,17 @@ impl Client { self.establish_websocket_connection(credentials, cx) } - async fn get_rpc_url(http: Arc, is_preview: bool) -> Result { - let preview_param = if is_preview { "?preview=1" } else { "" }; - let url = format!("{}/rpc{preview_param}", *ZED_SERVER_URL); + async fn get_rpc_url( + http: Arc, + release_channel: Option, + ) -> Result { + let mut url = format!("{}/rpc", *ZED_SERVER_URL); + if let Some(preview_param) = + release_channel.and_then(|channel| channel.release_query_param()) + { + url += "?"; + url += preview_param; + } let response = http.get(&url, Default::default(), false).await?; // Normally, ZED_SERVER_URL is set to the URL of zed.dev website. @@ -1024,11 +1032,11 @@ impl Client { credentials: &Credentials, cx: &AsyncAppContext, ) -> Task> { - let use_preview_server = cx.read(|cx| { + let release_channel = cx.read(|cx| { if cx.has_global::() { - *cx.global::() != ReleaseChannel::Stable + Some(*cx.global::()) } else { - false + None } }); @@ -1041,7 +1049,7 @@ impl Client { let http = self.http.clone(); cx.background().spawn(async move { - let mut rpc_url = Self::get_rpc_url(http, use_preview_server).await?; + let mut rpc_url = Self::get_rpc_url(http, release_channel).await?; let rpc_host = rpc_url .host_str() .zip(rpc_url.port_or_known_default()) @@ -1191,7 +1199,7 @@ impl Client { // Use the collab server's admin API to retrieve the id // of the impersonated user. 
- let mut url = Self::get_rpc_url(http.clone(), false).await?; + let mut url = Self::get_rpc_url(http.clone(), None).await?; url.set_path("/user"); url.set_query(Some(&format!("github_login={login}"))); let request = Request::get(url.as_str()) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index fd93aaeec890437968d50854ca83534c62ed4d36..ad2b29c3887731de0d5e477dc344154eba54d0d1 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -20,7 +20,7 @@ pub struct Telemetry { #[derive(Default)] struct TelemetryState { metrics_id: Option>, // Per logged-in user - installation_id: Option>, // Per app installation (different for dev, preview, and stable) + installation_id: Option>, // Per app installation (different for dev, nightly, preview, and stable) session_id: Option>, // Per app launch app_version: Option>, release_channel: Option<&'static str>, diff --git a/crates/client2/src/client2.rs b/crates/client2/src/client2.rs index 93ec7f329bfab51f11d3689904cbc4d5edb62071..b4279b023ecd7412d8ea0c4a69ddc0215be97fb2 100644 --- a/crates/client2/src/client2.rs +++ b/crates/client2/src/client2.rs @@ -923,9 +923,17 @@ impl Client { self.establish_websocket_connection(credentials, cx) } - async fn get_rpc_url(http: Arc, is_preview: bool) -> Result { - let preview_param = if is_preview { "?preview=1" } else { "" }; - let url = format!("{}/rpc{preview_param}", *ZED_SERVER_URL); + async fn get_rpc_url( + http: Arc, + release_channel: Option, + ) -> Result { + let mut url = format!("{}/rpc", *ZED_SERVER_URL); + if let Some(preview_param) = + release_channel.and_then(|channel| channel.release_query_param()) + { + url += "?"; + url += preview_param; + } let response = http.get(&url, Default::default(), false).await?; // Normally, ZED_SERVER_URL is set to the URL of zed.dev website. 
@@ -960,9 +968,7 @@ impl Client { credentials: &Credentials, cx: &AsyncAppContext, ) -> Task> { - let use_preview_server = cx - .try_read_global(|channel: &ReleaseChannel, _| *channel != ReleaseChannel::Stable) - .unwrap_or(false); + let release_channel = cx.try_read_global(|channel: &ReleaseChannel, _| *channel); let request = Request::builder() .header( @@ -973,7 +979,7 @@ impl Client { let http = self.http.clone(); cx.background_executor().spawn(async move { - let mut rpc_url = Self::get_rpc_url(http, use_preview_server).await?; + let mut rpc_url = Self::get_rpc_url(http, release_channel).await?; let rpc_host = rpc_url .host_str() .zip(rpc_url.port_or_known_default()) @@ -1120,7 +1126,7 @@ impl Client { // Use the collab server's admin API to retrieve the id // of the impersonated user. - let mut url = Self::get_rpc_url(http.clone(), false).await?; + let mut url = Self::get_rpc_url(http.clone(), None).await?; url.set_path("/user"); url.set_query(Some(&format!("github_login={login}"))); let request = Request::get(url.as_str()) diff --git a/crates/client2/src/telemetry.rs b/crates/client2/src/telemetry.rs index 3723f7b906a03f881040c4a351b3ef55b23749dd..cf5b3b765bcade42878be3746c63f827e611d160 100644 --- a/crates/client2/src/telemetry.rs +++ b/crates/client2/src/telemetry.rs @@ -20,7 +20,7 @@ pub struct Telemetry { struct TelemetryState { metrics_id: Option>, // Per logged-in user - installation_id: Option>, // Per app installation (different for dev, preview, and stable) + installation_id: Option>, // Per app installation (different for dev, nightly, preview, and stable) session_id: Option>, // Per app launch release_channel: Option<&'static str>, app_metadata: AppMetadata, diff --git a/crates/util/src/channel.rs b/crates/util/src/channel.rs index 47c6a570a1f84c4f0fb16c24df650082d5aee60e..fa94bfe1ecfa185f442042090c76d5dbb1684404 100644 --- a/crates/util/src/channel.rs +++ b/crates/util/src/channel.rs @@ -11,6 +11,7 @@ lazy_static! 
{ }; pub static ref RELEASE_CHANNEL: ReleaseChannel = match RELEASE_CHANNEL_NAME.as_str() { "dev" => ReleaseChannel::Dev, + "nightly" => ReleaseChannel::Nightly, "preview" => ReleaseChannel::Preview, "stable" => ReleaseChannel::Stable, _ => panic!("invalid release channel {}", *RELEASE_CHANNEL_NAME), @@ -21,6 +22,7 @@ lazy_static! { pub enum ReleaseChannel { #[default] Dev, + Nightly, Preview, Stable, } @@ -29,6 +31,7 @@ impl ReleaseChannel { pub fn display_name(&self) -> &'static str { match self { ReleaseChannel::Dev => "Zed Dev", + ReleaseChannel::Nightly => "Zed Nightly", ReleaseChannel::Preview => "Zed Preview", ReleaseChannel::Stable => "Zed", } @@ -37,6 +40,8 @@ impl ReleaseChannel { pub fn dev_name(&self) -> &'static str { match self { ReleaseChannel::Dev => "dev", + // TODO kb need to add DB data + ReleaseChannel::Nightly => "nightly", ReleaseChannel::Preview => "preview", ReleaseChannel::Stable => "stable", } @@ -45,6 +50,7 @@ impl ReleaseChannel { pub fn url_scheme(&self) -> &'static str { match self { ReleaseChannel::Dev => "zed-dev://", + ReleaseChannel::Nightly => "zed-nightly://", ReleaseChannel::Preview => "zed-preview://", ReleaseChannel::Stable => "zed://", } @@ -53,15 +59,28 @@ impl ReleaseChannel { pub fn link_prefix(&self) -> &'static str { match self { ReleaseChannel::Dev => "https://zed.dev/dev/", + // TODO kb need to add server handling + ReleaseChannel::Nightly => "https://zed.dev/nightly/", ReleaseChannel::Preview => "https://zed.dev/preview/", ReleaseChannel::Stable => "https://zed.dev/", } } + + pub fn release_query_param(&self) -> Option<&'static str> { + match self { + Self::Dev => None, + // TODO kb need to add server handling + Self::Nightly => Some("nightly=1"), + Self::Preview => Some("preview=1"), + Self::Stable => None, + } + } } pub fn parse_zed_link(link: &str) -> Option<&str> { for release in [ ReleaseChannel::Dev, + ReleaseChannel::Nightly, ReleaseChannel::Preview, ReleaseChannel::Stable, ] { diff --git 
a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 028653696ac7a13d979044a36277711413b1c874..ab8d5b7efeb742da1fd6591833526246425b8579 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -170,6 +170,15 @@ osx_minimum_system_version = "10.15.7" osx_info_plist_exts = ["resources/info/*"] osx_url_schemes = ["zed-dev"] +[package.metadata.bundle-nightly] +# TODO kb different icon? +icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"] +identifier = "dev.zed.Zed-Nightly" +name = "Zed Nightly" +osx_minimum_system_version = "10.15.7" +osx_info_plist_exts = ["resources/info/*"] +osx_url_schemes = ["zed-nightly"] + [package.metadata.bundle-preview] icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"] identifier = "dev.zed.Zed-Preview" @@ -178,7 +187,6 @@ osx_minimum_system_version = "10.15.7" osx_info_plist_exts = ["resources/info/*"] osx_url_schemes = ["zed-preview"] - [package.metadata.bundle-stable] icon = ["resources/app-icon@2x.png", "resources/app-icon.png"] identifier = "dev.zed.Zed" diff --git a/crates/zed/src/only_instance.rs b/crates/zed/src/only_instance.rs index a8c4b30816dfbbed4ae5c186d2d0e43722245293..85dbd3684ac42303401558fa9cfc699f5758fcdc 100644 --- a/crates/zed/src/only_instance.rs +++ b/crates/zed/src/only_instance.rs @@ -17,6 +17,7 @@ fn address() -> SocketAddr { ReleaseChannel::Dev => 43737, ReleaseChannel::Preview => 43738, ReleaseChannel::Stable => 43739, + ReleaseChannel::Nightly => 43740, }; SocketAddr::V4(SocketAddrV4::new(LOCALHOST, port)) @@ -25,6 +26,7 @@ fn address() -> SocketAddr { fn instance_handshake() -> &'static str { match *util::channel::RELEASE_CHANNEL { ReleaseChannel::Dev => "Zed Editor Dev Instance Running", + ReleaseChannel::Nightly => "Zed Editor Nightly Instance Running", ReleaseChannel::Preview => "Zed Editor Preview Instance Running", ReleaseChannel::Stable => "Zed Editor Stable Instance Running", } diff --git a/crates/zed2/Cargo.toml b/crates/zed2/Cargo.toml 
index aacaedca882cef47bfcb33c563bbfd31a093842d..c82f1eef5d619c2eec7c784d4fd7e131720dca70 100644 --- a/crates/zed2/Cargo.toml +++ b/crates/zed2/Cargo.toml @@ -166,6 +166,15 @@ osx_minimum_system_version = "10.15.7" osx_info_plist_exts = ["resources/info/*"] osx_url_schemes = ["zed-dev"] +[package.metadata.bundle-nightly] +# TODO kb different icon? +icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"] +identifier = "dev.zed.Zed-Nightly" +name = "Zed Nightly" +osx_minimum_system_version = "10.15.7" +osx_info_plist_exts = ["resources/info/*"] +osx_url_schemes = ["zed-nightly"] + [package.metadata.bundle-preview] icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"] identifier = "dev.zed.Zed-Preview" diff --git a/crates/zed2/src/only_instance.rs b/crates/zed2/src/only_instance.rs index a8c4b30816dfbbed4ae5c186d2d0e43722245293..85dbd3684ac42303401558fa9cfc699f5758fcdc 100644 --- a/crates/zed2/src/only_instance.rs +++ b/crates/zed2/src/only_instance.rs @@ -17,6 +17,7 @@ fn address() -> SocketAddr { ReleaseChannel::Dev => 43737, ReleaseChannel::Preview => 43738, ReleaseChannel::Stable => 43739, + ReleaseChannel::Nightly => 43740, }; SocketAddr::V4(SocketAddrV4::new(LOCALHOST, port)) @@ -25,6 +26,7 @@ fn address() -> SocketAddr { fn instance_handshake() -> &'static str { match *util::channel::RELEASE_CHANNEL { ReleaseChannel::Dev => "Zed Editor Dev Instance Running", + ReleaseChannel::Nightly => "Zed Editor Nightly Instance Running", ReleaseChannel::Preview => "Zed Editor Preview Instance Running", ReleaseChannel::Stable => "Zed Editor Stable Instance Running", } diff --git a/script/bump-zed-minor-versions b/script/bump-zed-minor-versions index 8dcf7e334e30dbd06f793acd302b2f9242815739..9e03d8a70c937796f0b6d23757c918dcd12cfbc4 100755 --- a/script/bump-zed-minor-versions +++ b/script/bump-zed-minor-versions @@ -43,8 +43,8 @@ if [[ $patch != 0 ]]; then echo "patch version on main should be zero" exit 1 fi -if [[ $(cat 
crates/zed/RELEASE_CHANNEL) != dev ]]; then - echo "release channel on main should be dev" +if [[ $(cat crates/zed/RELEASE_CHANNEL) != dev && $(cat crates/zed/RELEASE_CHANNEL) != nightly ]]; then + echo "release channel on main should be dev or nightly" exit 1 fi if git show-ref --quiet refs/tags/${preview_tag_name}; then @@ -59,6 +59,7 @@ if ! git show-ref --quiet refs/heads/${prev_minor_branch_name}; then echo "previous branch ${minor_branch_name} doesn't exist" exit 1 fi +# TODO kb anything else for RELEASE_CHANNEL == nightly needs to be done below? if [[ $(git show ${prev_minor_branch_name}:crates/zed/RELEASE_CHANNEL) != preview ]]; then echo "release channel on branch ${prev_minor_branch_name} should be preview" exit 1 diff --git a/script/bump-zed-patch-version b/script/bump-zed-patch-version index de0c0f7d661267429e20eb684e4eb6234f957443..e00e747aa32dacaf94fbd91e0aa0dd8a7446be11 100755 --- a/script/bump-zed-patch-version +++ b/script/bump-zed-patch-version @@ -9,8 +9,11 @@ case $channel in preview) tag_suffix="-pre" ;; + nightly) + tag_suffix="-nightly" + ;; *) - echo "this must be run on a stable or preview release branch" >&2 + echo "this must be run on either of stable|preview|nightly release branches" >&2 exit 1 ;; esac diff --git a/script/deploy b/script/deploy index d32d38733932d0c7e64eb6f315c6d8dfda5cfc0e..fcb2db4e29bbe632e932dadfcc309025c9a53eaf 100755 --- a/script/deploy +++ b/script/deploy @@ -4,6 +4,7 @@ set -eu source script/lib/deploy-helpers.sh if [[ $# < 2 ]]; then + # TODO kb nightly deploy? echo "Usage: $0 " exit 1 fi diff --git a/script/deploy-migration b/script/deploy-migration index a6b1574c044ef96f7c20e02e429da22eb9755806..4ea36993fe5c40e1761f8c8e41d794bac81de820 100755 --- a/script/deploy-migration +++ b/script/deploy-migration @@ -4,6 +4,7 @@ set -eu source script/lib/deploy-helpers.sh if [[ $# < 2 ]]; then + # TODO kb nightly migrations? 
echo "Usage: $0 " exit 1 fi @@ -23,4 +24,4 @@ envsubst < crates/collab/k8s/migrate.template.yml | kubectl apply -f - pod=$(kubectl --namespace=${environment} get pods --selector=job-name=${ZED_MIGRATE_JOB_NAME} --output=jsonpath='{.items[0].metadata.name}') echo "Job pod:" $pod -kubectl --namespace=${environment} logs -f ${pod} \ No newline at end of file +kubectl --namespace=${environment} logs -f ${pod} diff --git a/script/what-is-deployed b/script/what-is-deployed index f97e216f4a1256e697498a7db66068e3df2855f3..6d18edbd311381d9a5ef30b1636e44269c478e7b 100755 --- a/script/what-is-deployed +++ b/script/what-is-deployed @@ -4,6 +4,7 @@ set -eu source script/lib/deploy-helpers.sh if [[ $# < 1 ]]; then + # TODO kb infra for nightly? echo "Usage: $0 " exit 1 fi From 55548ca61ac741458eaf0f022a79bb91a15ff346 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 17 Nov 2023 09:14:31 -0800 Subject: [PATCH 02/75] Fix underspecified assertions in new tests for file reload race --- crates/project2/src/project_tests.rs | 62 +++++++++++++++++++++++++--- 1 file changed, 57 insertions(+), 5 deletions(-) diff --git a/crates/project2/src/project_tests.rs b/crates/project2/src/project_tests.rs index 9eb9a49e49e2930ba1b133b27b8f8fcf799a261e..81a4def456a202152865420f770f3e6cbfd7124e 100644 --- a/crates/project2/src/project_tests.rs +++ b/crates/project2/src/project_tests.rs @@ -2633,6 +2633,60 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) .unwrap(); worktree.next_event(cx); + cx.executor().run_until_parked(); + let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap(); + buffer.read_with(cx, |buffer, _| { + assert_eq!(buffer.text(), on_disk_text); + assert!(!buffer.is_dirty(), "buffer should not be dirty"); + assert!(!buffer.has_conflict(), "buffer should not be dirty"); + }); +} + +#[gpui::test(iterations = 30)] +async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = 
FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/dir", + json!({ + "file1": "the original contents", + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await; + let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap()); + let buffer = project + .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx)) + .await + .unwrap(); + + // Simulate buffer diffs being slow, so that they don't complete before + // the next file change occurs. + cx.executor().deprioritize(*language::BUFFER_DIFF_TASK); + + // Change the buffer's file on disk, and then wait for the file change + // to be detected by the worktree, so that the buffer starts reloading. + fs.save( + "/dir/file1".as_ref(), + &"the first contents".into(), + Default::default(), + ) + .await + .unwrap(); + worktree.next_event(cx); + + cx.executor() + .spawn(cx.executor().simulate_random_delay()) + .await; + + // Perform a noop edit, causing the buffer's version to increase. + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, " ")], None, cx); + buffer.undo(cx); + }); + cx.executor().run_until_parked(); let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap(); buffer.read_with(cx, |buffer, _| { @@ -2646,10 +2700,8 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) // If the file change occurred while the buffer was processing the first // change, the buffer will be in a conflicting state. else { - assert!( - buffer.is_dirty() && buffer.has_conflict(), - "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}" - ); + assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}"); + assert!(buffer.has_conflict(), "buffer should report that it is dirty. 
text: {buffer_text:?}, disk text: {on_disk_text:?}"); } }); } @@ -4084,7 +4136,7 @@ async fn search( fn init_test(cx: &mut gpui::TestAppContext) { if std::env::var("RUST_LOG").is_ok() { - env_logger::init(); + env_logger::try_init().ok(); } cx.update(|cx| { From 01d9d53f4a66109532fe53fe5731f743dd05e57e Mon Sep 17 00:00:00 2001 From: Mikayla Date: Fri, 17 Nov 2023 09:51:11 -0800 Subject: [PATCH 03/75] Adjust the type arrangement on ManagedViews --- .../command_palette2/src/command_palette.rs | 11 +++++--- crates/file_finder2/src/file_finder.rs | 15 ++++++----- crates/go_to_line2/src/go_to_line.rs | 16 ++++++----- crates/gpui2/src/window.rs | 13 +++------ crates/ui2/src/components/context_menu.rs | 27 ++++++++++--------- crates/workspace2/src/modal_layer.rs | 6 ++--- crates/workspace2/src/workspace2.rs | 12 ++++----- 7 files changed, 53 insertions(+), 47 deletions(-) diff --git a/crates/command_palette2/src/command_palette.rs b/crates/command_palette2/src/command_palette.rs index 9463cab68ca1b76984f372573e1271cb6fac76fc..ddb54e1505dc3e4e84b64edb082b75e06250e132 100644 --- a/crates/command_palette2/src/command_palette.rs +++ b/crates/command_palette2/src/command_palette.rs @@ -1,8 +1,9 @@ use collections::{CommandPaletteFilter, HashMap}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ - actions, div, prelude::*, Action, AppContext, Component, Dismiss, Div, FocusHandle, Keystroke, - ManagedView, ParentComponent, Render, Styled, View, ViewContext, VisualContext, WeakView, + actions, div, prelude::*, Action, AppContext, Component, Div, EventEmitter, FocusHandle, + FocusableView, Keystroke, ManagedView, ParentComponent, Render, Styled, View, ViewContext, + VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; use std::{ @@ -68,7 +69,9 @@ impl CommandPalette { } } -impl ManagedView for CommandPalette { +impl EventEmitter for CommandPalette {} + +impl FocusableView for CommandPalette { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { 
self.picker.focus_handle(cx) } @@ -265,7 +268,7 @@ impl PickerDelegate for CommandPaletteDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.command_palette - .update(cx, |_, cx| cx.emit(Dismiss)) + .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) .log_err(); } diff --git a/crates/file_finder2/src/file_finder.rs b/crates/file_finder2/src/file_finder.rs index 0fee5102e6d0314d134848eb3abf5697d71003d5..5312152f9f9be80108347b9c872ad5aa2b24f71f 100644 --- a/crates/file_finder2/src/file_finder.rs +++ b/crates/file_finder2/src/file_finder.rs @@ -2,9 +2,9 @@ use collections::HashMap; use editor::{scroll::autoscroll::Autoscroll, Bias, Editor}; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ - actions, div, AppContext, Component, Dismiss, Div, FocusHandle, InteractiveComponent, - ManagedView, Model, ParentComponent, Render, Styled, Task, View, ViewContext, VisualContext, - WeakView, + actions, div, AppContext, Component, Div, EventEmitter, FocusHandle, FocusableView, + InteractiveComponent, ManagedView, Model, ParentComponent, Render, Styled, Task, View, + ViewContext, VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId}; @@ -111,7 +111,8 @@ impl FileFinder { } } -impl ManagedView for FileFinder { +impl EventEmitter for FileFinder {} +impl FocusableView for FileFinder { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { self.picker.focus_handle(cx) } @@ -688,7 +689,9 @@ impl PickerDelegate for FileFinderDelegate { .log_err(); } } - finder.update(&mut cx, |_, cx| cx.emit(Dismiss)).ok()?; + finder + .update(&mut cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .ok()?; Some(()) }) @@ -699,7 +702,7 @@ impl PickerDelegate for FileFinderDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.file_finder - .update(cx, |_, cx| cx.emit(Dismiss)) + .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) .log_err(); } diff --git 
a/crates/go_to_line2/src/go_to_line.rs b/crates/go_to_line2/src/go_to_line.rs index 565afb5e939f01225341ae84e1628ead5daf5cbd..04c3f744736261c2c174b0611103f566f5a427d4 100644 --- a/crates/go_to_line2/src/go_to_line.rs +++ b/crates/go_to_line2/src/go_to_line.rs @@ -1,7 +1,8 @@ use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor}; use gpui::{ - actions, div, prelude::*, AppContext, Dismiss, Div, FocusHandle, ManagedView, ParentComponent, - Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext, WindowContext, + actions, div, prelude::*, AppContext, Div, EventEmitter, FocusHandle, FocusableView, + ManagedView, ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, + VisualContext, WindowContext, }; use text::{Bias, Point}; use theme::ActiveTheme; @@ -23,11 +24,12 @@ pub struct GoToLine { _subscriptions: Vec, } -impl ManagedView for GoToLine { +impl FocusableView for GoToLine { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { - self.line_editor.focus_handle(cx) + self.active_editor.focus_handle(cx) } } +impl EventEmitter for GoToLine {} impl GoToLine { fn register(workspace: &mut Workspace, _: &mut ViewContext) { @@ -87,7 +89,7 @@ impl GoToLine { ) { match event { // todo!() this isn't working... - editor::Event::Blurred => cx.emit(Dismiss), + editor::Event::Blurred => cx.emit(ManagedView::Dismiss), editor::Event::BufferEdited { .. 
} => self.highlight_current_line(cx), _ => {} } @@ -122,7 +124,7 @@ impl GoToLine { } fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { @@ -139,7 +141,7 @@ impl GoToLine { self.prev_scroll_position.take(); } - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } } diff --git a/crates/gpui2/src/window.rs b/crates/gpui2/src/window.rs index 6d07f06d9441b838828f7cf15ab0c2a6da72ff4e..5234049943d1034f4c6526bff4ef8dabe32ae6a7 100644 --- a/crates/gpui2/src/window.rs +++ b/crates/gpui2/src/window.rs @@ -193,17 +193,12 @@ pub trait FocusableView: Render { /// ManagedView is a view (like a Modal, Popover, Menu, etc.) /// where the lifecycle of the view is handled by another view. -pub trait ManagedView: Render { - fn focus_handle(&self, cx: &AppContext) -> FocusHandle; -} +pub trait Managed: FocusableView + EventEmitter {} -pub struct Dismiss; -impl EventEmitter for T {} +impl> Managed for M {} -impl FocusableView for T { - fn focus_handle(&self, cx: &AppContext) -> FocusHandle { - self.focus_handle(cx) - } +pub enum ManagedView { + Dismiss, } // Holds the state for a specific window. 
diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index d3214cbff1b31d7fda3c8fe80a108a55490f2119..c6a0d9689a53b8b14cbb3d5c6f1fb6c054fd2176 100644 --- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -4,8 +4,9 @@ use std::rc::Rc; use crate::prelude::*; use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHeader}; use gpui::{ - overlay, px, Action, AnchorCorner, AnyElement, Bounds, Dismiss, DispatchPhase, Div, - FocusHandle, LayoutId, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render, View, + overlay, px, Action, AnchorCorner, AnyElement, AppContext, Bounds, DispatchPhase, Div, + EventEmitter, FocusHandle, FocusableView, LayoutId, Managed, ManagedView, MouseButton, + MouseDownEvent, Pixels, Point, Render, View, }; pub struct ContextMenu { @@ -13,12 +14,14 @@ pub struct ContextMenu { focus_handle: FocusHandle, } -impl ManagedView for ContextMenu { - fn focus_handle(&self, cx: &gpui::AppContext) -> FocusHandle { +impl FocusableView for ContextMenu { + fn focus_handle(&self, _cx: &AppContext) -> FocusHandle { self.focus_handle.clone() } } +impl EventEmitter for ContextMenu {} + impl ContextMenu { pub fn new(cx: &mut WindowContext) -> Self { Self { @@ -44,11 +47,11 @@ impl ContextMenu { pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { // todo!() - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } pub fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(Dismiss); + cx.emit(ManagedView::Dismiss); } } @@ -76,7 +79,7 @@ impl Render for ContextMenu { } } -pub struct MenuHandle { +pub struct MenuHandle { id: Option, child_builder: Option AnyElement + 'static>>, menu_builder: Option) -> View + 'static>>, @@ -85,7 +88,7 @@ pub struct MenuHandle { attach: Option, } -impl MenuHandle { +impl MenuHandle { pub fn id(mut self, id: impl Into) -> Self { self.id = Some(id.into()); self @@ -115,7 +118,7 @@ impl 
MenuHandle { } } -pub fn menu_handle() -> MenuHandle { +pub fn menu_handle() -> MenuHandle { MenuHandle { id: None, child_builder: None, @@ -132,7 +135,7 @@ pub struct MenuHandleState { child_element: Option>, menu_element: Option>, } -impl Element for MenuHandle { +impl Element for MenuHandle { type ElementState = MenuHandleState; fn element_id(&self) -> Option { @@ -226,7 +229,7 @@ impl Element for MenuHandle { let new_menu = (builder)(view_state, cx); let menu2 = menu.clone(); cx.subscribe(&new_menu, move |this, modal, e, cx| match e { - &Dismiss => { + &ManagedView::Dismiss => { *menu2.borrow_mut() = None; cx.notify(); } @@ -247,7 +250,7 @@ impl Element for MenuHandle { } } -impl Component for MenuHandle { +impl Component for MenuHandle { fn render(self) -> AnyElement { AnyElement::new(self) } diff --git a/crates/workspace2/src/modal_layer.rs b/crates/workspace2/src/modal_layer.rs index 8afd8317f94ed5452e49106c50b5e69f056a6e6e..3ed50fefdf230059846777a0d7ea00a2713082aa 100644 --- a/crates/workspace2/src/modal_layer.rs +++ b/crates/workspace2/src/modal_layer.rs @@ -1,5 +1,5 @@ use gpui::{ - div, prelude::*, px, AnyView, Div, FocusHandle, ManagedView, Render, Subscription, View, + div, prelude::*, px, AnyView, Div, FocusHandle, Managed, Render, Subscription, View, ViewContext, }; use ui::{h_stack, v_stack}; @@ -22,7 +22,7 @@ impl ModalLayer { pub fn toggle_modal(&mut self, cx: &mut ViewContext, build_view: B) where - V: ManagedView, + V: Managed, B: FnOnce(&mut ViewContext) -> V, { if let Some(active_modal) = &self.active_modal { @@ -38,7 +38,7 @@ impl ModalLayer { pub fn show_modal(&mut self, new_modal: View, cx: &mut ViewContext) where - V: ManagedView, + V: Managed, { self.active_modal = Some(ActiveModal { modal: new_modal.clone().into(), diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index 08d248f6f2a2e3fb1cb266c5c448e4824401af59..34ebdbd3e225ddc72e2b1813141dd68405aa25a8 100644 --- a/crates/workspace2/src/workspace2.rs 
+++ b/crates/workspace2/src/workspace2.rs @@ -31,10 +31,10 @@ use futures::{ use gpui::{ actions, div, point, size, Action, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext, AsyncWindowContext, Bounds, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, - FocusableView, GlobalPixels, InteractiveComponent, KeyContext, ManagedView, Model, - ModelContext, ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, - Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, - WindowHandle, WindowOptions, + FocusableView, GlobalPixels, InteractiveComponent, KeyContext, Managed, Model, ModelContext, + ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, Subscription, + Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, WindowHandle, + WindowOptions, }; use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem}; use itertools::Itertools; @@ -3364,14 +3364,14 @@ impl Workspace { div } - pub fn active_modal( + pub fn active_modal( &mut self, cx: &ViewContext, ) -> Option> { self.modal_layer.read(cx).active_modal() } - pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) + pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) where B: FnOnce(&mut ViewContext) -> V, { From 17d53d0e38420ba34b324453fd5df96bbf48d472 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Fri, 17 Nov 2023 10:06:41 -0800 Subject: [PATCH 04/75] Rename again, add fun cx APIs using new traits --- .../command_palette2/src/command_palette.rs | 6 ++--- crates/file_finder2/src/file_finder.rs | 8 +++---- crates/go_to_line2/src/go_to_line.rs | 10 ++++---- crates/gpui2/src/app/async_context.rs | 11 ++++++++- crates/gpui2/src/app/test_context.rs | 11 +++++++++ crates/gpui2/src/gpui2.rs | 4 ++++ crates/gpui2/src/window.rs | 24 ++++++++++++++++--- crates/ui2/src/components/context_menu.rs | 20 ++++++++-------- crates/workspace2/src/modal_layer.rs 
| 6 ++--- crates/workspace2/src/workspace2.rs | 12 +++++----- 10 files changed, 77 insertions(+), 35 deletions(-) diff --git a/crates/command_palette2/src/command_palette.rs b/crates/command_palette2/src/command_palette.rs index ddb54e1505dc3e4e84b64edb082b75e06250e132..678fec7dac8f150a7ae436522a3e56c4699449af 100644 --- a/crates/command_palette2/src/command_palette.rs +++ b/crates/command_palette2/src/command_palette.rs @@ -2,7 +2,7 @@ use collections::{CommandPaletteFilter, HashMap}; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ actions, div, prelude::*, Action, AppContext, Component, Div, EventEmitter, FocusHandle, - FocusableView, Keystroke, ManagedView, ParentComponent, Render, Styled, View, ViewContext, + FocusableView, Keystroke, ManagedEvent, ParentComponent, Render, Styled, View, ViewContext, VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; @@ -69,7 +69,7 @@ impl CommandPalette { } } -impl EventEmitter for CommandPalette {} +impl EventEmitter for CommandPalette {} impl FocusableView for CommandPalette { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { @@ -268,7 +268,7 @@ impl PickerDelegate for CommandPaletteDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.command_palette - .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .update(cx, |_, cx| cx.emit(ManagedEvent::Dismiss)) .log_err(); } diff --git a/crates/file_finder2/src/file_finder.rs b/crates/file_finder2/src/file_finder.rs index 5312152f9f9be80108347b9c872ad5aa2b24f71f..d64a27e74a9e5ac9840d30a59c5daa5318b54e6b 100644 --- a/crates/file_finder2/src/file_finder.rs +++ b/crates/file_finder2/src/file_finder.rs @@ -3,7 +3,7 @@ use editor::{scroll::autoscroll::Autoscroll, Bias, Editor}; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ actions, div, AppContext, Component, Div, EventEmitter, FocusHandle, FocusableView, - InteractiveComponent, ManagedView, Model, ParentComponent, Render, Styled, Task, View, + InteractiveComponent, 
ManagedEvent, Model, ParentComponent, Render, Styled, Task, View, ViewContext, VisualContext, WeakView, }; use picker::{Picker, PickerDelegate}; @@ -111,7 +111,7 @@ impl FileFinder { } } -impl EventEmitter for FileFinder {} +impl EventEmitter for FileFinder {} impl FocusableView for FileFinder { fn focus_handle(&self, cx: &AppContext) -> FocusHandle { self.picker.focus_handle(cx) @@ -690,7 +690,7 @@ impl PickerDelegate for FileFinderDelegate { } } finder - .update(&mut cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .update(&mut cx, |_, cx| cx.emit(ManagedEvent::Dismiss)) .ok()?; Some(()) @@ -702,7 +702,7 @@ impl PickerDelegate for FileFinderDelegate { fn dismissed(&mut self, cx: &mut ViewContext>) { self.file_finder - .update(cx, |_, cx| cx.emit(ManagedView::Dismiss)) + .update(cx, |_, cx| cx.emit(ManagedEvent::Dismiss)) .log_err(); } diff --git a/crates/go_to_line2/src/go_to_line.rs b/crates/go_to_line2/src/go_to_line.rs index 04c3f744736261c2c174b0611103f566f5a427d4..ec0d8eca5d1c622e9e2fdcd5de6723901745b6c6 100644 --- a/crates/go_to_line2/src/go_to_line.rs +++ b/crates/go_to_line2/src/go_to_line.rs @@ -1,7 +1,7 @@ use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor}; use gpui::{ actions, div, prelude::*, AppContext, Div, EventEmitter, FocusHandle, FocusableView, - ManagedView, ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, + ManagedEvent, ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext, WindowContext, }; use text::{Bias, Point}; @@ -29,7 +29,7 @@ impl FocusableView for GoToLine { self.active_editor.focus_handle(cx) } } -impl EventEmitter for GoToLine {} +impl EventEmitter for GoToLine {} impl GoToLine { fn register(workspace: &mut Workspace, _: &mut ViewContext) { @@ -89,7 +89,7 @@ impl GoToLine { ) { match event { // todo!() this isn't working... 
- editor::Event::Blurred => cx.emit(ManagedView::Dismiss), + editor::Event::Blurred => cx.emit(ManagedEvent::Dismiss), editor::Event::BufferEdited { .. } => self.highlight_current_line(cx), _ => {} } @@ -124,7 +124,7 @@ impl GoToLine { } fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { @@ -141,7 +141,7 @@ impl GoToLine { self.prev_scroll_position.take(); } - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } } diff --git a/crates/gpui2/src/app/async_context.rs b/crates/gpui2/src/app/async_context.rs index 83b3ccebe7dc0ebb169a0647fcdc2602ad4ea107..04a26f663ba7404832afe7db654fd3d2a1f3b8b1 100644 --- a/crates/gpui2/src/app/async_context.rs +++ b/crates/gpui2/src/app/async_context.rs @@ -1,6 +1,6 @@ use crate::{ AnyView, AnyWindowHandle, AppCell, AppContext, BackgroundExecutor, Context, FocusableView, - ForegroundExecutor, Model, ModelContext, Render, Result, Task, View, ViewContext, + ForegroundExecutor, ManagedEvent, Model, ModelContext, Render, Result, Task, View, ViewContext, VisualContext, WindowContext, WindowHandle, }; use anyhow::{anyhow, Context as _}; @@ -320,4 +320,13 @@ impl VisualContext for AsyncWindowContext { view.read(cx).focus_handle(cx).clone().focus(cx); }) } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: crate::ManagedView, + { + self.window.update(self, |_, cx| { + view.update(cx, |_, cx| cx.emit(ManagedEvent::Dismiss)) + }) + } } diff --git a/crates/gpui2/src/app/test_context.rs b/crates/gpui2/src/app/test_context.rs index 940492573f0ed504defc711f559375fc3686c0ce..a3058aa080b1b3cbd6fa9908c710313e2c4925ff 100644 --- a/crates/gpui2/src/app/test_context.rs +++ b/crates/gpui2/src/app/test_context.rs @@ -579,6 +579,17 @@ impl<'a> VisualContext for VisualTestContext<'a> { }) .unwrap() } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: 
crate::ManagedView, + { + self.window + .update(self.cx, |_, cx| { + view.update(cx, |_, cx| cx.emit(crate::ManagedEvent::Dismiss)) + }) + .unwrap() + } } impl AnyWindowHandle { diff --git a/crates/gpui2/src/gpui2.rs b/crates/gpui2/src/gpui2.rs index 88ecd52c037415696cefc6e663758ed7dcfcab8f..a24509386b5a229102cc483b64084b5c2319849e 100644 --- a/crates/gpui2/src/gpui2.rs +++ b/crates/gpui2/src/gpui2.rs @@ -141,6 +141,10 @@ pub trait VisualContext: Context { fn focus_view(&mut self, view: &View) -> Self::Result<()> where V: FocusableView; + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: ManagedView; } pub trait Entity: Sealed { diff --git a/crates/gpui2/src/window.rs b/crates/gpui2/src/window.rs index 5234049943d1034f4c6526bff4ef8dabe32ae6a7..e4ae3f9c039007950c343033ef07ec2c88173234 100644 --- a/crates/gpui2/src/window.rs +++ b/crates/gpui2/src/window.rs @@ -193,11 +193,11 @@ pub trait FocusableView: Render { /// ManagedView is a view (like a Modal, Popover, Menu, etc.) /// where the lifecycle of the view is handled by another view. 
-pub trait Managed: FocusableView + EventEmitter {} +pub trait ManagedView: FocusableView + EventEmitter {} -impl> Managed for M {} +impl> ManagedView for M {} -pub enum ManagedView { +pub enum ManagedEvent { Dismiss, } @@ -1577,6 +1577,13 @@ impl VisualContext for WindowContext<'_> { view.focus_handle(cx).clone().focus(cx); }) } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> + where + V: ManagedView, + { + self.update_view(view, |_, cx| cx.emit(ManagedEvent::Dismiss)) + } } impl<'a> std::ops::Deref for WindowContext<'a> { @@ -2270,6 +2277,13 @@ impl<'a, V: 'static> ViewContext<'a, V> { { self.defer(|view, cx| view.focus_handle(cx).focus(cx)) } + + pub fn dismiss_self(&mut self) + where + V: ManagedView, + { + self.defer(|_, cx| cx.emit(ManagedEvent::Dismiss)) + } } impl Context for ViewContext<'_, V> { @@ -2349,6 +2363,10 @@ impl VisualContext for ViewContext<'_, V> { fn focus_view(&mut self, view: &View) -> Self::Result<()> { self.window_cx.focus_view(view) } + + fn dismiss_view(&mut self, view: &View) -> Self::Result<()> { + self.window_cx.dismiss_view(view) + } } impl<'a, V> std::ops::Deref for ViewContext<'a, V> { diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index c6a0d9689a53b8b14cbb3d5c6f1fb6c054fd2176..80c22cbbcf2206bf4807da3564b9285348f79d4b 100644 --- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -5,7 +5,7 @@ use crate::prelude::*; use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHeader}; use gpui::{ overlay, px, Action, AnchorCorner, AnyElement, AppContext, Bounds, DispatchPhase, Div, - EventEmitter, FocusHandle, FocusableView, LayoutId, Managed, ManagedView, MouseButton, + EventEmitter, FocusHandle, FocusableView, LayoutId, ManagedEvent, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render, View, }; @@ -20,7 +20,7 @@ impl FocusableView for ContextMenu { } } -impl EventEmitter for ContextMenu {} 
+impl EventEmitter for ContextMenu {} impl ContextMenu { pub fn new(cx: &mut WindowContext) -> Self { @@ -47,11 +47,11 @@ impl ContextMenu { pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { // todo!() - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } pub fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext) { - cx.emit(ManagedView::Dismiss); + cx.emit(ManagedEvent::Dismiss); } } @@ -79,7 +79,7 @@ impl Render for ContextMenu { } } -pub struct MenuHandle { +pub struct MenuHandle { id: Option, child_builder: Option AnyElement + 'static>>, menu_builder: Option) -> View + 'static>>, @@ -88,7 +88,7 @@ pub struct MenuHandle { attach: Option, } -impl MenuHandle { +impl MenuHandle { pub fn id(mut self, id: impl Into) -> Self { self.id = Some(id.into()); self @@ -118,7 +118,7 @@ impl MenuHandle { } } -pub fn menu_handle() -> MenuHandle { +pub fn menu_handle() -> MenuHandle { MenuHandle { id: None, child_builder: None, @@ -135,7 +135,7 @@ pub struct MenuHandleState { child_element: Option>, menu_element: Option>, } -impl Element for MenuHandle { +impl Element for MenuHandle { type ElementState = MenuHandleState; fn element_id(&self) -> Option { @@ -229,7 +229,7 @@ impl Element for MenuHandle { let new_menu = (builder)(view_state, cx); let menu2 = menu.clone(); cx.subscribe(&new_menu, move |this, modal, e, cx| match e { - &ManagedView::Dismiss => { + &ManagedEvent::Dismiss => { *menu2.borrow_mut() = None; cx.notify(); } @@ -250,7 +250,7 @@ impl Element for MenuHandle { } } -impl Component for MenuHandle { +impl Component for MenuHandle { fn render(self) -> AnyElement { AnyElement::new(self) } diff --git a/crates/workspace2/src/modal_layer.rs b/crates/workspace2/src/modal_layer.rs index 3ed50fefdf230059846777a0d7ea00a2713082aa..8afd8317f94ed5452e49106c50b5e69f056a6e6e 100644 --- a/crates/workspace2/src/modal_layer.rs +++ b/crates/workspace2/src/modal_layer.rs @@ -1,5 +1,5 @@ use gpui::{ - div, prelude::*, px, AnyView, Div, 
FocusHandle, Managed, Render, Subscription, View, + div, prelude::*, px, AnyView, Div, FocusHandle, ManagedView, Render, Subscription, View, ViewContext, }; use ui::{h_stack, v_stack}; @@ -22,7 +22,7 @@ impl ModalLayer { pub fn toggle_modal(&mut self, cx: &mut ViewContext, build_view: B) where - V: Managed, + V: ManagedView, B: FnOnce(&mut ViewContext) -> V, { if let Some(active_modal) = &self.active_modal { @@ -38,7 +38,7 @@ impl ModalLayer { pub fn show_modal(&mut self, new_modal: View, cx: &mut ViewContext) where - V: Managed, + V: ManagedView, { self.active_modal = Some(ActiveModal { modal: new_modal.clone().into(), diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index 34ebdbd3e225ddc72e2b1813141dd68405aa25a8..08d248f6f2a2e3fb1cb266c5c448e4824401af59 100644 --- a/crates/workspace2/src/workspace2.rs +++ b/crates/workspace2/src/workspace2.rs @@ -31,10 +31,10 @@ use futures::{ use gpui::{ actions, div, point, size, Action, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext, AsyncWindowContext, Bounds, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, - FocusableView, GlobalPixels, InteractiveComponent, KeyContext, Managed, Model, ModelContext, - ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, Subscription, - Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, WindowHandle, - WindowOptions, + FocusableView, GlobalPixels, InteractiveComponent, KeyContext, ManagedView, Model, + ModelContext, ParentComponent, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, + Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext, + WindowHandle, WindowOptions, }; use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem}; use itertools::Itertools; @@ -3364,14 +3364,14 @@ impl Workspace { div } - pub fn active_modal( + pub fn active_modal( &mut self, cx: &ViewContext, ) -> Option> { 
self.modal_layer.read(cx).active_modal() } - pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) + pub fn toggle_modal(&mut self, cx: &mut ViewContext, build: B) where B: FnOnce(&mut ViewContext) -> V, { From 5cf953d559cffcc9b6105e164f6c6928d49d09e7 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Fri, 17 Nov 2023 10:31:10 -0800 Subject: [PATCH 05/75] Add error messages to server deployment for nightly --- script/deploy | 8 ++++++-- script/deploy-migration | 8 ++++++-- script/what-is-deployed | 8 ++++++-- 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/script/deploy b/script/deploy index fcb2db4e29bbe632e932dadfcc309025c9a53eaf..b6da3f8f848e02e236095c3d70e2823cf0efcebd 100755 --- a/script/deploy +++ b/script/deploy @@ -4,13 +4,17 @@ set -eu source script/lib/deploy-helpers.sh if [[ $# < 2 ]]; then - # TODO kb nightly deploy? - echo "Usage: $0 " + echo "Usage: $0 (nightly is not yet supported)" exit 1 fi environment=$1 version=$2 +if [[ ${environment} == "nightly" ]]; then + echo "nightly is not yet supported" + exit 1 +fi + export_vars_for_environment ${environment} image_id=$(image_id_for_version ${version}) diff --git a/script/deploy-migration b/script/deploy-migration index 4ea36993fe5c40e1761f8c8e41d794bac81de820..340e6cef1f6a25bcc341586f8cd10fccd91a4673 100755 --- a/script/deploy-migration +++ b/script/deploy-migration @@ -4,13 +4,17 @@ set -eu source script/lib/deploy-helpers.sh if [[ $# < 2 ]]; then - # TODO kb nightly migrations? 
- echo "Usage: $0 " + echo "Usage: $0 (nightly is not yet supported)" exit 1 fi environment=$1 version=$2 +if [[ ${environment} == "nightly" ]]; then + echo "nightly is not yet supported" + exit 1 +fi + export_vars_for_environment ${environment} image_id=$(image_id_for_version ${version}) diff --git a/script/what-is-deployed b/script/what-is-deployed index 6d18edbd311381d9a5ef30b1636e44269c478e7b..b6a68dd3b3245bdf925ffe2d80c23725e43c1c81 100755 --- a/script/what-is-deployed +++ b/script/what-is-deployed @@ -4,12 +4,16 @@ set -eu source script/lib/deploy-helpers.sh if [[ $# < 1 ]]; then - # TODO kb infra for nightly? - echo "Usage: $0 " + echo "Usage: $0 (nightly is not yet supported)" exit 1 fi environment=$1 +if [[ ${environment} == "nightly" ]]; then + echo "nightly is not yet supported" + exit 1 +fi + export_vars_for_environment ${environment} target_zed_kube_cluster From 9d742b90c386594d7962c4419c4d2e8505a9f355 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 11:57:51 -0700 Subject: [PATCH 06/75] Allow you to click on a context menu item --- crates/gpui2/src/app/entity_map.rs | 11 ++- crates/gpui2/src/elements/div.rs | 5 + crates/gpui2/src/platform/mac/window.rs | 5 +- crates/ui2/src/components/context_menu.rs | 111 +++++++++++++++++----- crates/ui2/src/components/list.rs | 74 ++++++++------- crates/ui2/src/static_data.rs | 8 +- 6 files changed, 140 insertions(+), 74 deletions(-) diff --git a/crates/gpui2/src/app/entity_map.rs b/crates/gpui2/src/app/entity_map.rs index 4a3cca040b7a9982101e8bdf10816d7f15a1bff2..f1e7fad6a1967dbe238f32f7d1768799b9a69439 100644 --- a/crates/gpui2/src/app/entity_map.rs +++ b/crates/gpui2/src/app/entity_map.rs @@ -71,11 +71,12 @@ impl EntityMap { #[track_caller] pub fn lease<'a, T>(&mut self, model: &'a Model) -> Lease<'a, T> { self.assert_valid_context(model); - let entity = Some( - self.entities - .remove(model.entity_id) - .expect("Circular entity lease. 
Is the entity already being updated?"), - ); + let entity = Some(self.entities.remove(model.entity_id).unwrap_or_else(|| { + panic!( + "Circular entity lease of {}. Is it already being updated?", + std::any::type_name::() + ) + })); Lease { model, entity, diff --git a/crates/gpui2/src/elements/div.rs b/crates/gpui2/src/elements/div.rs index f9560f2c53188a17b6c336e9bb284ee3ec58b07f..a37e3dee2adb5898b1526e3fd944491ed637c6b2 100644 --- a/crates/gpui2/src/elements/div.rs +++ b/crates/gpui2/src/elements/div.rs @@ -1124,9 +1124,14 @@ where } } } + // if self.hover_style.is_some() { if bounds.contains_point(&mouse_position) { + // eprintln!("div hovered {bounds:?} {mouse_position:?}"); style.refine(&self.hover_style); + } else { + // eprintln!("div NOT hovered {bounds:?} {mouse_position:?}"); } + // } if let Some(drag) = cx.active_drag.take() { for (state_type, group_drag_style) in &self.group_drag_over_styles { diff --git a/crates/gpui2/src/platform/mac/window.rs b/crates/gpui2/src/platform/mac/window.rs index 03782d13a84a0cb36e681a6a06470054c61e28e5..bb3a659a62bb998d191d28e31e796e31ca1eb3fe 100644 --- a/crates/gpui2/src/platform/mac/window.rs +++ b/crates/gpui2/src/platform/mac/window.rs @@ -1205,10 +1205,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) { InputEvent::MouseMove(_) if !(is_active || lock.kind == WindowKind::PopUp) => return, - InputEvent::MouseUp(MouseUpEvent { - button: MouseButton::Left, - .. - }) => { + InputEvent::MouseUp(MouseUpEvent { .. 
}) => { lock.synthetic_drag_counter += 1; } diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index d3214cbff1b31d7fda3c8fe80a108a55490f2119..b1e30dbe3bbd5bf85feb8b85129e26d3524c77cf 100644 --- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -6,39 +6,66 @@ use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHea use gpui::{ overlay, px, Action, AnchorCorner, AnyElement, Bounds, Dismiss, DispatchPhase, Div, FocusHandle, LayoutId, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render, View, + VisualContext, WeakView, }; -pub struct ContextMenu { - items: Vec, +pub enum ContextMenuItem { + Separator(ListSeparator), + Header(ListSubHeader), + Entry( + ListEntry>, + Rc)>, + ), +} + +pub struct ContextMenu { + items: Vec>, focus_handle: FocusHandle, + handle: WeakView, } -impl ManagedView for ContextMenu { +impl ManagedView for ContextMenu { fn focus_handle(&self, cx: &gpui::AppContext) -> FocusHandle { self.focus_handle.clone() } } -impl ContextMenu { - pub fn new(cx: &mut WindowContext) -> Self { - Self { - items: Default::default(), - focus_handle: cx.focus_handle(), - } +impl ContextMenu { + pub fn build( + cx: &mut ViewContext, + f: impl FnOnce(Self, &mut ViewContext) -> Self, + ) -> View { + let handle = cx.view().downgrade(); + cx.build_view(|cx| { + f( + Self { + handle, + items: Default::default(), + focus_handle: cx.focus_handle(), + }, + cx, + ) + }) } pub fn header(mut self, title: impl Into) -> Self { - self.items.push(ListItem::Header(ListSubHeader::new(title))); + self.items + .push(ContextMenuItem::Header(ListSubHeader::new(title))); self } pub fn separator(mut self) -> Self { - self.items.push(ListItem::Separator(ListSeparator)); + self.items.push(ContextMenuItem::Separator(ListSeparator)); self } - pub fn entry(mut self, label: Label, action: Box) -> Self { - self.items.push(ListEntry::new(label).action(action).into()); + pub 
fn entry( + mut self, + view: ListEntry, + on_click: impl Fn(&mut V, &mut ViewContext) + 'static, + ) -> Self { + self.items + .push(ContextMenuItem::Entry(view, Rc::new(on_click))); self } @@ -52,9 +79,9 @@ impl ContextMenu { } } -impl Render for ContextMenu { +impl Render for ContextMenu { type Element = Div; - // todo!() + fn render(&mut self, cx: &mut ViewContext) -> Self::Element { div().elevation_2(cx).flex().flex_row().child( v_stack() @@ -71,7 +98,25 @@ impl Render for ContextMenu { // .bg(cx.theme().colors().elevated_surface_background) // .border() // .border_color(cx.theme().colors().border) - .child(List::new(self.items.clone())), + .child(List::new( + self.items + .iter() + .map(|item| match item { + ContextMenuItem::Separator(separator) => { + ListItem::Separator(separator.clone()) + } + ContextMenuItem::Header(header) => ListItem::Header(header.clone()), + ContextMenuItem::Entry(entry, callback) => { + let callback = callback.clone(); + let handle = self.handle.clone(); + ListItem::Entry(entry.clone().on_click(move |this, cx| { + handle.update(cx, |view, cx| callback(view, cx)).ok(); + cx.emit(Dismiss); + })) + } + }) + .collect(), + )), ) } } @@ -232,6 +277,7 @@ impl Element for MenuHandle { } }) .detach(); + cx.focus_view(&new_menu); *menu.borrow_mut() = Some(new_menu); *position.borrow_mut() = if attach.is_some() && child_layout_id.is_some() { @@ -260,16 +306,25 @@ pub use stories::*; mod stories { use super::*; use crate::story::Story; - use gpui::{actions, Div, Render, VisualContext}; - - actions!(PrintCurrentDate); - - fn build_menu(cx: &mut WindowContext, header: impl Into) -> View { - cx.build_view(|cx| { - ContextMenu::new(cx).header(header).separator().entry( - Label::new("Print current time"), - PrintCurrentDate.boxed_clone(), - ) + use gpui::{actions, Div, Render}; + + actions!(PrintCurrentDate, PrintBestFood); + + fn build_menu( + cx: &mut ViewContext, + header: impl Into, + ) -> View> { + let handle = cx.view().clone(); + 
ContextMenu::build(cx, |menu, _| { + menu.header(header) + .separator() + .entry(ListEntry::new(Label::new("Print current time")), |v, cx| { + println!("dispatching PrintCurrentTime action"); + cx.dispatch_action(PrintCurrentDate.boxed_clone()) + }) + .entry(ListEntry::new(Label::new("Print best food")), |v, cx| { + cx.dispatch_action(PrintBestFood.boxed_clone()) + }) }) } @@ -281,10 +336,14 @@ mod stories { fn render(&mut self, cx: &mut ViewContext) -> Self::Element { Story::container(cx) .on_action(|_, _: &PrintCurrentDate, _| { + println!("printing unix time!"); if let Ok(unix_time) = std::time::UNIX_EPOCH.elapsed() { println!("Current Unix time is {:?}", unix_time.as_secs()); } }) + .on_action(|_, _: &PrintBestFood, _| { + println!("burrito"); + }) .flex() .flex_row() .justify_between() diff --git a/crates/ui2/src/components/list.rs b/crates/ui2/src/components/list.rs index b9508c54136aa424789f943ef40cf47d58122fae..07ff577ce02f45732d7712eccea6ed06c85be707 100644 --- a/crates/ui2/src/components/list.rs +++ b/crates/ui2/src/components/list.rs @@ -1,4 +1,6 @@ -use gpui::{div, Action}; +use std::rc::Rc; + +use gpui::{div, Div, Stateful, StatefulInteractiveComponent}; use crate::settings::user_settings; use crate::{ @@ -172,35 +174,35 @@ pub enum ListEntrySize { Medium, } -#[derive(Component, Clone)] -pub enum ListItem { - Entry(ListEntry), +#[derive(Clone)] +pub enum ListItem { + Entry(ListEntry), Separator(ListSeparator), Header(ListSubHeader), } -impl From for ListItem { - fn from(entry: ListEntry) -> Self { +impl From> for ListItem { + fn from(entry: ListEntry) -> Self { Self::Entry(entry) } } -impl From for ListItem { +impl From for ListItem { fn from(entry: ListSeparator) -> Self { Self::Separator(entry) } } -impl From for ListItem { +impl From for ListItem { fn from(entry: ListSubHeader) -> Self { Self::Header(entry) } } -impl ListItem { - fn render(self, view: &mut V, cx: &mut ViewContext) -> impl Component { +impl ListItem { + fn render(self, view: &mut V, 
ix: usize, cx: &mut ViewContext) -> impl Component { match self { - ListItem::Entry(entry) => div().child(entry.render(view, cx)), + ListItem::Entry(entry) => div().child(entry.render(ix, cx)), ListItem::Separator(separator) => div().child(separator.render(view, cx)), ListItem::Header(header) => div().child(header.render(view, cx)), } @@ -210,7 +212,7 @@ impl ListItem { Self::Entry(ListEntry::new(label)) } - pub fn as_entry(&mut self) -> Option<&mut ListEntry> { + pub fn as_entry(&mut self) -> Option<&mut ListEntry> { if let Self::Entry(entry) = self { Some(entry) } else { @@ -219,8 +221,7 @@ impl ListItem { } } -#[derive(Component)] -pub struct ListEntry { +pub struct ListEntry { disabled: bool, // TODO: Reintroduce this // disclosure_control_style: DisclosureControlVisibility, @@ -231,15 +232,13 @@ pub struct ListEntry { size: ListEntrySize, toggle: Toggle, variant: ListItemVariant, - on_click: Option>, + on_click: Option) + 'static>>, } -impl Clone for ListEntry { +impl Clone for ListEntry { fn clone(&self) -> Self { Self { disabled: self.disabled, - // TODO: Reintroduce this - // disclosure_control_style: DisclosureControlVisibility, indent_level: self.indent_level, label: self.label.clone(), left_slot: self.left_slot.clone(), @@ -247,12 +246,12 @@ impl Clone for ListEntry { size: self.size, toggle: self.toggle, variant: self.variant, - on_click: self.on_click.as_ref().map(|opt| opt.boxed_clone()), + on_click: self.on_click.clone(), } } } -impl ListEntry { +impl ListEntry { pub fn new(label: Label) -> Self { Self { disabled: false, @@ -267,8 +266,8 @@ impl ListEntry { } } - pub fn action(mut self, action: impl Into>) -> Self { - self.on_click = Some(action.into()); + pub fn on_click(mut self, handler: impl Fn(&mut V, &mut ViewContext) + 'static) -> Self { + self.on_click = Some(Rc::new(handler)); self } @@ -307,7 +306,7 @@ impl ListEntry { self } - fn render(self, _view: &mut V, cx: &mut ViewContext) -> impl Component { + fn render(self, ix: usize, cx: &mut 
ViewContext) -> Stateful> { let settings = user_settings(cx); let left_content = match self.left_slot.clone() { @@ -328,21 +327,21 @@ impl ListEntry { ListEntrySize::Medium => div().h_7(), }; div() + .id(ix) .relative() .hover(|mut style| { style.background = Some(cx.theme().colors().editor_background.into()); style }) - .on_mouse_down(gpui::MouseButton::Left, { - let action = self.on_click.map(|action| action.boxed_clone()); + .on_click({ + let on_click = self.on_click.clone(); - move |entry: &mut V, event, cx| { - if let Some(action) = action.as_ref() { - cx.dispatch_action(action.boxed_clone()); + move |view: &mut V, event, cx| { + if let Some(on_click) = &on_click { + (on_click)(view, cx) } } }) - .group("") .bg(cx.theme().colors().surface_background) // TODO: Add focus state // .when(self.state == InteractionState::Focused, |this| { @@ -391,8 +390,8 @@ impl ListSeparator { } #[derive(Component)] -pub struct List { - items: Vec, +pub struct List { + items: Vec>, /// Message to display when the list is empty /// Defaults to "No items" empty_message: SharedString, @@ -400,8 +399,8 @@ pub struct List { toggle: Toggle, } -impl List { - pub fn new(items: Vec) -> Self { +impl List { + pub fn new(items: Vec>) -> Self { Self { items, empty_message: "No items".into(), @@ -425,9 +424,14 @@ impl List { self } - fn render(self, _view: &mut V, cx: &mut ViewContext) -> impl Component { + fn render(self, view: &mut V, cx: &mut ViewContext) -> impl Component { let list_content = match (self.items.is_empty(), self.toggle) { - (false, _) => div().children(self.items), + (false, _) => div().children( + self.items + .into_iter() + .enumerate() + .map(|(ix, item)| item.render(view, ix, cx)), + ), (true, Toggle::Toggled(false)) => div(), (true, _) => { div().child(Label::new(self.empty_message.clone()).color(TextColor::Muted)) diff --git a/crates/ui2/src/static_data.rs b/crates/ui2/src/static_data.rs index 
bb81d6230fdc7a68f9e2b2eae826a59b6b57297b..dd296cee5d4587619f901ed90f97af4fc7c707fb 100644 --- a/crates/ui2/src/static_data.rs +++ b/crates/ui2/src/static_data.rs @@ -478,7 +478,7 @@ pub fn static_new_notification_items_2() -> Vec> { ] } -pub fn static_project_panel_project_items() -> Vec { +pub fn static_project_panel_project_items() -> Vec> { vec![ ListEntry::new(Label::new("zed")) .left_icon(Icon::FolderOpen.into()) @@ -605,7 +605,7 @@ pub fn static_project_panel_project_items() -> Vec { .collect() } -pub fn static_project_panel_single_items() -> Vec { +pub fn static_project_panel_single_items() -> Vec> { vec![ ListEntry::new(Label::new("todo.md")) .left_icon(Icon::FileDoc.into()) @@ -622,7 +622,7 @@ pub fn static_project_panel_single_items() -> Vec { .collect() } -pub fn static_collab_panel_current_call() -> Vec { +pub fn static_collab_panel_current_call() -> Vec> { vec![ ListEntry::new(Label::new("as-cii")).left_avatar("http://github.com/as-cii.png?s=50"), ListEntry::new(Label::new("nathansobo")) @@ -635,7 +635,7 @@ pub fn static_collab_panel_current_call() -> Vec { .collect() } -pub fn static_collab_panel_channels() -> Vec { +pub fn static_collab_panel_channels() -> Vec> { vec![ ListEntry::new(Label::new("zed")) .left_icon(Icon::Hash.into()) From eb04160d2d9877fedd8efd5219c0e96c1cad0bf7 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 12:14:06 -0700 Subject: [PATCH 07/75] Dock menu --- crates/terminal_view2/src/terminal_view.rs | 13 ++++---- crates/ui2/src/components/context_menu.rs | 5 +++ crates/workspace2/src/dock.rs | 39 ++++++++++++++++++---- 3 files changed, 44 insertions(+), 13 deletions(-) diff --git a/crates/terminal_view2/src/terminal_view.rs b/crates/terminal_view2/src/terminal_view.rs index b6ab7e86b9191fa6910e5632158ae0c587059c21..56de1ee7efef0497e691be607f8e56859ecd9c11 100644 --- a/crates/terminal_view2/src/terminal_view.rs +++ b/crates/terminal_view2/src/terminal_view.rs @@ -32,7 +32,7 @@ use workspace::{ 
notifications::NotifyResultExt, register_deserializable_item, searchable::{SearchEvent, SearchOptions, SearchableItem}, - ui::{ContextMenu, Label}, + ui::{ContextMenu, Label, ListEntry}, CloseActiveItem, NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId, }; @@ -85,7 +85,7 @@ pub struct TerminalView { has_new_content: bool, //Currently using iTerm bell, show bell emoji in tab until input is received has_bell: bool, - context_menu: Option>, + context_menu: Option>>, blink_state: bool, blinking_on: bool, blinking_paused: bool, @@ -300,11 +300,10 @@ impl TerminalView { position: gpui::Point, cx: &mut ViewContext, ) { - self.context_menu = Some(cx.build_view(|cx| { - ContextMenu::new(cx) - .entry(Label::new("Clear"), Box::new(Clear)) - .entry( - Label::new("Close"), + self.context_menu = Some(ContextMenu::build(cx, |menu, _| { + menu.action(ListEntry::new(Label::new("Clear")), Box::new(Clear)) + .action( + ListEntry::new(Label::new("Close")), Box::new(CloseActiveItem { save_intent: None }), ) })); diff --git a/crates/ui2/src/components/context_menu.rs b/crates/ui2/src/components/context_menu.rs index b1e30dbe3bbd5bf85feb8b85129e26d3524c77cf..8bbc0c1b0c050e3b36b251a5411a90bc259577c6 100644 --- a/crates/ui2/src/components/context_menu.rs +++ b/crates/ui2/src/components/context_menu.rs @@ -69,6 +69,11 @@ impl ContextMenu { self } + pub fn action(self, view: ListEntry, action: Box) -> Self { + // todo: add the keybindings to the list entry + self.entry(view, move |_, cx| cx.dispatch_action(action.boxed_clone())) + } + pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { // todo!() cx.emit(Dismiss); diff --git a/crates/workspace2/src/dock.rs b/crates/workspace2/src/dock.rs index f62633e439f0883975e90d220ac74fb04e80039e..07237d6f62dd3c8ec102fcf979c9da40a7862583 100644 --- a/crates/workspace2/src/dock.rs +++ b/crates/workspace2/src/dock.rs @@ -8,7 +8,9 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::sync::Arc; use 
theme2::ActiveTheme; -use ui::{h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Tooltip}; +use ui::{ + h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Label, ListEntry, Tooltip, +}; pub enum PanelEvent { ChangePosition, @@ -672,6 +674,7 @@ impl Render for PanelButtons { let dock = self.dock.read(cx); let active_index = dock.active_panel_index; let is_open = dock.is_open; + let dock_position = dock.position; let (menu_anchor, menu_attach) = match dock.position { DockPosition::Left => (AnchorCorner::BottomLeft, AnchorCorner::TopLeft), @@ -684,9 +687,10 @@ impl Render for PanelButtons { .panel_entries .iter() .enumerate() - .filter_map(|(i, panel)| { - let icon = panel.panel.icon(cx)?; - let name = panel.panel.persistent_name(); + .filter_map(|(i, entry)| { + let icon = entry.panel.icon(cx)?; + let name = entry.panel.persistent_name(); + let panel = entry.panel.clone(); let mut button: IconButton = if i == active_index && is_open { let action = dock.toggle_action(); @@ -697,7 +701,7 @@ impl Render for PanelButtons { .action(action.boxed_clone()) .tooltip(move |_, cx| Tooltip::for_action(tooltip.clone(), &*action, cx)) } else { - let action = panel.panel.toggle_action(cx); + let action = entry.panel.toggle_action(cx); IconButton::new(name, icon) .action(action.boxed_clone()) @@ -708,7 +712,30 @@ impl Render for PanelButtons { menu_handle() .id(name) .menu(move |_, cx| { - cx.build_view(|cx| ContextMenu::new(cx).header("SECTION")) + const POSITIONS: [DockPosition; 3] = [ + DockPosition::Left, + DockPosition::Right, + DockPosition::Bottom, + ]; + ContextMenu::build(cx, |mut menu, cx| { + for position in POSITIONS { + if position != dock_position + && panel.position_is_valid(position, cx) + { + let panel = panel.clone(); + menu = menu.entry( + ListEntry::new(Label::new(format!( + "Dock {}", + position.to_label() + ))), + move |_, cx| { + panel.set_position(position, cx); + }, + ) + } + } + menu + }) }) .anchor(menu_anchor) 
.attach(menu_attach) From a5c615ceb4ad7d34a230db83684ebd6556ef9ae8 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Nov 2023 10:56:28 +0200 Subject: [PATCH 08/75] Pass a new query parameter into the search --- crates/collab/src/tests/integration_tests.rs | 2 +- .../random_project_collaboration_tests.rs | 3 ++- crates/collab2/src/tests/integration_tests.rs | 2 +- .../random_project_collaboration_tests.rs | 3 ++- .../src/chat_panel/message_editor.rs | 10 ++------ crates/project/src/project.rs | 20 +++++++++++++--- crates/project/src/project_tests.rs | 16 +++++++++++-- crates/project/src/search.rs | 20 ++++++++++++++++ crates/project2/src/project2.rs | 20 +++++++++++++--- crates/project2/src/project_tests.rs | 16 +++++++++++-- crates/project2/src/search.rs | 20 ++++++++++++++++ crates/rpc/proto/zed.proto | 1 + crates/rpc2/proto/zed.proto | 1 + crates/search/src/buffer_search.rs | 2 ++ crates/search/src/project_search.rs | 15 +++++++++++- crates/search/src/search.rs | 24 ++++++++++++------- 16 files changed, 143 insertions(+), 32 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 550c3a2bd8ade7b0bb8e8df3481ae54549079af8..fa7c4fe67df4fed4645e8c6552e242b3d7662276 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -5052,7 +5052,7 @@ async fn test_project_search( let mut results = HashMap::default(); let mut search_rx = project_b.update(cx_b, |project, cx| { project.search( - SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(), cx, ) }); diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 6f9513c3253ebece9aaa553d8839b80ded113fff..42a2b7927581f26a6d341ed9ed1d0683b43c89f6 100644 --- 
a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -869,7 +869,8 @@ impl RandomizedTest for ProjectCollaborationTest { let mut search = project.update(cx, |project, cx| { project.search( - SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()) + .unwrap(), cx, ) }); diff --git a/crates/collab2/src/tests/integration_tests.rs b/crates/collab2/src/tests/integration_tests.rs index 121a98c1d2ce766bb2a5a3d7dcce5d31a007ebf8..f2a39f35113df98df00f42eba2ff5fce59059358 100644 --- a/crates/collab2/src/tests/integration_tests.rs +++ b/crates/collab2/src/tests/integration_tests.rs @@ -4599,7 +4599,7 @@ async fn test_project_search( let mut results = HashMap::default(); let mut search_rx = project_b.update(cx_b, |project, cx| { project.search( - SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(), cx, ) }); diff --git a/crates/collab2/src/tests/random_project_collaboration_tests.rs b/crates/collab2/src/tests/random_project_collaboration_tests.rs index 361ca00c33d65841078447b421a39f9f288e73de..47b936a6117df1873702cb1937614548aa03d796 100644 --- a/crates/collab2/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab2/src/tests/random_project_collaboration_tests.rs @@ -870,7 +870,8 @@ impl RandomizedTest for ProjectCollaborationTest { let mut search = project.update(cx, |project, cx| { project.search( - SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()) + .unwrap(), cx, ) }); diff --git a/crates/collab_ui/src/chat_panel/message_editor.rs b/crates/collab_ui/src/chat_panel/message_editor.rs index 6dbe3aa204e9edf19d605ef880e52dacf4fe627d..9e6bfb553ebf1afd9c070162b25f589aa012762e 100644 --- 
a/crates/collab_ui/src/chat_panel/message_editor.rs +++ b/crates/collab_ui/src/chat_panel/message_editor.rs @@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration}; const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); lazy_static! { - static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex( - "@[-_\\w]+", - false, - false, - Default::default(), - Default::default() - ) - .unwrap(); + static ref MENTIONS_SEARCH: SearchQuery = + SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap(); } pub struct MessageEditor { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ab6cbd88c07ec8721d3adc2431964f5c69668d99..09d2c9a98195c0d2322bee5b1b0b38b1dacf1ebc 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -5548,7 +5548,16 @@ impl Project { .collect::>(); let background = cx.background().clone(); - let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum(); + let path_count: usize = snapshots + .iter() + .map(|s| { + if query.include_ignored() { + s.file_count() + } else { + s.visible_file_count() + } + }) + .sum(); if path_count == 0 { let (_, rx) = smol::channel::bounded(1024); return rx; @@ -5735,7 +5744,12 @@ impl Project { let mut snapshot_start_ix = 0; let mut abs_path = PathBuf::new(); for snapshot in snapshots { - let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count(); + let snapshot_end_ix = snapshot_start_ix + + if query.include_ignored() { + snapshot.file_count() + } else { + snapshot.visible_file_count() + }; if worker_end_ix <= snapshot_start_ix { break; } else if worker_start_ix > snapshot_end_ix { @@ -5748,7 +5762,7 @@ impl Project { cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix; for entry in snapshot - .files(false, start_in_snapshot) + .files(query.include_ignored(), start_in_snapshot) .take(end_in_snapshot - start_in_snapshot) { if matching_paths_tx.is_closed() { diff --git 
a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 90d32643d56097b170e89e197a5c5297a0eaf821..264c1ff7b54fa52dbffd87545603736704a5f932 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -3598,7 +3598,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3623,7 +3623,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3662,6 +3662,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], Vec::new() ) @@ -3681,6 +3682,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.rs").unwrap()], Vec::new() ) @@ -3703,6 +3705,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap(), @@ -3727,6 +3730,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.ts").unwrap(), @@ -3774,6 +3778,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.odd").unwrap()], ) @@ -3798,6 +3803,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.rs").unwrap()], ) @@ -3820,6 +3826,7 @@ async fn 
test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![ PathMatcher::new("*.ts").unwrap(), @@ -3844,6 +3851,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![ PathMatcher::new("*.rs").unwrap(), @@ -3885,6 +3893,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()], ) @@ -3904,6 +3913,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()], ).unwrap(), @@ -3922,6 +3932,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() @@ -3947,6 +3958,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index 7e360e22ee213b1f9a2e438dbd337bfedaf15255..c673440326e82630bd34c8117665b3f3cc092b69 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -39,6 +39,7 @@ pub enum SearchQuery { replacement: Option, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, @@ -48,6 +49,7 @@ pub enum SearchQuery { multiline: bool, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, } @@ -57,6 +59,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -74,6 +77,7 @@ impl SearchQuery { replacement: None, 
whole_word, case_sensitive, + include_ignored, inner, }) } @@ -82,6 +86,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -111,6 +116,7 @@ impl SearchQuery { multiline, whole_word, case_sensitive, + include_ignored, inner, }) } @@ -121,6 +127,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_exclude)?, ) @@ -129,6 +136,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_exclude)?, ) @@ -156,6 +164,7 @@ impl SearchQuery { regex: self.is_regex(), whole_word: self.whole_word(), case_sensitive: self.case_sensitive(), + include_ignored: self.include_ignored(), files_to_include: self .files_to_include() .iter() @@ -336,6 +345,17 @@ impl SearchQuery { } } + pub fn include_ignored(&self) -> bool { + match self { + Self::Text { + include_ignored, .. + } => *include_ignored, + Self::Regex { + include_ignored, .. + } => *include_ignored, + } + } + pub fn is_regex(&self) -> bool { matches!(self, Self::Regex { .. 
}) } diff --git a/crates/project2/src/project2.rs b/crates/project2/src/project2.rs index f2e47b71842c0ec3aedd94c20b57bd90123149ca..3cc4e8a2932611379a2c49cd55e99a32d508fea0 100644 --- a/crates/project2/src/project2.rs +++ b/crates/project2/src/project2.rs @@ -5618,7 +5618,16 @@ impl Project { .collect::>(); let background = cx.background_executor().clone(); - let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum(); + let path_count: usize = snapshots + .iter() + .map(|s| { + if query.include_ignored() { + s.file_count() + } else { + s.visible_file_count() + } + }) + .sum(); if path_count == 0 { let (_, rx) = smol::channel::bounded(1024); return rx; @@ -5806,7 +5815,12 @@ impl Project { let mut snapshot_start_ix = 0; let mut abs_path = PathBuf::new(); for snapshot in snapshots { - let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count(); + let snapshot_end_ix = snapshot_start_ix + + if query.include_ignored() { + snapshot.file_count() + } else { + snapshot.visible_file_count() + }; if worker_end_ix <= snapshot_start_ix { break; } else if worker_start_ix > snapshot_end_ix { @@ -5819,7 +5833,7 @@ impl Project { cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix; for entry in snapshot - .files(false, start_in_snapshot) + .files(query.include_ignored(), start_in_snapshot) .take(end_in_snapshot - start_in_snapshot) { if matching_paths_tx.is_closed() { diff --git a/crates/project2/src/project_tests.rs b/crates/project2/src/project_tests.rs index 81a4def456a202152865420f770f3e6cbfd7124e..53b2f6ba1fc7cc04489bce5256a0954b9e2bf7ff 100644 --- a/crates/project2/src/project_tests.rs +++ b/crates/project2/src/project_tests.rs @@ -3730,7 +3730,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3755,7 +3755,7 @@ async fn 
test_search(cx: &mut gpui::TestAppContext) { assert_eq!( search( &project, - SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(), + SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(), cx ) .await @@ -3794,6 +3794,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], Vec::new() ) @@ -3813,6 +3814,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![PathMatcher::new("*.rs").unwrap()], Vec::new() ) @@ -3835,6 +3837,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap(), @@ -3859,6 +3862,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, vec![ PathMatcher::new("*.rs").unwrap(), PathMatcher::new("*.ts").unwrap(), @@ -3906,6 +3910,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.odd").unwrap()], ) @@ -3930,6 +3935,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![PathMatcher::new("*.rs").unwrap()], ) @@ -3952,6 +3958,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![ PathMatcher::new("*.ts").unwrap(), @@ -3976,6 +3983,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) { search_query, false, true, + false, Vec::new(), vec![ PathMatcher::new("*.rs").unwrap(), @@ -4017,6 +4025,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.odd").unwrap()], vec![PathMatcher::new("*.odd").unwrap()], ) @@ -4036,6 +4045,7 @@ async 
fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![PathMatcher::new("*.ts").unwrap()], vec![PathMatcher::new("*.ts").unwrap()], ).unwrap(), @@ -4054,6 +4064,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() @@ -4079,6 +4090,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex search_query, false, true, + false, vec![ PathMatcher::new("*.ts").unwrap(), PathMatcher::new("*.odd").unwrap() diff --git a/crates/project2/src/search.rs b/crates/project2/src/search.rs index 7e360e22ee213b1f9a2e438dbd337bfedaf15255..c673440326e82630bd34c8117665b3f3cc092b69 100644 --- a/crates/project2/src/search.rs +++ b/crates/project2/src/search.rs @@ -39,6 +39,7 @@ pub enum SearchQuery { replacement: Option, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, @@ -48,6 +49,7 @@ pub enum SearchQuery { multiline: bool, whole_word: bool, case_sensitive: bool, + include_ignored: bool, inner: SearchInputs, }, } @@ -57,6 +59,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -74,6 +77,7 @@ impl SearchQuery { replacement: None, whole_word, case_sensitive, + include_ignored, inner, }) } @@ -82,6 +86,7 @@ impl SearchQuery { query: impl ToString, whole_word: bool, case_sensitive: bool, + include_ignored: bool, files_to_include: Vec, files_to_exclude: Vec, ) -> Result { @@ -111,6 +116,7 @@ impl SearchQuery { multiline, whole_word, case_sensitive, + include_ignored, inner, }) } @@ -121,6 +127,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, 
deserialize_path_matches(&message.files_to_exclude)?, ) @@ -129,6 +136,7 @@ impl SearchQuery { message.query, message.whole_word, message.case_sensitive, + message.include_ignored, deserialize_path_matches(&message.files_to_include)?, deserialize_path_matches(&message.files_to_exclude)?, ) @@ -156,6 +164,7 @@ impl SearchQuery { regex: self.is_regex(), whole_word: self.whole_word(), case_sensitive: self.case_sensitive(), + include_ignored: self.include_ignored(), files_to_include: self .files_to_include() .iter() @@ -336,6 +345,17 @@ impl SearchQuery { } } + pub fn include_ignored(&self) -> bool { + match self { + Self::Text { + include_ignored, .. + } => *include_ignored, + Self::Regex { + include_ignored, .. + } => *include_ignored, + } + } + pub fn is_regex(&self) -> bool { matches!(self, Self::Regex { .. }) } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 206777879b6882c64f72fb87f22937003b86835a..a6d27fa57d4a0a9a063f4f0a30b634207ef8ac63 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -884,6 +884,7 @@ message SearchProject { bool case_sensitive = 5; string files_to_include = 6; string files_to_exclude = 7; + bool include_ignored = 8; } message SearchProjectResponse { diff --git a/crates/rpc2/proto/zed.proto b/crates/rpc2/proto/zed.proto index 206777879b6882c64f72fb87f22937003b86835a..a6d27fa57d4a0a9a063f4f0a30b634207ef8ac63 100644 --- a/crates/rpc2/proto/zed.proto +++ b/crates/rpc2/proto/zed.proto @@ -884,6 +884,7 @@ message SearchProject { bool case_sensitive = 5; string files_to_include = 6; string files_to_exclude = 7; + bool include_ignored = 8; } message SearchProjectResponse { diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index ef8c56f2a7b4ed17305ae01a1cd638980079ea0a..29ffe7c021f23b4084d8b10dd9db3c688b8a6b24 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -805,6 +805,7 @@ impl BufferSearchBar { query, 
self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, Vec::new(), Vec::new(), ) { @@ -820,6 +821,7 @@ impl BufferSearchBar { query, self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + false, Vec::new(), Vec::new(), ) { diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index f6e17bbee5d12685385ca64de790d5f8217bb92d..74e06b4e2a49de66d41464d9947a7641ff08a8f4 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -4,7 +4,7 @@ use crate::{ search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button}, ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery, PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch, - ToggleCaseSensitive, ToggleReplace, ToggleWholeWord, + ToggleCaseSensitive, ToggleIncludeIgnored, ToggleReplace, ToggleWholeWord, }; use anyhow::{Context, Result}; use collections::HashMap; @@ -85,6 +85,7 @@ pub fn init(cx: &mut AppContext) { cx.capture_action(ProjectSearchView::replace_next); add_toggle_option_action::(SearchOptions::CASE_SENSITIVE, cx); add_toggle_option_action::(SearchOptions::WHOLE_WORD, cx); + add_toggle_option_action::(SearchOptions::INCLUDE_IGNORED, cx); add_toggle_filters_action::(cx); } @@ -1192,6 +1193,7 @@ impl ProjectSearchView { text, self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + self.search_options.contains(SearchOptions::INCLUDE_IGNORED), included_files, excluded_files, ) { @@ -1210,6 +1212,7 @@ impl ProjectSearchView { text, self.search_options.contains(SearchOptions::WHOLE_WORD), self.search_options.contains(SearchOptions::CASE_SENSITIVE), + self.search_options.contains(SearchOptions::INCLUDE_IGNORED), included_files, excluded_files, ) { @@ -1764,6 
+1767,15 @@ impl View for ProjectSearchBar { render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx) }); + let include_ignored = is_semantic_disabled.then(|| { + render_option_button_icon( + // TODO kb icon + "icons/case_insensitive.svg", + SearchOptions::INCLUDE_IGNORED, + cx, + ) + }); + let search_button_for_mode = |mode, side, cx: &mut ViewContext| { let is_active = if let Some(search) = self.active_project_search.as_ref() { let search = search.read(cx); @@ -1863,6 +1875,7 @@ impl View for ProjectSearchBar { .with_child(filter_button) .with_children(case_sensitive) .with_children(whole_word) + .with_children(include_ignored) .flex(1., false) .constrained() .contained(), diff --git a/crates/search/src/search.rs b/crates/search/src/search.rs index ba06b3f9c772a5cd97483636fefc0d0c2bc7c191..db39455dca3fa5c5b821eb3234595be00fc7ebfa 100644 --- a/crates/search/src/search.rs +++ b/crates/search/src/search.rs @@ -29,6 +29,7 @@ actions!( CycleMode, ToggleWholeWord, ToggleCaseSensitive, + ToggleIncludeIgnored, ToggleReplace, SelectNextMatch, SelectPrevMatch, @@ -49,31 +50,35 @@ bitflags! 
{ const NONE = 0b000; const WHOLE_WORD = 0b001; const CASE_SENSITIVE = 0b010; + const INCLUDE_IGNORED = 0b100; } } impl SearchOptions { pub fn label(&self) -> &'static str { match *self { - SearchOptions::WHOLE_WORD => "Match Whole Word", - SearchOptions::CASE_SENSITIVE => "Match Case", - _ => panic!("{:?} is not a named SearchOption", self), + Self::WHOLE_WORD => "Match Whole Word", + Self::CASE_SENSITIVE => "Match Case", + Self::INCLUDE_IGNORED => "Include Ignored", + _ => panic!("{self:?} is not a named SearchOption"), } } pub fn icon(&self) -> &'static str { match *self { - SearchOptions::WHOLE_WORD => "icons/word_search.svg", - SearchOptions::CASE_SENSITIVE => "icons/case_insensitive.svg", - _ => panic!("{:?} is not a named SearchOption", self), + Self::WHOLE_WORD => "icons/word_search.svg", + Self::CASE_SENSITIVE => "icons/case_insensitive.svg", + Self::INCLUDE_IGNORED => "icons/case_insensitive.svg", + _ => panic!("{self:?} is not a named SearchOption"), } } pub fn to_toggle_action(&self) -> Box { match *self { - SearchOptions::WHOLE_WORD => Box::new(ToggleWholeWord), - SearchOptions::CASE_SENSITIVE => Box::new(ToggleCaseSensitive), - _ => panic!("{:?} is not a named SearchOption", self), + Self::WHOLE_WORD => Box::new(ToggleWholeWord), + Self::CASE_SENSITIVE => Box::new(ToggleCaseSensitive), + Self::INCLUDE_IGNORED => Box::new(ToggleIncludeIgnored), + _ => panic!("{self:?} is not a named SearchOption"), } } @@ -85,6 +90,7 @@ impl SearchOptions { let mut options = SearchOptions::NONE; options.set(SearchOptions::WHOLE_WORD, query.whole_word()); options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive()); + options.set(SearchOptions::INCLUDE_IGNORED, query.include_ignored()); options } From 7d97dfa6beefc555a62df70a94981fd7b79d75f0 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Nov 2023 15:23:08 +0200 Subject: [PATCH 09/75] Test and filter data draft --- crates/project/src/ignore.rs | 22 +----- crates/project/src/project_settings.rs | 6 ++ 
crates/project/src/worktree.rs | 105 ++++++++++++++++++++++--- crates/project/src/worktree_tests.rs | 85 +++++++++++++++++++- 4 files changed, 186 insertions(+), 32 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index 8bac08b96c3a7b920328d946723ae423404b529e..4f28160e3a743ac822f0bb85815a664c438543d9 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -1,5 +1,5 @@ use ignore::gitignore::Gitignore; -use std::{ffi::OsStr, path::Path, sync::Arc}; +use std::{path::Path, sync::Arc}; pub enum IgnoreStack { None, @@ -34,24 +34,4 @@ impl IgnoreStack { }), } } - - pub fn is_abs_path_ignored(&self, abs_path: &Path, is_dir: bool) -> bool { - if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { - return true; - } - - match self { - Self::None => false, - Self::All => true, - Self::Some { - abs_base_path, - ignore, - parent: prev, - } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { - ignore::Match::None => prev.is_abs_path_ignored(abs_path, is_dir), - ignore::Match::Ignore(_) => true, - ignore::Match::Whitelist(_) => false, - }, - } - } } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 607b2848139aa88b1d36821030507de8c85ed72a..b2bafe228eb28ec53bfe6200b0ab2679b4cb2fed 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,6 +10,12 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, + // TODO kb better names and docs + // TODO kb how to react to their changes? 
+ #[serde(default)] + pub scan_exclude_files: Vec, + #[serde(default)] + pub scan_include_files: Vec, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index d59885225acbff208153370e7ed3ec14050661ef..d1633b828df70de9331dacee18787474cc2641ea 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1,5 +1,6 @@ use crate::{ - copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions, + copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary, + ProjectEntryId, RemoveOptions, }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; @@ -55,7 +56,10 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt}; +use util::{ + paths::{PathMatcher, HOME}, + ResultExt, +}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -216,6 +220,8 @@ pub struct LocalSnapshot { /// All of the git repositories in the worktree, indexed by the project entry /// id of their parent directory. 
git_repositories: TreeMap, + scan_exclude_files: Vec, + scan_include_files: Vec, } struct BackgroundScannerState { @@ -303,8 +309,34 @@ impl Worktree { let root_name = abs_path .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); - + let project_settings = settings::get::(cx); + let scan_exclude_files = project_settings.scan_exclude_files.iter() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" + ); + None + }) + }) + .collect::>(); + let scan_include_files = project_settings.scan_include_files.iter() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `scan_include_files` project settings due to parsing error: {e:#}" + ); + None + }) + }) + .collect::>(); let mut snapshot = LocalSnapshot { + scan_include_files, + scan_exclude_files, ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { @@ -2042,7 +2074,7 @@ impl LocalSnapshot { let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if ignore_stack.is_abs_path_ignored(parent_abs_path, true) { + if self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2050,7 +2082,7 @@ impl LocalSnapshot { } } - if ignore_stack.is_abs_path_ignored(abs_path, is_dir) { + if self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2145,6 +2177,45 @@ impl LocalSnapshot { paths.sort_by(|a, b| a.0.cmp(b.0)); paths } + + fn is_abs_path_ignored( + &self, + abs_path: &Path, + ignore_stack: &IgnoreStack, + is_dir: bool, + ) -> bool { + dbg!(&abs_path); + if self + .scan_include_files + .iter() + .any(|include_matcher| 
include_matcher.is_match(abs_path)) + { + dbg!("included!!"); + return false; + } else if self + .scan_exclude_files + .iter() + .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) + { + dbg!("excluded!!"); + return true; + } else if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { + return true; + } + match ignore_stack { + IgnoreStack::None => false, + IgnoreStack::All => true, + IgnoreStack::Some { + abs_base_path, + ignore, + parent: prev, + } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { + ignore::Match::None => self.is_abs_path_ignored(abs_path, &prev, is_dir), + ignore::Match::Ignore(_) => true, + ignore::Match::Whitelist(_) => false, + }, + } + } } impl BackgroundScannerState { @@ -2767,7 +2838,7 @@ pub struct Entry { pub mtime: SystemTime, pub is_symlink: bool, - /// Whether this entry is ignored by Git. + /// Whether this entry is ignored by Zed. /// /// We only scan ignored entries once the directory is expanded and /// exclude them from searches. 
@@ -3464,7 +3535,7 @@ impl BackgroundScanner { for entry in &mut new_entries { let entry_abs_path = root_abs_path.join(&entry.path); entry.is_ignored = - ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir()); + self.is_abs_path_ignored(&entry_abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { @@ -3523,7 +3594,8 @@ impl BackgroundScanner { } if child_entry.is_dir() { - child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true); + child_entry.is_ignored = + self.is_abs_path_ignored(&child_abs_path, &ignore_stack, true); // Avoid recursing until crash in the case of a recursive symlink if !job.ancestor_inodes.contains(&child_entry.inode) { @@ -3547,7 +3619,8 @@ impl BackgroundScanner { new_jobs.push(None); } } else { - child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); + child_entry.is_ignored = + self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); if !child_entry.is_ignored { if let Some((repository_dir, repository, staged_statuses)) = &job.containing_repository @@ -3825,7 +3898,7 @@ impl BackgroundScanner { for mut entry in snapshot.child_entries(path).cloned() { let was_ignored = entry.is_ignored; let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); - entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir()); + entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { let child_ignore_stack = if entry.is_ignored { IgnoreStack::all() @@ -4008,6 +4081,18 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } + + fn is_abs_path_ignored( + &self, + abs_path: &Path, + ignore_stack: &IgnoreStack, + is_dir: bool, + ) -> bool { + self.state + .lock() + .snapshot + .is_abs_path_ignored(abs_path, ignore_stack, is_dir) + } } fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { diff --git 
a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 4253f45b0ce912412b0f9716474f92d0f875f026..fff23a36b4894e37a38d911a88b9cb296056639d 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -1,6 +1,7 @@ use crate::{ + project_settings::ProjectSettings, worktree::{Event, Snapshot, WorktreeModelHandle}, - Entry, EntryKind, PathChange, Worktree, + Entry, EntryKind, PathChange, Project, Worktree, }; use anyhow::Result; use client::Client; @@ -12,6 +13,7 @@ use postage::stream::Stream; use pretty_assertions::assert_eq; use rand::prelude::*; use serde_json::json; +use settings::SettingsStore; use std::{ env, fmt::Write, @@ -877,6 +879,87 @@ async fn test_write_file(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { + let dir = temp_tree(json!({ + ".git": {}, + ".gitignore": "**/target\n/node_modules\n", + "target": {}, + "node_modules": { + ".DS_Store": "", + "prettier": { + "package.json": "{}", + }, + }, + "src": { + ".DS_Store": "", + "foo": { + "foo.rs": "mod another;\n", + "another.rs": "// another", + }, + "bar": { + "bar.rs": "// bar", + }, + "lib.rs": "mod foo;\nmod bar;\n", + }, + ".DS_Store": "", + })); + cx.update(|cx| { + cx.set_global(SettingsStore::test(cx)); + Project::init_settings(cx); + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.scan_exclude_files = + vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; + project_settings.scan_include_files = vec!["**/node_modules".to_string()]; + }); + }); + }); + + let tree = Worktree::local( + build_client(cx), + dir.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + // tree.update(cx, |tree, cx| { + // tree.as_local().unwrap().write_file( 
+ // Path::new("tracked-dir/file.txt"), + // "hello".into(), + // Default::default(), + // cx, + // ) + // }) + // .await + // .unwrap(); + // tree.update(cx, |tree, cx| { + // tree.as_local().unwrap().write_file( + // Path::new("ignored-dir/file.txt"), + // "world".into(), + // Default::default(), + // cx, + // ) + // }) + // .await + // .unwrap(); + + // tree.read_with(cx, |tree, _| { + // let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); + // let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); + // assert!(!tracked.is_ignored); + // assert!(ignored.is_ignored); + // }); + dbg!("!!!!!!!!!!!!"); +} + #[gpui::test(iterations = 30)] async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { let fs = FakeFs::new(cx.background()); From 9072e5a50707ab2dc799803bf0b04dc825caff9d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 10 Nov 2023 17:51:44 +0200 Subject: [PATCH 10/75] Properly set ignore stacks and is_ignored values --- crates/project/src/worktree.rs | 99 ++++++++++++++++++++++------------ 1 file changed, 65 insertions(+), 34 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index d1633b828df70de9331dacee18787474cc2641ea..eb2612a73503333fce7ed98cd031b9f5a0399f88 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2072,9 +2072,12 @@ impl LocalSnapshot { } } + // TODO kb choose the correct ignore stack for custom `is_abs_path_included(..) 
= true` cases let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { + if !self.is_abs_path_included(parent_abs_path) + && self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) + { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2082,7 +2085,9 @@ impl LocalSnapshot { } } - if self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { + if !self.is_abs_path_included(abs_path) + && self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) + { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2178,30 +2183,28 @@ impl LocalSnapshot { paths } + fn is_abs_path_included(&self, abs_path: &Path) -> bool { + self.scan_include_files + .iter() + .any(|include_matcher| include_matcher.is_match(abs_path)) + } + fn is_abs_path_ignored( &self, abs_path: &Path, ignore_stack: &IgnoreStack, is_dir: bool, ) -> bool { - dbg!(&abs_path); if self - .scan_include_files - .iter() - .any(|include_matcher| include_matcher.is_match(abs_path)) - { - dbg!("included!!"); - return false; - } else if self .scan_exclude_files .iter() .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) { - dbg!("excluded!!"); return true; } else if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { return true; } + match ignore_stack { IgnoreStack::None => false, IgnoreStack::All => true, @@ -2219,8 +2222,8 @@ impl LocalSnapshot { } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry) -> bool { - (!entry.is_external && !entry.is_ignored) + fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { + (!entry.is_external && (!entry.is_ignored || self.snapshot.is_abs_path_included(entry_abs_path))) || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2325,6 +2328,16 @@ impl BackgroundScannerState { 
let mut entries_by_id_edits = Vec::new(); for entry in entries { + let abs_path = self.snapshot.abs_path.join(&entry.path); + let ignore_stack = self + .snapshot + .ignore_stack_for_abs_path(&abs_path, entry.is_dir()); + let actual_ignored = + self.snapshot + .is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); + if entry.path.to_string_lossy().contains("node_modules") { + dbg!("@@@@@@@@@", &entry, actual_ignored, ignore_stack.is_all()); + } entries_by_id_edits.push(Edit::Insert(PathEntry { id: entry.id, path: entry.path.clone(), @@ -3165,7 +3178,10 @@ impl BackgroundScanner { let ignore_stack = state .snapshot .ignore_stack_for_abs_path(&root_abs_path, true); - if ignore_stack.is_all() { + if state + .snapshot + .is_abs_path_ignored(&root_abs_path, &ignore_stack, true) + { root_entry.is_ignored = true; state.insert_entry(root_entry.clone(), self.fs.as_ref()); } @@ -3539,7 +3555,9 @@ impl BackgroundScanner { if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { - job.ignore_stack = if entry.is_ignored { + job.ignore_stack = if entry.is_ignored + && !self.is_abs_path_included(&entry_abs_path) + { IgnoreStack::all() } else { ignore_stack.clone() @@ -3603,15 +3621,17 @@ impl BackgroundScanner { ancestor_inodes.insert(child_entry.inode); new_jobs.push(Some(ScanJob { - abs_path: child_abs_path, path: child_path, is_external: child_entry.is_external, - ignore_stack: if child_entry.is_ignored { + ignore_stack: if child_entry.is_ignored + && !self.is_abs_path_included(&child_abs_path) + { IgnoreStack::all() } else { ignore_stack.clone() }, ancestor_inodes, + abs_path: child_abs_path, scan_queue: job.scan_queue.clone(), containing_repository: job.containing_repository.clone(), })); @@ -3621,7 +3641,7 @@ impl BackgroundScanner { } else { child_entry.is_ignored = self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); - if !child_entry.is_ignored { + if !child_entry.is_ignored || 
self.is_abs_path_included(&child_abs_path) { if let Some((repository_dir, repository, staged_statuses)) = &job.containing_repository { @@ -3648,7 +3668,8 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry) { + let entry_abs_path = root_abs_path.join(&entry.path); + if state.should_scan_directory(&entry, &entry_abs_path) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); @@ -3735,25 +3756,27 @@ impl BackgroundScanner { self.next_entry_id.as_ref(), state.snapshot.root_char_bag, ); - fs_entry.is_ignored = ignore_stack.is_all(); + let is_dir = fs_entry.is_dir(); + fs_entry.is_ignored = + state + .snapshot + .is_abs_path_ignored(&abs_path, &ignore_stack, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !fs_entry.is_ignored { - if !fs_entry.is_dir() { - if let Some((work_dir, repo)) = - state.snapshot.local_repo_for_path(&path) - { - if let Ok(repo_path) = path.strip_prefix(work_dir.0) { - let repo_path = RepoPath(repo_path.into()); - let repo = repo.repo_ptr.lock(); - fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); - } + if !is_dir + && (!fs_entry.is_ignored || state.snapshot.is_abs_path_included(&abs_path)) + { + if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(work_dir.0) { + let repo_path = RepoPath(repo_path.into()); + let repo = repo.repo_ptr.lock(); + fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); } } } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry) { + if state.should_scan_directory(&fs_entry, &abs_path) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3900,7 +3923,8 @@ impl BackgroundScanner { let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); 
entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { - let child_ignore_stack = if entry.is_ignored { + let child_ignore_stack = if entry.is_ignored && self.is_abs_path_included(&abs_path) + { IgnoreStack::all() } else { ignore_stack.clone() @@ -3908,9 +3932,12 @@ impl BackgroundScanner { // Scan any directories that were previously ignored and weren't // previously scanned. - if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { + if was_ignored + && (!entry.is_ignored || self.is_abs_path_included(&abs_path)) + && entry.kind.is_unloaded() + { let state = self.state.lock(); - if state.should_scan_directory(&entry) { + if state.should_scan_directory(&entry, &abs_path) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } @@ -4082,6 +4109,10 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } + fn is_abs_path_included(&self, abs_path: &Path) -> bool { + self.state.lock().snapshot.is_abs_path_included(abs_path) + } + fn is_abs_path_ignored( &self, abs_path: &Path, From 401f85bed219441c4c840078422c48f2a2e4cb18 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 13 Nov 2023 18:59:09 +0200 Subject: [PATCH 11/75] Properly ignore elements from configured exceptions --- crates/project/src/project_settings.rs | 2 +- crates/project/src/worktree.rs | 34 ++++++-------------------- 2 files changed, 8 insertions(+), 28 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index b2bafe228eb28ec53bfe6200b0ab2679b4cb2fed..2caac6191e717119379021f122be1a540075e2ef 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,7 +10,7 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, - // TODO kb better names and docs + // TODO kb better names and docs and tests // TODO kb how to react on their changes? 
#[serde(default)] pub scan_exclude_files: Vec, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index eb2612a73503333fce7ed98cd031b9f5a0399f88..af5531f5d35f080ec897fcabcbf8fbec5ba96130 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1513,7 +1513,7 @@ impl Snapshot { self.entries_by_id.get(&entry_id, &()).is_some() } - pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result { + fn insert_entry(&mut self, entry: proto::Entry) -> Result { let entry = Entry::try_from((&self.root_char_bag, entry))?; let old_entry = self.entries_by_id.insert_or_replace( PathEntry { @@ -2075,9 +2075,7 @@ impl LocalSnapshot { // TODO kb choose the correct ignore stack for custom `is_abs_path_included(..) = true` cases let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if !self.is_abs_path_included(parent_abs_path) - && self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) - { + if !self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2085,9 +2083,7 @@ impl LocalSnapshot { } } - if !self.is_abs_path_included(abs_path) - && self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) - { + if !self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2241,7 +2237,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() { + if !ignore_stack.is_all() || self.snapshot.is_abs_path_included(&abs_path) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -2326,18 +2322,7 @@ impl 
BackgroundScannerState { self.scanned_dirs.insert(parent_entry_id); let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)]; let mut entries_by_id_edits = Vec::new(); - for entry in entries { - let abs_path = self.snapshot.abs_path.join(&entry.path); - let ignore_stack = self - .snapshot - .ignore_stack_for_abs_path(&abs_path, entry.is_dir()); - let actual_ignored = - self.snapshot - .is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); - if entry.path.to_string_lossy().contains("node_modules") { - dbg!("@@@@@@@@@", &entry, actual_ignored, ignore_stack.is_all()); - } entries_by_id_edits.push(Edit::Insert(PathEntry { id: entry.id, path: entry.path.clone(), @@ -3555,9 +3540,7 @@ impl BackgroundScanner { if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { - job.ignore_stack = if entry.is_ignored - && !self.is_abs_path_included(&entry_abs_path) - { + job.ignore_stack = if entry.is_ignored { IgnoreStack::all() } else { ignore_stack.clone() @@ -3623,9 +3606,7 @@ impl BackgroundScanner { new_jobs.push(Some(ScanJob { path: child_path, is_external: child_entry.is_external, - ignore_stack: if child_entry.is_ignored - && !self.is_abs_path_included(&child_abs_path) - { + ignore_stack: if child_entry.is_ignored { IgnoreStack::all() } else { ignore_stack.clone() @@ -3923,8 +3904,7 @@ impl BackgroundScanner { let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); if entry.is_dir() { - let child_ignore_stack = if entry.is_ignored && self.is_abs_path_included(&abs_path) - { + let child_ignore_stack = if entry.is_ignored { IgnoreStack::all() } else { ignore_stack.clone() From 126e4cce8f7e8f7629eb2d354a731bb5143bb03a Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 13 Nov 2023 22:40:21 +0200 Subject: [PATCH 12/75] Scan all ignored files by default now --- crates/project/src/ignore.rs | 4 -- 
crates/project/src/project_settings.rs | 2 - crates/project/src/worktree.rs | 88 ++++++++------------------ crates/project/src/worktree_tests.rs | 1 - 4 files changed, 26 insertions(+), 69 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index 4f28160e3a743ac822f0bb85815a664c438543d9..e241f7fbfc674b53f4ab12bd7b2466b99a70da08 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -20,10 +20,6 @@ impl IgnoreStack { Arc::new(Self::All) } - pub fn is_all(&self) -> bool { - matches!(self, IgnoreStack::All) - } - pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 2caac6191e717119379021f122be1a540075e2ef..17233219d74061b7d1382cff4dd8b18408f7961d 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -14,8 +14,6 @@ pub struct ProjectSettings { // TODO kb how to react on their changes? #[serde(default)] pub scan_exclude_files: Vec, - #[serde(default)] - pub scan_include_files: Vec, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index af5531f5d35f080ec897fcabcbf8fbec5ba96130..4af8548a8308e68a6411fbbe78bcdcb003a8cbb9 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -221,7 +221,6 @@ pub struct LocalSnapshot { /// id of their parent directory. 
git_repositories: TreeMap, scan_exclude_files: Vec, - scan_include_files: Vec, } struct BackgroundScannerState { @@ -322,20 +321,7 @@ impl Worktree { }) }) .collect::>(); - let scan_include_files = project_settings.scan_include_files.iter() - .filter_map(|pattern| { - PathMatcher::new(pattern) - .map(Some) - .unwrap_or_else(|e| { - log::error!( - "Skipping pattern {pattern} in `scan_include_files` project settings due to parsing error: {e:#}" - ); - None - }) - }) - .collect::>(); let mut snapshot = LocalSnapshot { - scan_include_files, scan_exclude_files, ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), @@ -2072,7 +2058,6 @@ impl LocalSnapshot { } } - // TODO kb choose the correct ignore stack for custom `is_abs_path_included(..) = true` cases let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { if !self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { @@ -2179,12 +2164,6 @@ impl LocalSnapshot { paths } - fn is_abs_path_included(&self, abs_path: &Path) -> bool { - self.scan_include_files - .iter() - .any(|include_matcher| include_matcher.is_match(abs_path)) - } - fn is_abs_path_ignored( &self, abs_path: &Path, @@ -2218,8 +2197,8 @@ impl LocalSnapshot { } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { - (!entry.is_external && (!entry.is_ignored || self.snapshot.is_abs_path_included(entry_abs_path))) + fn should_scan_directory(&self, entry: &Entry) -> bool { + !entry.is_external || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2237,15 +2216,13 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() || 
self.snapshot.is_abs_path_included(&abs_path) { - if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { - if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { - containing_repository = Some(( - workdir_path, - repo.repo_ptr.clone(), - repo.repo_ptr.lock().staged_statuses(repo_path), - )); - } + if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { + containing_repository = Some(( + workdir_path, + repo.repo_ptr.clone(), + repo.repo_ptr.lock().staged_statuses(repo_path), + )); } } if !ancestor_inodes.contains(&entry.inode) { @@ -3622,19 +3599,17 @@ impl BackgroundScanner { } else { child_entry.is_ignored = self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); - if !child_entry.is_ignored || self.is_abs_path_included(&child_abs_path) { - if let Some((repository_dir, repository, staged_statuses)) = - &job.containing_repository - { - if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) { - let repo_path = RepoPath(repo_path.into()); - child_entry.git_status = combine_git_statuses( - staged_statuses.get(&repo_path).copied(), - repository - .lock() - .unstaged_status(&repo_path, child_entry.mtime), - ); - } + if let Some((repository_dir, repository, staged_statuses)) = + &job.containing_repository + { + if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) { + let repo_path = RepoPath(repo_path.into()); + child_entry.git_status = combine_git_statuses( + staged_statuses.get(&repo_path).copied(), + repository + .lock() + .unstaged_status(&repo_path, child_entry.mtime), + ); } } } @@ -3649,8 +3624,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - let entry_abs_path = root_abs_path.join(&entry.path); - if state.should_scan_directory(&entry, &entry_abs_path) { + if state.should_scan_directory(&entry) { job_ix += 1; } else { log::debug!("defer 
scanning directory {:?}", entry.path); @@ -3744,9 +3718,7 @@ impl BackgroundScanner { .is_abs_path_ignored(&abs_path, &ignore_stack, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !is_dir - && (!fs_entry.is_ignored || state.snapshot.is_abs_path_included(&abs_path)) - { + if !is_dir { if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) { let repo_path = RepoPath(repo_path.into()); @@ -3757,7 +3729,7 @@ impl BackgroundScanner { } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry, &abs_path) { + if state.should_scan_directory(&fs_entry) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3910,14 +3882,10 @@ impl BackgroundScanner { ignore_stack.clone() }; - // Scan any directories that were previously ignored and weren't - // previously scanned. - if was_ignored - && (!entry.is_ignored || self.is_abs_path_included(&abs_path)) - && entry.kind.is_unloaded() - { + // Scan any directories that were previously ignored and weren't previously scanned. 
+ if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); - if state.should_scan_directory(&entry, &abs_path) { + if state.should_scan_directory(&entry) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } @@ -4089,10 +4057,6 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } - fn is_abs_path_included(&self, abs_path: &Path) -> bool { - self.state.lock().snapshot.is_abs_path_included(abs_path) - } - fn is_abs_path_ignored( &self, abs_path: &Path, diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index fff23a36b4894e37a38d911a88b9cb296056639d..1fb4aa9a3474740d31f1fab5519b90b502758f4c 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -911,7 +911,6 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { store.update_user_settings::(cx, |project_settings| { project_settings.scan_exclude_files = vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; - project_settings.scan_include_files = vec!["**/node_modules".to_string()]; }); }); }); From b8be720490e9fe5662a14950d96620a88a0677e1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 13 Nov 2023 23:18:24 +0200 Subject: [PATCH 13/75] Fix the bugs --- crates/project/src/ignore.rs | 21 +++++- crates/project/src/project_settings.rs | 1 + crates/project/src/worktree.rs | 97 ++++++++------------------ 3 files changed, 50 insertions(+), 69 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index e241f7fbfc674b53f4ab12bd7b2466b99a70da08..ed1ce4006d9ed90269d5d3ca184b0ff9c5fde590 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -1,5 +1,5 @@ use ignore::gitignore::Gitignore; -use std::{path::Path, sync::Arc}; +use std::{ffi::OsStr, path::Path, sync::Arc}; pub enum IgnoreStack { None, @@ -30,4 +30,23 @@ impl IgnoreStack { }), } } + + pub fn 
is_abs_path_ignored(&self, abs_path: &Path, is_dir: bool) -> bool { + if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { + return true; + } + match self { + Self::None => false, + Self::All => true, + Self::Some { + abs_base_path, + ignore, + parent: prev, + } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { + ignore::Match::None => prev.is_abs_path_ignored(abs_path, is_dir), + ignore::Match::Ignore(_) => true, + ignore::Match::Whitelist(_) => false, + }, + } + } } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 17233219d74061b7d1382cff4dd8b18408f7961d..8aebb380b635c732f424075bb6e44df0fb6b3caf 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -12,6 +12,7 @@ pub struct ProjectSettings { pub git: GitSettings, // TODO kb better names and docs and tests // TODO kb how to react on their changes? + // TODO kb /something/node_modules/ does not match `"**/node_modules/**"` glob!!! 
#[serde(default)] pub scan_exclude_files: Vec, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4af8548a8308e68a6411fbbe78bcdcb003a8cbb9..dbc18e086d8cb9bf68fb2c24fe24caa8e5b18ac8 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2060,7 +2060,7 @@ impl LocalSnapshot { let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if !self.is_abs_path_ignored(parent_abs_path, &ignore_stack, true) { + if !ignore_stack.is_abs_path_ignored(parent_abs_path, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2068,7 +2068,7 @@ impl LocalSnapshot { } } - if !self.is_abs_path_ignored(abs_path, &ignore_stack, is_dir) { + if !ignore_stack.is_abs_path_ignored(abs_path, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack @@ -2164,41 +2164,16 @@ impl LocalSnapshot { paths } - fn is_abs_path_ignored( - &self, - abs_path: &Path, - ignore_stack: &IgnoreStack, - is_dir: bool, - ) -> bool { - if self - .scan_exclude_files + fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { + self.scan_exclude_files .iter() .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) - { - return true; - } else if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { - return true; - } - - match ignore_stack { - IgnoreStack::None => false, - IgnoreStack::All => true, - IgnoreStack::Some { - abs_base_path, - ignore, - parent: prev, - } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) { - ignore::Match::None => self.is_abs_path_ignored(abs_path, &prev, is_dir), - ignore::Match::Ignore(_) => true, - ignore::Match::Whitelist(_) => false, - }, - } } } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry) -> bool { - !entry.is_external + fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { + !entry.is_external && 
!self.snapshot.is_abs_path_excluded(entry_abs_path) || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2216,13 +2191,17 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { - if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { - containing_repository = Some(( - workdir_path, - repo.repo_ptr.clone(), - repo.repo_ptr.lock().staged_statuses(repo_path), - )); + if !matches!(ignore_stack.as_ref(), &IgnoreStack::All) + && !self.snapshot.is_abs_path_excluded(&abs_path) + { + if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { + containing_repository = Some(( + workdir_path, + repo.repo_ptr.clone(), + repo.repo_ptr.lock().staged_statuses(repo_path), + )); + } } } if !ancestor_inodes.contains(&entry.inode) { @@ -3140,10 +3119,7 @@ impl BackgroundScanner { let ignore_stack = state .snapshot .ignore_stack_for_abs_path(&root_abs_path, true); - if state - .snapshot - .is_abs_path_ignored(&root_abs_path, &ignore_stack, true) - { + if ignore_stack.is_abs_path_ignored(&root_abs_path, true) { root_entry.is_ignored = true; state.insert_entry(root_entry.clone(), self.fs.as_ref()); } @@ -3513,7 +3489,7 @@ impl BackgroundScanner { for entry in &mut new_entries { let entry_abs_path = root_abs_path.join(&entry.path); entry.is_ignored = - self.is_abs_path_ignored(&entry_abs_path, &ignore_stack, entry.is_dir()); + ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir()); if entry.is_dir() { if let Some(job) = new_jobs.next().expect("missing scan job for entry") { @@ -3572,8 +3548,7 @@ impl BackgroundScanner { } if child_entry.is_dir() { - 
child_entry.is_ignored = - self.is_abs_path_ignored(&child_abs_path, &ignore_stack, true); + child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true); // Avoid recursing until crash in the case of a recursive symlink if !job.ancestor_inodes.contains(&child_entry.inode) { @@ -3597,8 +3572,7 @@ impl BackgroundScanner { new_jobs.push(None); } } else { - child_entry.is_ignored = - self.is_abs_path_ignored(&child_abs_path, &ignore_stack, false); + child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); if let Some((repository_dir, repository, staged_statuses)) = &job.containing_repository { @@ -3624,7 +3598,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry) { + if state.should_scan_directory(&entry, &job.path.join(&entry.path)) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); @@ -3712,13 +3686,12 @@ impl BackgroundScanner { state.snapshot.root_char_bag, ); let is_dir = fs_entry.is_dir(); - fs_entry.is_ignored = - state - .snapshot - .is_abs_path_ignored(&abs_path, &ignore_stack, is_dir); + fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !is_dir { + if !is_dir + && !(fs_entry.is_ignored || state.snapshot.is_abs_path_excluded(&abs_path)) + { if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) { let repo_path = RepoPath(repo_path.into()); @@ -3729,7 +3702,7 @@ impl BackgroundScanner { } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry) { + if state.should_scan_directory(&fs_entry, &abs_path) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3874,7 +3847,7 @@ impl BackgroundScanner 
{ for mut entry in snapshot.child_entries(path).cloned() { let was_ignored = entry.is_ignored; let abs_path: Arc = snapshot.abs_path().join(&entry.path).into(); - entry.is_ignored = self.is_abs_path_ignored(&abs_path, &ignore_stack, entry.is_dir()); + entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir()); if entry.is_dir() { let child_ignore_stack = if entry.is_ignored { IgnoreStack::all() @@ -3885,7 +3858,7 @@ impl BackgroundScanner { // Scan any directories that were previously ignored and weren't previously scanned. if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); - if state.should_scan_directory(&entry) { + if state.should_scan_directory(&entry, &abs_path) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } @@ -4056,18 +4029,6 @@ impl BackgroundScanner { smol::Timer::after(Duration::from_millis(100)).await; } - - fn is_abs_path_ignored( - &self, - abs_path: &Path, - ignore_stack: &IgnoreStack, - is_dir: bool, - ) -> bool { - self.state - .lock() - .snapshot - .is_abs_path_ignored(abs_path, ignore_stack, is_dir) - } } fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { From 1612c90052fd570fbee7856d5d53c45416d3dcf1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 00:19:51 +0200 Subject: [PATCH 14/75] More lenient file path matchers --- crates/project/src/project_settings.rs | 1 - crates/project/src/worktree.rs | 2 +- crates/util/src/paths.rs | 24 +++++++++++++++++++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 8aebb380b635c732f424075bb6e44df0fb6b3caf..17233219d74061b7d1382cff4dd8b18408f7961d 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -12,7 +12,6 @@ pub struct ProjectSettings { pub git: GitSettings, // TODO kb better names and docs and tests // TODO kb how to react on 
their changes? - // TODO kb /something/node_modules/ does not match `"**/node_modules/**"` glob!!! #[serde(default)] pub scan_exclude_files: Vec, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index dbc18e086d8cb9bf68fb2c24fe24caa8e5b18ac8..3cc1ff6fef635f9b65ad5f148bef8a6787367be2 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3598,7 +3598,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry, &job.path.join(&entry.path)) { + if state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index d54e0b1cd63fc302101c1020d7fb0ff60ee1d5bf..5999bd1d3923a820c6412aa5d1b4c6f2e915e8d3 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -211,7 +211,19 @@ impl PathMatcher { } pub fn is_match>(&self, other: P) -> bool { - other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other) + other.as_ref().starts_with(&self.maybe_path) + || self.glob.is_match(&other) + || self.check_with_end_separator(other.as_ref()) + } + + fn check_with_end_separator(&self, path: &Path) -> bool { + let path_str = path.to_string_lossy(); + let separator = std::path::MAIN_SEPARATOR_STR; + if path_str.ends_with(separator) { + self.glob.is_match(path) + } else { + self.glob.is_match(path_str.to_string() + separator) + } } } @@ -388,4 +400,14 @@ mod tests { let path = Path::new("/a/b/c/.eslintrc.js"); assert_eq!(path.extension_or_hidden_file_name(), Some("js")); } + + #[test] + fn edge_of_glob() { + let path = Path::new("/work/node_modules"); + let path_matcher = PathMatcher::new("**/node_modules/**").unwrap(); + assert!( + path_matcher.is_match(&path), + "Path matcher {path_matcher} should match {path:?}" + ); + } } From 
9373d3843457b3e3acdc07480337de8a3e486345 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 16:35:49 +0200 Subject: [PATCH 15/75] Rescan worktree on scan exclusions settings change --- crates/project/src/project_settings.rs | 1 - crates/project/src/worktree.rs | 193 ++++++++++++++++--------- crates/util/src/paths.rs | 8 + 3 files changed, 136 insertions(+), 66 deletions(-) diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 17233219d74061b7d1382cff4dd8b18408f7961d..7cbcc32d4ee9dc25ab0b7bf0abbef122d54ca9f5 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -11,7 +11,6 @@ pub struct ProjectSettings { #[serde(default)] pub git: GitSettings, // TODO kb better names and docs and tests - // TODO kb how to react on their changes? #[serde(default)] pub scan_exclude_files: Vec, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 3cc1ff6fef635f9b65ad5f148bef8a6787367be2..316878030508adf732c3041a364255d360f3b931 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -22,7 +22,10 @@ use futures::{ }; use fuzzy::CharBag; use git::{DOT_GIT, GITIGNORE}; -use gpui::{executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; +use gpui::{ + executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task, +}; +use itertools::Itertools; use language::{ proto::{ deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending, @@ -37,6 +40,7 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; +use settings::SettingsStore; use smol::channel::{self, Sender}; use std::{ any::Any, @@ -74,7 +78,8 @@ pub struct LocalWorktree { scan_requests_tx: channel::Sender, path_prefixes_to_scan_tx: channel::Sender>, is_scanning: (watch::Sender, watch::Receiver), - _background_scanner_task: Task<()>, + _settings_subscription: Subscription, + 
_background_scanner_tasks: Vec>, share: Option, diagnostics: HashMap< Arc, @@ -304,30 +309,55 @@ impl Worktree { .await .context("failed to stat worktree path")?; + let closure_fs = Arc::clone(&fs); + let closure_next_entry_id = Arc::clone(&next_entry_id); + let closure_abs_path = abs_path.to_path_buf(); Ok(cx.add_model(move |cx: &mut ModelContext| { + let settings_subscription = cx.observe_global::(move |this, cx| { + if let Self::Local(this) = this { + let new_scan_exclude_files = + scan_exclude_files(settings::get::(cx)); + if new_scan_exclude_files != this.snapshot.scan_exclude_files { + this.snapshot.scan_exclude_files = new_scan_exclude_files; + log::info!( + "Re-scanning due to new scan exclude files: {:?}", + this.snapshot + .scan_exclude_files + .iter() + .map(ToString::to_string) + .collect::>() + ); + + let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); + let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = + channel::unbounded(); + this.scan_requests_tx = scan_requests_tx; + this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx; + this._background_scanner_tasks = start_background_scan_tasks( + &closure_abs_path, + this.snapshot(), + scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&closure_next_entry_id), + Arc::clone(&closure_fs), + cx, + ); + this.is_scanning = watch::channel_with(true); + // TODO kb change more state? will this even work now? 
+ } + } + }); + let root_name = abs_path .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); - let project_settings = settings::get::(cx); - let scan_exclude_files = project_settings.scan_exclude_files.iter() - .filter_map(|pattern| { - PathMatcher::new(pattern) - .map(Some) - .unwrap_or_else(|e| { - log::error!( - "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" - ); - None - }) - }) - .collect::>(); let mut snapshot = LocalSnapshot { - scan_exclude_files, + scan_exclude_files: scan_exclude_files(settings::get::(cx)), ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { id: WorktreeId::from_usize(cx.model_id()), - abs_path: abs_path.clone(), + abs_path: abs_path.to_path_buf().into(), root_name: root_name.clone(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), entries_by_path: Default::default(), @@ -352,60 +382,23 @@ impl Worktree { let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); - let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); - - cx.spawn_weak(|this, mut cx| async move { - while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) { - this.update(&mut cx, |this, cx| { - let this = this.as_local_mut().unwrap(); - match state { - ScanState::Started => { - *this.is_scanning.0.borrow_mut() = true; - } - ScanState::Updated { - snapshot, - changes, - barrier, - scanning, - } => { - *this.is_scanning.0.borrow_mut() = scanning; - this.set_snapshot(snapshot, changes, cx); - drop(barrier); - } - } - cx.notify(); - }); - } - }) - .detach(); - - let background_scanner_task = cx.background().spawn({ - let fs = fs.clone(); - let snapshot = snapshot.clone(); - let background = cx.background().clone(); - async move { - let events = fs.watch(&abs_path, Duration::from_millis(100)).await; - 
BackgroundScanner::new( - snapshot, - next_entry_id, - fs, - scan_states_tx, - background, - scan_requests_rx, - path_prefixes_to_scan_rx, - ) - .run(events) - .await; - } - }); - + let task_snapshot = snapshot.clone(); Worktree::Local(LocalWorktree { snapshot, is_scanning: watch::channel_with(true), share: None, scan_requests_tx, path_prefixes_to_scan_tx, - _background_scanner_task: background_scanner_task, + _settings_subscription: settings_subscription, + _background_scanner_tasks: start_background_scan_tasks( + &abs_path, + task_snapshot, + scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&next_entry_id), + Arc::clone(&fs), + cx, + ), diagnostics: Default::default(), diagnostic_summaries: Default::default(), client, @@ -602,6 +595,76 @@ impl Worktree { } } +fn start_background_scan_tasks( + abs_path: &Path, + snapshot: LocalSnapshot, + scan_requests_rx: channel::Receiver, + path_prefixes_to_scan_rx: channel::Receiver>, + next_entry_id: Arc, + fs: Arc, + cx: &mut ModelContext<'_, Worktree>, +) -> Vec> { + let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); + let background_scanner = cx.background().spawn({ + let abs_path = abs_path.to_path_buf(); + let background = cx.background().clone(); + async move { + let events = fs.watch(&abs_path, Duration::from_millis(100)).await; + BackgroundScanner::new( + snapshot, + next_entry_id, + fs, + scan_states_tx, + background, + scan_requests_rx, + path_prefixes_to_scan_rx, + ) + .run(events) + .await; + } + }); + let scan_state_updater = cx.spawn_weak(|this, mut cx| async move { + while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) { + this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + match state { + ScanState::Started => { + *this.is_scanning.0.borrow_mut() = true; + } + ScanState::Updated { + snapshot, + changes, + barrier, + scanning, + } => { + *this.is_scanning.0.borrow_mut() = scanning; + this.set_snapshot(snapshot, changes, cx); + 
drop(barrier); + } + } + cx.notify(); + }); + } + }); + vec![background_scanner, scan_state_updater] +} + +fn scan_exclude_files(project_settings: &ProjectSettings) -> Vec { + project_settings.scan_exclude_files.iter() + .sorted() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" + ); + None + }) + }) + .collect() +} + impl LocalWorktree { pub fn contains_abs_path(&self, path: &Path) -> bool { path.starts_with(&self.abs_path) diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 5999bd1d3923a820c6412aa5d1b4c6f2e915e8d3..d0ba7957ec28c90aabacef8903a1544cd05e5a42 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -202,6 +202,14 @@ impl std::fmt::Display for PathMatcher { } } +impl PartialEq for PathMatcher { + fn eq(&self, other: &Self) -> bool { + self.maybe_path.eq(&other.maybe_path) + } +} + +impl Eq for PathMatcher {} + impl PathMatcher { pub fn new(maybe_glob: &str) -> Result { Ok(PathMatcher { From c52fe2f536a92999b014e526c97c1942863fcc6f Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 22:43:33 +0200 Subject: [PATCH 16/75] Move toggle ignored button into include directories editor --- crates/search/src/project_search.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 74e06b4e2a49de66d41464d9947a7641ff08a8f4..8f19e7480259e67e7affdfd7b476d750de8545e4 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1875,7 +1875,6 @@ impl View for ProjectSearchBar { .with_child(filter_button) .with_children(case_sensitive) .with_children(whole_word) - .with_children(include_ignored) .flex(1., false) .constrained() .contained(), @@ -1892,7 +1891,15 @@ impl View for ProjectSearchBar { 
.with_children(search.filters_enabled.then(|| { Flex::row() .with_child( - ChildView::new(&search.included_files_editor, cx) + Flex::row() + .with_child( + ChildView::new(&search.included_files_editor, cx) + .contained() + .constrained() + .with_height(theme.search.search_bar_row_height) + .flex(1., true), + ) + .with_children(include_ignored) .contained() .with_style(include_container_style) .constrained() From 26f7e66b492c4c9860705e997b40a5208d0ecb0e Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 14 Nov 2023 22:55:06 +0200 Subject: [PATCH 17/75] Add default scan excluded files settings --- assets/settings/default.json | 12 ++++++++++++ crates/project/src/project_settings.rs | 2 +- crates/project/src/worktree.rs | 2 +- crates/project/src/worktree_tests.rs | 2 +- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 08d85dd723cc13ca98b0b239a199b263f738d99a..4a21b708eef99dc0193fc57910b48e505fe2cee8 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -268,6 +268,18 @@ // Whether to show warnings or not by default. "include_warnings": true }, + // TODO kb docs + "scan_exclude_files": [ + "**/.git", + "**/.svn", + "**/.hg", + "**/CVS", + "**/.DS_Store", + "**/Thumbs.db", + "**/.classpath", + "**/.settings", + "**/target" + ], // Git gutter behavior configuration. "git": { // Control whether the git gutter is shown. 
May take 2 values: diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 7cbcc32d4ee9dc25ab0b7bf0abbef122d54ca9f5..511241bc22df8a2fb3c45478a76336aec7d8fc7c 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -12,7 +12,7 @@ pub struct ProjectSettings { pub git: GitSettings, // TODO kb better names and docs and tests #[serde(default)] - pub scan_exclude_files: Vec, + pub scan_exclude_files: Option>, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 316878030508adf732c3041a364255d360f3b931..41fd647c9ced85410aa1bd7142ead3a2eca55474 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -650,7 +650,7 @@ fn start_background_scan_tasks( } fn scan_exclude_files(project_settings: &ProjectSettings) -> Vec { - project_settings.scan_exclude_files.iter() + project_settings.scan_exclude_files.as_deref().unwrap_or(&[]).iter() .sorted() .filter_map(|pattern| { PathMatcher::new(pattern) diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 1fb4aa9a3474740d31f1fab5519b90b502758f4c..79ac25a1476fb221fb3f4faa41d1acb1aca11073 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -910,7 +910,7 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.scan_exclude_files = - vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]; + Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); }); }); }); From ce2cfc60356c4d76af8d109e8f78138b2b1c5414 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 10:15:50 +0200 Subject: [PATCH 18/75] Fix the ! 
bug, better test draft --- crates/project/src/worktree.rs | 4 +- crates/project/src/worktree_tests.rs | 115 +++++++++++++++++++-------- 2 files changed, 86 insertions(+), 33 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 41fd647c9ced85410aa1bd7142ead3a2eca55474..49ba8c93c4091f17c08e56260ad464fce4a2b1aa 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2123,7 +2123,7 @@ impl LocalSnapshot { let mut ignore_stack = IgnoreStack::none(); for (parent_abs_path, ignore) in new_ignores.into_iter().rev() { - if !ignore_stack.is_abs_path_ignored(parent_abs_path, true) { + if ignore_stack.is_abs_path_ignored(parent_abs_path, true) { ignore_stack = IgnoreStack::all(); break; } else if let Some(ignore) = ignore { @@ -2131,7 +2131,7 @@ impl LocalSnapshot { } } - if !ignore_stack.is_abs_path_ignored(abs_path, is_dir) { + if ignore_stack.is_abs_path_ignored(abs_path, is_dir) { ignore_stack = IgnoreStack::all(); } ignore_stack diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 79ac25a1476fb221fb3f4faa41d1acb1aca11073..74268d2ed50d56c5cdff6667fd82d83d17339483 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -7,7 +7,7 @@ use anyhow::Result; use client::Client; use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; use git::GITIGNORE; -use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext}; +use gpui::{executor::Deterministic, ModelContext, ModelHandle, Task, TestAppContext}; use parking_lot::Mutex; use postage::stream::Stream; use pretty_assertions::assert_eq; @@ -882,9 +882,13 @@ async fn test_write_file(cx: &mut TestAppContext) { #[gpui::test] async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { let dir = temp_tree(json!({ - ".git": {}, + ".git": { + "index": "blah" + }, ".gitignore": "**/target\n/node_modules\n", - "target": {}, + "target": { + "index2": 
"blah2" + }, "node_modules": { ".DS_Store": "", "prettier": { @@ -928,35 +932,52 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; tree.flush_fs_events(cx).await; + check_worktree_entries( + &tree, + &[ + "src/foo/foo.rs", + "src/foo/another.rs", + // TODO kb + // "node_modules/.DS_Store", + // "src/.DS_Store", + // ".DS_Store", + ], + &[ + ".git/index", + "target/index2", + "node_modules/prettier/package.json", + ], + &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], + cx, + ); - // tree.update(cx, |tree, cx| { - // tree.as_local().unwrap().write_file( - // Path::new("tracked-dir/file.txt"), - // "hello".into(), - // Default::default(), - // cx, - // ) - // }) - // .await - // .unwrap(); - // tree.update(cx, |tree, cx| { - // tree.as_local().unwrap().write_file( - // Path::new("ignored-dir/file.txt"), - // "world".into(), - // Default::default(), - // cx, - // ) - // }) - // .await - // .unwrap(); - - // tree.read_with(cx, |tree, _| { - // let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); - // let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); - // assert!(!tracked.is_ignored); - // assert!(ignored.is_ignored); - // }); - dbg!("!!!!!!!!!!!!"); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.scan_exclude_files = Some(vec!["**/node_modules/**".to_string()]); + }); + }); + }); + tree.flush_fs_events(cx).await; + cx.foreground().run_until_parked(); + check_worktree_entries( + &tree, + &[ + "node_modules/prettier/package.json", + "node_modules/.DS_Store", + ], + &[".git/index", "target/index2"], + &[ + ".gitignore", + "src/lib.rs", + "src/bar/bar.rs", + "src/foo/foo.rs", + "src/foo/another.rs", + "src/.DS_Store", + ".DS_Store", + ], + cx, + ); } #[gpui::test(iterations = 30)] @@ -2221,3 +2242,35 @@ fn git_status(repo: &git2::Repository) -> 
collections::HashMap, + expected_excluded_paths: &[&str], + expected_ignored_paths: &[&str], + expected_tracked_paths: &[&str], + cx: &mut TestAppContext, +) { + tree.read_with(cx, |tree, _| { + for path in expected_excluded_paths { + let entry = tree.entry_for_path(path); + assert!( + entry.is_none(), + "expected path '{path}' to be excluded, but got entry: {entry:?}", + ); + } + for path in expected_ignored_paths { + let entry = tree.entry_for_path(path).unwrap(); + assert!( + entry.is_ignored, + "expected path '{path}' to be ignored, but got entry: {entry:?}", + ); + } + for path in expected_tracked_paths { + let entry = tree.entry_for_path(path).unwrap(); + assert!( + !entry.is_ignored, + "expected path '{path}' to be tracked, but got entry: {entry:?}", + ); + } + }); +} From cafeba103bc157058868a25a06d4331beddc0844 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 16:36:00 +0200 Subject: [PATCH 19/75] Exclude ignored opened buffers from search --- crates/project/src/project.rs | 12 ++- crates/project/src/worktree_tests.rs | 127 +++++++++++++-------------- 2 files changed, 70 insertions(+), 69 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 09d2c9a98195c0d2322bee5b1b0b38b1dacf1ebc..c24fb5eea1f620b43920d249e6d67b308549ae41 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -5570,8 +5570,16 @@ impl Project { .iter() .filter_map(|(_, b)| { let buffer = b.upgrade(cx)?; - let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - if let Some(path) = snapshot.file().map(|file| file.path()) { + let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| { + let is_ignored = buffer + .project_path(cx) + .and_then(|path| self.entry_for_path(&path, cx)) + .map_or(false, |entry| entry.is_ignored); + (is_ignored, buffer.snapshot()) + }); + if is_ignored && !query.include_ignored() { + return None; + } else if let Some(path) = snapshot.file().map(|file| file.path()) { 
Some((path.clone(), (buffer, snapshot))) } else { unnamed_files.push(buffer); diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 74268d2ed50d56c5cdff6667fd82d83d17339483..4c0df4dd7d6f5471d35d81d5ecffe2481134f8d6 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -7,7 +7,7 @@ use anyhow::Result; use client::Client; use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; use git::GITIGNORE; -use gpui::{executor::Deterministic, ModelContext, ModelHandle, Task, TestAppContext}; +use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext}; use parking_lot::Mutex; use postage::stream::Stream; use pretty_assertions::assert_eq; @@ -880,14 +880,11 @@ async fn test_write_file(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { +async fn test_ignore_exclusions(cx: &mut TestAppContext) { let dir = temp_tree(json!({ - ".git": { - "index": "blah" - }, ".gitignore": "**/target\n/node_modules\n", "target": { - "index2": "blah2" + "index": "blah2" }, "node_modules": { ".DS_Store": "", @@ -932,24 +929,21 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; tree.flush_fs_events(cx).await; - check_worktree_entries( - &tree, - &[ - "src/foo/foo.rs", - "src/foo/another.rs", - // TODO kb - // "node_modules/.DS_Store", - // "src/.DS_Store", - // ".DS_Store", - ], - &[ - ".git/index", - "target/index2", - "node_modules/prettier/package.json", - ], - &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], - cx, - ); + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "src/foo/foo.rs", + "src/foo/another.rs", + // TODO kb + // "node_modules/.DS_Store", + // "src/.DS_Store", + // ".DS_Store", + ], + &["target/index", "node_modules/prettier/package.json"], + &["src/lib.rs", "src/bar/bar.rs", 
".gitignore"], + ) + }); cx.update(|cx| { cx.update_global::(|store, cx| { @@ -960,24 +954,25 @@ async fn test_ignore_inclusions_and_exclusions(cx: &mut TestAppContext) { }); tree.flush_fs_events(cx).await; cx.foreground().run_until_parked(); - check_worktree_entries( - &tree, - &[ - "node_modules/prettier/package.json", - "node_modules/.DS_Store", - ], - &[".git/index", "target/index2"], - &[ - ".gitignore", - "src/lib.rs", - "src/bar/bar.rs", - "src/foo/foo.rs", - "src/foo/another.rs", - "src/.DS_Store", - ".DS_Store", - ], - cx, - ); + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "node_modules/prettier/package.json", + "node_modules/.DS_Store", + ], + &["target/index"], + &[ + ".gitignore", + "src/lib.rs", + "src/bar/bar.rs", + "src/foo/foo.rs", + "src/foo/another.rs", + "src/.DS_Store", + ".DS_Store", + ], + ) + }); } #[gpui::test(iterations = 30)] @@ -2243,34 +2238,32 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap, + tree: &Worktree, expected_excluded_paths: &[&str], expected_ignored_paths: &[&str], expected_tracked_paths: &[&str], - cx: &mut TestAppContext, ) { - tree.read_with(cx, |tree, _| { - for path in expected_excluded_paths { - let entry = tree.entry_for_path(path); - assert!( - entry.is_none(), - "expected path '{path}' to be excluded, but got entry: {entry:?}", - ); - } - for path in expected_ignored_paths { - let entry = tree.entry_for_path(path).unwrap(); - assert!( - entry.is_ignored, - "expected path '{path}' to be ignored, but got entry: {entry:?}", - ); - } - for path in expected_tracked_paths { - let entry = tree.entry_for_path(path).unwrap(); - assert!( - !entry.is_ignored, - "expected path '{path}' to be tracked, but got entry: {entry:?}", - ); - } - }); + for path in expected_excluded_paths { + let entry = tree.entry_for_path(path); + assert!( + entry.is_none(), + "expected path '{path}' to be excluded, but got entry: {entry:?}", + ); + } + for path in expected_ignored_paths { + let entry = 
tree.entry_for_path(path).unwrap(); + assert!( + entry.is_ignored, + "expected path '{path}' to be ignored, but got entry: {entry:?}", + ); + } + for path in expected_tracked_paths { + let entry = tree.entry_for_path(path).unwrap(); + assert!( + !entry.is_ignored, + "expected path '{path}' to be tracked, but got entry: {entry:?}", + ); + } } From d3ce82e82cae8935220ba6bed4478a951fed44ff Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 18:37:15 +0200 Subject: [PATCH 20/75] Fix the new test --- crates/project/src/worktree.rs | 47 +++++++++++++++++++--------- crates/project/src/worktree_tests.rs | 7 ++--- 2 files changed, 35 insertions(+), 19 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 49ba8c93c4091f17c08e56260ad464fce4a2b1aa..6508a8635c7e84efd0303b7949f86b4c3473b3e2 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -320,7 +320,7 @@ impl Worktree { if new_scan_exclude_files != this.snapshot.scan_exclude_files { this.snapshot.scan_exclude_files = new_scan_exclude_files; log::info!( - "Re-scanning due to new scan exclude files: {:?}", + "Re-scanning directories, new scan exclude files: {:?}", this.snapshot .scan_exclude_files .iter() @@ -343,7 +343,6 @@ impl Worktree { cx, ); this.is_scanning = watch::channel_with(true); - // TODO kb change more state? will this even work now? 
} } }); @@ -3489,18 +3488,26 @@ impl BackgroundScanner { } async fn scan_dir(&self, job: &ScanJob) -> Result<()> { - log::debug!("scan directory {:?}", job.path); - - let mut ignore_stack = job.ignore_stack.clone(); - let mut new_ignore = None; - let (root_abs_path, root_char_bag, next_entry_id) = { - let snapshot = &self.state.lock().snapshot; - ( - snapshot.abs_path().clone(), - snapshot.root_char_bag, - self.next_entry_id.clone(), - ) - }; + let root_abs_path; + let mut ignore_stack; + let mut new_ignore; + let root_char_bag; + let next_entry_id; + { + let state = self.state.lock(); + let snapshot = &state.snapshot; + root_abs_path = snapshot.abs_path().clone(); + if snapshot.is_abs_path_excluded(&job.abs_path) { + log::error!("skipping excluded directory {:?}", job.path); + return Ok(()); + } + log::debug!("scanning directory {:?}", job.path); + ignore_stack = job.ignore_stack.clone(); + new_ignore = None; + root_char_bag = snapshot.root_char_bag; + next_entry_id = self.next_entry_id.clone(); + drop(state); + } let mut dotgit_path = None; let mut root_canonical_path = None; @@ -3515,8 +3522,18 @@ impl BackgroundScanner { continue; } }; - let child_name = child_abs_path.file_name().unwrap(); + { + let mut state = self.state.lock(); + if state.snapshot.is_abs_path_excluded(&child_abs_path) { + let relative_path = job.path.join(child_name); + log::debug!("skipping excluded child entry {relative_path:?}"); + state.remove_path(&relative_path); + continue; + } + drop(state); + } + let child_path: Arc = job.path.join(child_name).into(); let child_metadata = match self.fs.metadata(&child_abs_path).await { Ok(Some(metadata)) => metadata, diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 4c0df4dd7d6f5471d35d81d5ecffe2481134f8d6..bd548bb9cbe6aae799a310b9aed47505bef803a6 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -935,10 +935,9 @@ async fn test_ignore_exclusions(cx: &mut 
TestAppContext) { &[ "src/foo/foo.rs", "src/foo/another.rs", - // TODO kb - // "node_modules/.DS_Store", - // "src/.DS_Store", - // ".DS_Store", + "node_modules/.DS_Store", + "src/.DS_Store", + ".DS_Store", ], &["target/index", "node_modules/prettier/package.json"], &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], From 5f468970f039db8e7ba609d0bf53c7035ebed238 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 19:07:16 +0200 Subject: [PATCH 21/75] Fix some of the old tests --- crates/collab/src/tests/integration_tests.rs | 2 ++ crates/project/src/worktree_tests.rs | 26 ++++++++++++++++++-- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index fa7c4fe67df4fed4645e8c6552e242b3d7662276..61bbabccddfd2cb96596c854682c3f8d5e918154 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -1381,6 +1381,8 @@ async fn test_share_project( Path::new("a.txt"), Path::new("b.txt"), Path::new("ignored-dir"), + Path::new("ignored-dir/c.txt"), + Path::new("ignored-dir/d.txt"), ] ); }); diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index bd548bb9cbe6aae799a310b9aed47505bef803a6..256bc6477a764a4ed55207aa132bf733ebe00638 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -25,6 +25,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt}; #[gpui::test] async fn test_traversal(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -80,6 +81,7 @@ async fn test_traversal(cx: &mut TestAppContext) { #[gpui::test] async fn test_descendent_entries(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -187,6 +189,7 @@ async fn test_descendent_entries(cx: &mut TestAppContext) { #[gpui::test(iterations = 10)] async 
fn test_circular_symlinks(executor: Arc, cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -266,6 +269,7 @@ async fn test_circular_symlinks(executor: Arc, cx: &mut TestAppCo #[gpui::test] async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -441,6 +445,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { #[gpui::test] async fn test_open_gitignored_files(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -601,6 +606,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) { #[gpui::test] async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -724,6 +730,7 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { #[gpui::test(iterations = 10)] async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -829,6 +836,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { #[gpui::test] async fn test_write_file(cx: &mut TestAppContext) { + init_test(cx); let dir = temp_tree(json!({ ".git": {}, ".gitignore": "ignored-dir\n", @@ -881,6 +889,7 @@ async fn test_write_file(cx: &mut TestAppContext) { #[gpui::test] async fn test_ignore_exclusions(cx: &mut TestAppContext) { + init_test(cx); let dir = temp_tree(json!({ ".gitignore": "**/target\n/node_modules\n", "target": { @@ -906,8 +915,6 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { ".DS_Store": "", })); cx.update(|cx| { - cx.set_global(SettingsStore::test(cx)); - Project::init_settings(cx); cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.scan_exclude_files = @@ -976,6 +983,7 @@ async fn test_ignore_exclusions(cx: 
&mut TestAppContext) { #[gpui::test(iterations = 30)] async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { + init_test(cx); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -1035,6 +1043,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { #[gpui::test] async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { + init_test(cx); let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); let fs_fake = FakeFs::new(cx.background()); @@ -1151,6 +1160,7 @@ async fn test_random_worktree_operations_during_initial_scan( cx: &mut TestAppContext, mut rng: StdRng, ) { + init_test(cx); let operations = env::var("OPERATIONS") .map(|o| o.parse().unwrap()) .unwrap_or(5); @@ -1240,6 +1250,7 @@ async fn test_random_worktree_operations_during_initial_scan( #[gpui::test(iterations = 100)] async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) { + init_test(cx); let operations = env::var("OPERATIONS") .map(|o| o.parse().unwrap()) .unwrap_or(40); @@ -1654,6 +1665,7 @@ fn random_filename(rng: &mut impl Rng) -> String { #[gpui::test] async fn test_rename_work_directory(cx: &mut TestAppContext) { + init_test(cx); let root = temp_tree(json!({ "projects": { "project1": { @@ -1724,6 +1736,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) { #[gpui::test] async fn test_git_repository_for_path(cx: &mut TestAppContext) { + init_test(cx); let root = temp_tree(json!({ "c.txt": "", "dir1": { @@ -1844,6 +1857,7 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { #[gpui::test] async fn test_git_status(deterministic: Arc, cx: &mut TestAppContext) { + init_test(cx); const IGNORE_RULE: &'static str = "**/target"; let root = temp_tree(json!({ @@ -2032,6 +2046,7 @@ async fn test_git_status(deterministic: Arc, cx: &mut TestAppCont #[gpui::test] async fn test_propagate_git_statuses(cx: &mut TestAppContext) { + init_test(cx); let fs = 
FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -2266,3 +2281,10 @@ fn check_worktree_entries( ); } } + +fn init_test(cx: &mut gpui::TestAppContext) { + cx.update(|cx| { + cx.set_global(SettingsStore::test(cx)); + Project::init_settings(cx); + }); +} From 30fefa0ef8a05f9d8a337065b667511a3d33ce5d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 19:09:58 +0200 Subject: [PATCH 22/75] Use a better name --- assets/settings/default.json | 5 ++--- crates/project/src/project_settings.rs | 4 ++-- crates/project/src/worktree.rs | 22 +++++++++++----------- crates/project/src/worktree_tests.rs | 7 ++++--- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 4a21b708eef99dc0193fc57910b48e505fe2cee8..b47f0dc2e7e17decf3ff76de32eccd3d713daa4c 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -269,7 +269,7 @@ "include_warnings": true }, // TODO kb docs - "scan_exclude_files": [ + "file_scan_exclusions": [ "**/.git", "**/.svn", "**/.hg", @@ -277,8 +277,7 @@ "**/.DS_Store", "**/Thumbs.db", "**/.classpath", - "**/.settings", - "**/target" + "**/.settings" ], // Git gutter behavior configuration. 
"git": { diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 511241bc22df8a2fb3c45478a76336aec7d8fc7c..cda37be601c8e67b96eda2b708074b638e240f30 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,9 +10,9 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, - // TODO kb better names and docs and tests + // TODO kb docs and project_search test #[serde(default)] - pub scan_exclude_files: Option>, + pub file_scan_exclusions: Option>, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 6508a8635c7e84efd0303b7949f86b4c3473b3e2..b05593cd4dfdaeaf92f41ad8adc1b65aff6bc37e 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -225,7 +225,7 @@ pub struct LocalSnapshot { /// All of the git repositories in the worktree, indexed by the project entry /// id of their parent directory. 
git_repositories: TreeMap, - scan_exclude_files: Vec, + file_scan_exclusions: Vec, } struct BackgroundScannerState { @@ -315,14 +315,14 @@ impl Worktree { Ok(cx.add_model(move |cx: &mut ModelContext| { let settings_subscription = cx.observe_global::(move |this, cx| { if let Self::Local(this) = this { - let new_scan_exclude_files = - scan_exclude_files(settings::get::(cx)); - if new_scan_exclude_files != this.snapshot.scan_exclude_files { - this.snapshot.scan_exclude_files = new_scan_exclude_files; + let new_file_scan_exclusions = + file_scan_exclusions(settings::get::(cx)); + if new_file_scan_exclusions != this.snapshot.file_scan_exclusions { + this.snapshot.file_scan_exclusions = new_file_scan_exclusions; log::info!( "Re-scanning directories, new scan exclude files: {:?}", this.snapshot - .scan_exclude_files + .file_scan_exclusions .iter() .map(ToString::to_string) .collect::>() @@ -351,7 +351,7 @@ impl Worktree { .file_name() .map_or(String::new(), |f| f.to_string_lossy().to_string()); let mut snapshot = LocalSnapshot { - scan_exclude_files: scan_exclude_files(settings::get::(cx)), + file_scan_exclusions: file_scan_exclusions(settings::get::(cx)), ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { @@ -648,15 +648,15 @@ fn start_background_scan_tasks( vec![background_scanner, scan_state_updater] } -fn scan_exclude_files(project_settings: &ProjectSettings) -> Vec { - project_settings.scan_exclude_files.as_deref().unwrap_or(&[]).iter() +fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec { + project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter() .sorted() .filter_map(|pattern| { PathMatcher::new(pattern) .map(Some) .unwrap_or_else(|e| { log::error!( - "Skipping pattern {pattern} in `scan_exclude_files` project settings due to parsing error: {e:#}" + "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}" ); None }) @@ -2227,7 +2227,7 @@ 
impl LocalSnapshot { } fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { - self.scan_exclude_files + self.file_scan_exclusions .iter() .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) } diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index 256bc6477a764a4ed55207aa132bf733ebe00638..f66a71ee7d59eae38ffe93c71b599a00f3a751c6 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -888,7 +888,7 @@ async fn test_write_file(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_ignore_exclusions(cx: &mut TestAppContext) { +async fn test_file_scan_exclusions(cx: &mut TestAppContext) { init_test(cx); let dir = temp_tree(json!({ ".gitignore": "**/target\n/node_modules\n", @@ -917,7 +917,7 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.scan_exclude_files = + project_settings.file_scan_exclusions = Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); }); }); @@ -954,7 +954,8 @@ async fn test_ignore_exclusions(cx: &mut TestAppContext) { cx.update(|cx| { cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { - project_settings.scan_exclude_files = Some(vec!["**/node_modules/**".to_string()]); + project_settings.file_scan_exclusions = + Some(vec!["**/node_modules/**".to_string()]); }); }); }); From 906db58188fccbc14638b296c45170de0fc7bcea Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 15 Nov 2023 22:53:02 +0200 Subject: [PATCH 23/75] Defer ignored dirs scanning --- crates/project/src/ignore.rs | 4 ++++ crates/project/src/worktree.rs | 12 ++++++++---- crates/project/src/worktree_tests.rs | 8 ++++++-- 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index 
ed1ce4006d9ed90269d5d3ca184b0ff9c5fde590..a29e00501960367c72beca7e364b418cfbac8901 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -20,6 +20,10 @@ impl IgnoreStack { Arc::new(Self::All) } + pub fn is_all(&self) -> bool { + matches!(self, Self::All) + } + pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index b05593cd4dfdaeaf92f41ad8adc1b65aff6bc37e..e8114832c24576fe888ae1d8e72630e6269e467d 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2253,9 +2253,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !matches!(ignore_stack.as_ref(), &IgnoreStack::All) - && !self.snapshot.is_abs_path_excluded(&abs_path) - { + if !ignore_stack.is_all() && !self.snapshot.is_abs_path_excluded(&abs_path) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -3327,6 +3325,10 @@ impl BackgroundScanner { log::debug!("ignoring event {relative_path:?} within unloaded directory"); return false; } + if snapshot.is_abs_path_excluded(abs_path) { + log::debug!("ignoring event {relative_path:?} within excluded directory"); + return false; + } relative_paths.push(relative_path); true @@ -3678,7 +3680,9 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) { + if !ignore_stack.is_all() + && state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) + { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); diff --git 
a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index f66a71ee7d59eae38ffe93c71b599a00f3a751c6..cd7f3cff81d0d0101cdf871c3c5e255d675a2e71 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -2268,14 +2268,18 @@ fn check_worktree_entries( ); } for path in expected_ignored_paths { - let entry = tree.entry_for_path(path).unwrap(); + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'")); assert!( entry.is_ignored, "expected path '{path}' to be ignored, but got entry: {entry:?}", ); } for path in expected_tracked_paths { - let entry = tree.entry_for_path(path).unwrap(); + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'")); assert!( !entry.is_ignored, "expected path '{path}' to be tracked, but got entry: {entry:?}", From 6028cd90d4ae36578ea8451cf14ce59abe378846 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 16 Nov 2023 22:09:50 +0200 Subject: [PATCH 24/75] Retract back to original scanning strategy Do not descend into ignored directories, to avoid tracking their state. 
--- crates/project/src/ignore.rs | 4 --- crates/project/src/worktree.rs | 51 ++++++++++++++-------------- crates/project/src/worktree_tests.rs | 5 +-- 3 files changed, 29 insertions(+), 31 deletions(-) diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index a29e00501960367c72beca7e364b418cfbac8901..ed1ce4006d9ed90269d5d3ca184b0ff9c5fde590 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -20,10 +20,6 @@ impl IgnoreStack { Arc::new(Self::All) } - pub fn is_all(&self) -> bool { - matches!(self, Self::All) - } - pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index e8114832c24576fe888ae1d8e72630e6269e467d..1dd0771cfe3deb1fb43bacd1c05f8f0e171335bb 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2234,8 +2234,8 @@ impl LocalSnapshot { } impl BackgroundScannerState { - fn should_scan_directory(&self, entry: &Entry, entry_abs_path: &Path) -> bool { - !entry.is_external && !self.snapshot.is_abs_path_excluded(entry_abs_path) + fn should_scan_directory(&self, entry: &Entry) -> bool { + (!entry.is_external && !entry.is_ignored) || entry.path.file_name() == Some(&*DOT_GIT) || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning || self @@ -2253,7 +2253,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() && !self.snapshot.is_abs_path_excluded(&abs_path) { + if !ignore_stack.is_abs_path_ignored(&abs_path, true) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -2338,6 +2338,7 @@ impl 
BackgroundScannerState { self.scanned_dirs.insert(parent_entry_id); let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)]; let mut entries_by_id_edits = Vec::new(); + for entry in entries { entries_by_id_edits.push(Edit::Insert(PathEntry { id: entry.id, @@ -2852,7 +2853,7 @@ pub struct Entry { pub mtime: SystemTime, pub is_symlink: bool, - /// Whether this entry is ignored by Zed. + /// Whether this entry is ignored by Git. /// /// We only scan ignored entries once the directory is expanded and /// exclude them from searches. @@ -3326,7 +3327,9 @@ impl BackgroundScanner { return false; } if snapshot.is_abs_path_excluded(abs_path) { - log::debug!("ignoring event {relative_path:?} within excluded directory"); + log::debug!( + "ignoring FS event for path {relative_path:?} within excluded directory" + ); return false; } @@ -3638,6 +3641,7 @@ impl BackgroundScanner { ancestor_inodes.insert(child_entry.inode); new_jobs.push(Some(ScanJob { + abs_path: child_abs_path, path: child_path, is_external: child_entry.is_external, ignore_stack: if child_entry.is_ignored { @@ -3646,7 +3650,6 @@ impl BackgroundScanner { ignore_stack.clone() }, ancestor_inodes, - abs_path: child_abs_path, scan_queue: job.scan_queue.clone(), containing_repository: job.containing_repository.clone(), })); @@ -3655,17 +3658,19 @@ impl BackgroundScanner { } } else { child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false); - if let Some((repository_dir, repository, staged_statuses)) = - &job.containing_repository - { - if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) { - let repo_path = RepoPath(repo_path.into()); - child_entry.git_status = combine_git_statuses( - staged_statuses.get(&repo_path).copied(), - repository - .lock() - .unstaged_status(&repo_path, child_entry.mtime), - ); + if !child_entry.is_ignored { + if let Some((repository_dir, repository, staged_statuses)) = + &job.containing_repository + { + if let Ok(repo_path) = 
child_entry.path.strip_prefix(&repository_dir.0) { + let repo_path = RepoPath(repo_path.into()); + child_entry.git_status = combine_git_statuses( + staged_statuses.get(&repo_path).copied(), + repository + .lock() + .unstaged_status(&repo_path, child_entry.mtime), + ); + } } } } @@ -3680,9 +3685,7 @@ impl BackgroundScanner { for entry in &mut new_entries { state.reuse_entry_id(entry); if entry.is_dir() { - if !ignore_stack.is_all() - && state.should_scan_directory(&entry, &root_abs_path.join(&entry.path)) - { + if state.should_scan_directory(&entry) { job_ix += 1; } else { log::debug!("defer scanning directory {:?}", entry.path); @@ -3773,9 +3776,7 @@ impl BackgroundScanner { fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !is_dir - && !(fs_entry.is_ignored || state.snapshot.is_abs_path_excluded(&abs_path)) - { + if !is_dir && !fs_entry.is_ignored { if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(work_dir.0) { let repo_path = RepoPath(repo_path.into()); @@ -3786,7 +3787,7 @@ impl BackgroundScanner { } if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) { - if state.should_scan_directory(&fs_entry, &abs_path) { + if state.should_scan_directory(&fs_entry) { state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx); } else { fs_entry.kind = EntryKind::UnloadedDir; @@ -3942,7 +3943,7 @@ impl BackgroundScanner { // Scan any directories that were previously ignored and weren't previously scanned. 
if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); - if state.should_scan_directory(&entry, &abs_path) { + if state.should_scan_directory(&entry) { state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue); } } diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index cd7f3cff81d0d0101cdf871c3c5e255d675a2e71..f2b519021cdb48ac17b0646a88b94bf76ad75ffa 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -946,7 +946,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { "src/.DS_Store", ".DS_Store", ], - &["target/index", "node_modules/prettier/package.json"], + &["target", "node_modules"], &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], ) }); @@ -967,8 +967,9 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) { &[ "node_modules/prettier/package.json", "node_modules/.DS_Store", + "node_modules", ], - &["target/index"], + &["target"], &[ ".gitignore", "src/lib.rs", From 81809384016b846db4a137e698280d189e638a32 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 16 Nov 2023 22:48:26 +0200 Subject: [PATCH 25/75] Fix most of the TODOs --- assets/settings/default.json | 4 +++- crates/collab/src/tests/integration_tests.rs | 2 -- crates/project/src/project_settings.rs | 1 - crates/project/src/worktree.rs | 1 + crates/search/src/project_search.rs | 6 ++++-- 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index b47f0dc2e7e17decf3ff76de32eccd3d713daa4c..bf2acc708e9fe49653f02ab607059ea1b8a48725 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -268,7 +268,9 @@ // Whether to show warnings or not by default. 
"include_warnings": true }, - // TODO kb docs + // Add files or globs of files that will be excluded by Zed entirely: + // they will be skipped during FS scan(s), file tree and file search + // will lack the corresponding file entries. "file_scan_exclusions": [ "**/.git", "**/.svn", diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 61bbabccddfd2cb96596c854682c3f8d5e918154..fa7c4fe67df4fed4645e8c6552e242b3d7662276 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -1381,8 +1381,6 @@ async fn test_share_project( Path::new("a.txt"), Path::new("b.txt"), Path::new("ignored-dir"), - Path::new("ignored-dir/c.txt"), - Path::new("ignored-dir/d.txt"), ] ); }); diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index cda37be601c8e67b96eda2b708074b638e240f30..a7acc7bba8d8e98cbecc832d042e545133dc5058 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -10,7 +10,6 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, - // TODO kb docs and project_search test #[serde(default)] pub file_scan_exclusions: Option>, } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 1dd0771cfe3deb1fb43bacd1c05f8f0e171335bb..aa6341f330f726a42b15644a04f24a5ede187531 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3502,6 +3502,7 @@ impl BackgroundScanner { let state = self.state.lock(); let snapshot = &state.snapshot; root_abs_path = snapshot.abs_path().clone(); + // TODO kb we need `DOT_GIT` and `GITIGNORE` entries always processed. 
if snapshot.is_abs_path_excluded(&job.abs_path) { log::error!("skipping excluded directory {:?}", job.path); return Ok(()); diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 8f19e7480259e67e7affdfd7b476d750de8545e4..5f3a6db6d49cd089ef9b3bc1855888b462a674fd 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -1767,14 +1767,16 @@ impl View for ProjectSearchBar { render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx) }); - let include_ignored = is_semantic_disabled.then(|| { + let mut include_ignored = is_semantic_disabled.then(|| { render_option_button_icon( - // TODO kb icon + // TODO proper icon "icons/case_insensitive.svg", SearchOptions::INCLUDE_IGNORED, cx, ) }); + // TODO not implemented yet + let _ = include_ignored.take(); let search_button_for_mode = |mode, side, cx: &mut ViewContext| { let is_active = if let Some(search) = self.active_project_search.as_ref() { From 2759ed4d006bb46c50ce1df00f5b5a3fd3acdefb Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 17 Nov 2023 14:30:07 +0200 Subject: [PATCH 26/75] An attempt to ignore git regularly --- crates/project/src/worktree.rs | 71 ++++++++++++++++------------ crates/project/src/worktree_tests.rs | 15 ++++++ 2 files changed, 57 insertions(+), 29 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index aa6341f330f726a42b15644a04f24a5ede187531..a169e062f384c6cca27cc9994da2433fade902ee 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2464,14 +2464,17 @@ impl BackgroundScannerState { // Remove any git repositories whose .git entry no longer exists. let snapshot = &mut self.snapshot; + // TODO kb stop cleaning those up here? 
let mut repositories = mem::take(&mut snapshot.git_repositories); let mut repository_entries = mem::take(&mut snapshot.repository_entries); - repositories.retain(|work_directory_id, _| { - snapshot - .entry_for_id(*work_directory_id) - .map_or(false, |entry| { - snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() - }) + repositories.retain(|_, entry| { + // TODO kb use fs + snapshot.abs_path().join(&entry.git_dir_path).exists() + // snapshot + // .entry_for_id(*work_directory_id) + // .map_or(false, |entry| { + // snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() + // }) }); repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); snapshot.git_repositories = repositories; @@ -3322,11 +3325,11 @@ impl BackgroundScanner { .entry_for_path(parent) .map_or(false, |entry| entry.kind == EntryKind::Dir) }); - if !parent_dir_is_loaded { + if !parent_dir_is_loaded && !is_git_related(&abs_path) { log::debug!("ignoring event {relative_path:?} within unloaded directory"); return false; } - if snapshot.is_abs_path_excluded(abs_path) { + if snapshot.is_abs_path_excluded(abs_path) && !is_git_related(&abs_path) { log::debug!( "ignoring FS event for path {relative_path:?} within excluded directory" ); @@ -3502,7 +3505,6 @@ impl BackgroundScanner { let state = self.state.lock(); let snapshot = &state.snapshot; root_abs_path = snapshot.abs_path().clone(); - // TODO kb we need `DOT_GIT` and `GITIGNORE` entries always processed. 
if snapshot.is_abs_path_excluded(&job.abs_path) { log::error!("skipping excluded directory {:?}", job.path); return Ok(()); @@ -3529,27 +3531,7 @@ impl BackgroundScanner { } }; let child_name = child_abs_path.file_name().unwrap(); - { - let mut state = self.state.lock(); - if state.snapshot.is_abs_path_excluded(&child_abs_path) { - let relative_path = job.path.join(child_name); - log::debug!("skipping excluded child entry {relative_path:?}"); - state.remove_path(&relative_path); - continue; - } - drop(state); - } - let child_path: Arc = job.path.join(child_name).into(); - let child_metadata = match self.fs.metadata(&child_abs_path).await { - Ok(Some(metadata)) => metadata, - Ok(None) => continue, - Err(err) => { - log::error!("error processing {:?}: {:?}", child_abs_path, err); - continue; - } - }; - // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored if child_name == *GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { @@ -3591,8 +3573,33 @@ impl BackgroundScanner { // If we find a .git, we'll need to load the repository. 
else if child_name == *DOT_GIT { dotgit_path = Some(child_path.clone()); + { + let mut state = self.state.lock(); + state.build_git_repository(child_path.clone(), self.fs.as_ref()); + drop(state); + } } + { + let mut state = self.state.lock(); + if state.snapshot.is_abs_path_excluded(&child_abs_path) { + let relative_path = job.path.join(child_name); + log::debug!("skipping excluded child entry {relative_path:?}"); + state.remove_path(&relative_path); + continue; + } + drop(state); + } + + let child_metadata = match self.fs.metadata(&child_abs_path).await { + Ok(Some(metadata)) => metadata, + Ok(None) => continue, + Err(err) => { + log::error!("error processing {:?}: {:?}", child_abs_path, err); + continue; + } + }; + let mut child_entry = Entry::new( child_path.clone(), &child_metadata, @@ -4117,6 +4124,12 @@ impl BackgroundScanner { } } +fn is_git_related(abs_path: &&PathBuf) -> bool { + abs_path + .components() + .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE) +} + fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { let mut result = root_char_bag; result.extend( diff --git a/crates/project/src/worktree_tests.rs b/crates/project/src/worktree_tests.rs index f2b519021cdb48ac17b0646a88b94bf76ad75ffa..22a5cc1e016e80227898f6404646f54a5fb14ee4 100644 --- a/crates/project/src/worktree_tests.rs +++ b/crates/project/src/worktree_tests.rs @@ -731,6 +731,13 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { #[gpui::test(iterations = 10)] async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); + }); let fs = FakeFs::new(cx.background()); fs.insert_tree( "/root", @@ -1860,6 +1867,14 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) { #[gpui::test] async fn test_git_status(deterministic: Arc, cx: &mut 
TestAppContext) { init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]); + }); + }); + }); const IGNORE_RULE: &'static str = "**/target"; let root = temp_tree(json!({ From 616bda85e9405c8e20cfb9e37ac18d197b96803b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 17 Nov 2023 19:21:25 +0200 Subject: [PATCH 27/75] Fix the tests --- crates/project/src/worktree.rs | 74 +++++++------ crates/project_panel/src/project_panel.rs | 126 +++++++++++++++++++++- 2 files changed, 165 insertions(+), 35 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index a169e062f384c6cca27cc9994da2433fade902ee..82fa5d60207b6ba7a5e3ed9ac552f39a3ec3ac65 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -2464,21 +2464,30 @@ impl BackgroundScannerState { // Remove any git repositories whose .git entry no longer exists. let snapshot = &mut self.snapshot; - // TODO kb stop cleaning those up here? 
- let mut repositories = mem::take(&mut snapshot.git_repositories); - let mut repository_entries = mem::take(&mut snapshot.repository_entries); - repositories.retain(|_, entry| { - // TODO kb use fs - snapshot.abs_path().join(&entry.git_dir_path).exists() - // snapshot - // .entry_for_id(*work_directory_id) - // .map_or(false, |entry| { - // snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() - // }) - }); - repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); - snapshot.git_repositories = repositories; - snapshot.repository_entries = repository_entries; + let mut ids_to_preserve = HashSet::default(); + for (&work_directory_id, entry) in snapshot.git_repositories.iter() { + let exists_in_snapshot = snapshot + .entry_for_id(work_directory_id) + .map_or(false, |entry| { + snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() + }); + if exists_in_snapshot { + ids_to_preserve.insert(work_directory_id); + } else { + let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); + if snapshot.is_abs_path_excluded(&git_dir_abs_path) + && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) + { + ids_to_preserve.insert(work_directory_id); + } + } + } + snapshot + .git_repositories + .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); + snapshot + .repository_entries + .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0)); } fn build_git_repository( @@ -3320,20 +3329,22 @@ impl BackgroundScanner { return false; }; - let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { - snapshot - .entry_for_path(parent) - .map_or(false, |entry| entry.kind == EntryKind::Dir) - }); - if !parent_dir_is_loaded && !is_git_related(&abs_path) { - log::debug!("ignoring event {relative_path:?} within unloaded directory"); - return false; - } - if snapshot.is_abs_path_excluded(abs_path) && !is_git_related(&abs_path) { - log::debug!( + if !is_git_related(&abs_path) 
{ + let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { + snapshot + .entry_for_path(parent) + .map_or(false, |entry| entry.kind == EntryKind::Dir) + }); + if !parent_dir_is_loaded { + log::debug!("ignoring event {relative_path:?} within unloaded directory"); + return false; + } + if snapshot.is_abs_path_excluded(abs_path) { + log::debug!( "ignoring FS event for path {relative_path:?} within excluded directory" ); - return false; + return false; + } } relative_paths.push(relative_path); @@ -3573,11 +3584,6 @@ impl BackgroundScanner { // If we find a .git, we'll need to load the repository. else if child_name == *DOT_GIT { dotgit_path = Some(child_path.clone()); - { - let mut state = self.state.lock(); - state.build_git_repository(child_path.clone(), self.fs.as_ref()); - drop(state); - } } { @@ -3595,7 +3601,7 @@ impl BackgroundScanner { Ok(Some(metadata)) => metadata, Ok(None) => continue, Err(err) => { - log::error!("error processing {:?}: {:?}", child_abs_path, err); + log::error!("error processing {child_abs_path:?}: {err:?}"); continue; } }; @@ -4124,7 +4130,7 @@ impl BackgroundScanner { } } -fn is_git_related(abs_path: &&PathBuf) -> bool { +fn is_git_related(abs_path: &Path) -> bool { abs_path .components() .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index d66de1ad2edc365e0d83f05829aba8fbc72a90ac..e43423073c2831ee1ce2a90803693a8b8c5e5a84 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1732,7 +1732,7 @@ mod tests { use super::*; use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle}; use pretty_assertions::assert_eq; - use project::FakeFs; + use project::{project_settings::ProjectSettings, FakeFs}; use serde_json::json; use settings::SettingsStore; use std::{ @@ -1832,6 +1832,123 @@ mod tests { ); } + #[gpui::test] + async fn 
test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); + }); + }); + }); + + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root1", + json!({ + ".dockerignore": "", + ".git": { + "HEAD": "", + }, + "a": { + "0": { "q": "", "r": "", "s": "" }, + "1": { "t": "", "u": "" }, + "2": { "v": "", "w": "", "x": "", "y": "" }, + }, + "b": { + "3": { "Q": "" }, + "4": { "R": "", "S": "", "T": "", "U": "" }, + }, + "C": { + "5": {}, + "6": { "V": "", "W": "" }, + "7": { "X": "" }, + "8": { "Y": {}, "Z": "" } + } + }), + ) + .await; + fs.insert_tree( + "/root2", + json!({ + "d": { + "4": "" + }, + "e": {} + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await; + let workspace = cx + .add_window(|cx| Workspace::test_new(project.clone(), cx)) + .root(cx); + let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx)); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " > b", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root1/b", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b <== selected", + " > 3", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/d", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v root2", + " v d <== selected", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/e", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v 
root2", + " v d", + " v e <== selected", + ] + ); + } + #[gpui::test(iterations = 30)] async fn test_editing_files(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -2930,6 +3047,13 @@ mod tests { client::init_settings(cx); Project::init_settings(cx); }); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); + }); } fn init_test_with_editor(cx: &mut TestAppContext) { From 6976af502975ddb094ada8698383cf20ea7b9ab9 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Fri, 17 Nov 2023 12:16:55 -0800 Subject: [PATCH 28/75] Push some sketches --- .github/workflows/release_nightly.yml | 163 ++++++++++++++++++++++++++ crates/zed/RELEASE_CHANNEL | 2 +- script/upload-nightly | 42 +++++++ 3 files changed, 206 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/release_nightly.yml create mode 100644 script/upload-nightly diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml new file mode 100644 index 0000000000000000000000000000000000000000..bb743505da0decf26e3f25625cc6b07855543ee6 --- /dev/null +++ b/.github/workflows/release_nightly.yml @@ -0,0 +1,163 @@ +name: Release Nightly + +on: + schedule: + # Fire every night at 1:00am + - cron: "0 1 * * *" + push: + tags: + - "nightly*" + +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: 0 + RUST_BACKTRACE: 1 + +jobs: + rustfmt: + name: Check formatting + runs-on: + - self-hosted + - test + steps: + - name: Install Rust + run: | + rustup set profile minimal + rustup update stable + + - name: Checkout repo + uses: actions/checkout@v3 + with: + clean: false + submodules: "recursive" + + - name: cargo fmt + run: cargo fmt --all -- --check + + tests: + name: Run tests + runs-on: + - self-hosted + - test + needs: rustfmt + env: + RUSTFLAGS: -D warnings + steps: + - name: Install Rust + run: | + rustup set profile minimal + rustup update stable + rustup target add 
wasm32-wasi + cargo install cargo-nextest + + - name: Install Node + uses: actions/setup-node@v3 + with: + node-version: "18" + + - name: Checkout repo + uses: actions/checkout@v3 + with: + clean: false + submodules: "recursive" + + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 70 + + - name: Run check + run: cargo check --workspace + + - name: Run tests + run: cargo nextest run --workspace --no-fail-fast + + - name: Build collab + run: cargo build -p collab + + - name: Build other binaries + run: cargo build --workspace --bins --all-features + + bundle: + name: Bundle app + runs-on: + - self-hosted + - bundle + needs: tests + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} + APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + steps: + - name: Install Rust + run: | + rustup set profile minimal + rustup update stable + rustup target add aarch64-apple-darwin + rustup target add x86_64-apple-darwin + rustup target add wasm32-wasi + + - name: Install Node + uses: actions/setup-node@v3 + with: + node-version: "18" + + - name: Checkout repo + uses: actions/checkout@v3 + with: + clean: false + submodules: "recursive" + + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 70 + + - name: Determine version and release channel + run: | + set -eu + + version=$(git rev-parse --short HEAD) + channel=$(cat crates/zed/RELEASE_CHANNEL) + echo "Publishing version: ${version} on release channel ${channel}" + echo "RELEASE_CHANNEL=${channel}" >> $GITHUB_ENV + + case ${channel} in + nightly) + exit 0;; + *) + echo "can't publish a release on channel ${channel} with this action" + exit 1;; + esac + + - name: Generate license file + run: script/generate-licenses + + - name: Create app bundle + run: script/bundle + + # So, here's an example of how 
this _could_ be done. + # Problem: Need to setup some docker secrets + # Problem: This action is very old + # Problem: Need to add stuff for interacting with our API + # - uses: BetaHuhn/do-spaces-action@v2 + # name: Upload app bundle to nightly + # id: spaces-upload + # with: + # # Need to put this stuff in kuberenetes I think + # access_key: ${{ secrets.ACCESS_KEY}} + # secret_key: ${{ secrets.SECRET_KEY }} + # space_name: ${{ secrets.SPACE_NAME }} + # space_region: ${{ secrets.SPACE_REGION }} + # source: target/release/Zed.dmg + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Zed Nightly + run: script/upload-nightly #something something + with: + do_secret: ${{ secrets.DO_SPACES_SECRET }} + do_access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }} + + # Upload to zed.dev? + - name: Upload new release URL to zed.dev + run: ??? #something something + with: + nightly_release_key: ${{ secrets.NIGHTLY_RELEASE_KEY }} + deployment_url: ${{ steps.spaces-upload.outputs.output_url }} diff --git a/crates/zed/RELEASE_CHANNEL b/crates/zed/RELEASE_CHANNEL index 90012116c03db04344ab10d50348553aa94f1ea0..bf867e0ae5b6c08df1118a2ece970677bc479f1b 100644 --- a/crates/zed/RELEASE_CHANNEL +++ b/crates/zed/RELEASE_CHANNEL @@ -1 +1 @@ -dev \ No newline at end of file +nightly diff --git a/script/upload-nightly b/script/upload-nightly new file mode 100644 index 0000000000000000000000000000000000000000..e98516a7dc20755ea22b697ea8d53fab15626264 --- /dev/null +++ b/script/upload-nightly @@ -0,0 +1,42 @@ +#!/bin/bash + +# Based on the template in: https://docs.digitalocean.com/reference/api/spaces-api/ + + +# Step 1: Define the parameters for the Space you want to upload to. +SPACE="zed-nightly-host" # Find your endpoint in the control panel, under Settings. +REGION="nyc3" # Must be "us-east-1" when creating new Spaces. Otherwise, use the region in your endpoint (e.g. nyc3). +STORAGETYPE="STANDARD" # Storage type, can be STANDARD, REDUCED_REDUNDANCY, etc. +KEY="???????" 
# Access key pair. You can create access key pairs using the control panel or API. +SECRET="$SECRET" # Secret access key defined through an environment variable. + +# Step 2: Define a function that uploads your object via cURL. +function putS3 +{ + path="." # The local path to the file you want to upload. + file="hello-world.txt" # The file you want to upload. + space_path="/" # The path within your Space where you want to upload the new file. + space="${SPACE}" + date=$(date +"%a, %d %b %Y %T %z") + acl="x-amz-acl:private" # Defines Access-control List (ACL) permissions, such as private or public. + content_type="text/plain" # Defines the type of content you are uploading. + storage_type="x-amz-storage-class:${STORAGETYPE}" + string="PUT\n\n$content_type\n$date\n$acl\n$storage_type\n/$space$space_path$file" + signature=$(echo -en "${string}" | openssl sha1 -hmac "${SECRET}" -binary | base64) + curl -s -X PUT -T "$path/$file" \ # The cURL command that uploads your file. + -H "Host: $space.${REGION}.digitaloceanspaces.com" \ + -H "Date: $date" \ + -H "Content-Type: $content_type" \ + -H "$storage_type" \ + -H "$acl" \ + -H "Authorization: AWS ${KEY}:$signature" \ + "https://$space.${REGION}.digitaloceanspaces.com$space_path$file" +} + +# Step 3: mkdir for file based on release sha +# Step 4: Put Zed.dmg in that directory +for file in "$path"/*; do + putS3 "$path" "${file##*/}" "nyc-tutorial-space/" +done + +# Step 5: Output that directory for next step From ceb20dea969bbfbe498ff24d165081bb540757a2 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 13:23:12 -0700 Subject: [PATCH 29/75] Refactorings --- crates/gpui2/src/view.rs | 4 + crates/project_panel2/src/project_panel.rs | 28 ++-- crates/settings2/src/settings_file.rs | 1 + crates/workspace2/src/dock.rs | 144 ++++++++++++++------- crates/workspace2/src/workspace2.rs | 4 +- 5 files changed, 118 insertions(+), 63 deletions(-) diff --git a/crates/gpui2/src/view.rs b/crates/gpui2/src/view.rs index 
6b8c8a3eefcfcd79c955ecfbed170212bbe0b12c..a26812c7ae142d7fa82f6d1eb5927ec12cc986cf 100644 --- a/crates/gpui2/src/view.rs +++ b/crates/gpui2/src/view.rs @@ -191,6 +191,10 @@ impl AnyView { self.model.entity_type } + pub fn entity_id(&self) -> EntityId { + self.model.entity_id() + } + pub(crate) fn draw( &self, origin: Point, diff --git a/crates/project_panel2/src/project_panel.rs b/crates/project_panel2/src/project_panel.rs index 87edabab5217084dffa8693dc5121de4bc6166fa..7f36cffd603fd643520a6eb3c5d2c8b49b98f12f 100644 --- a/crates/project_panel2/src/project_panel.rs +++ b/crates/project_panel2/src/project_panel.rs @@ -1,6 +1,6 @@ pub mod file_associations; mod project_panel_settings; -use settings::Settings; +use settings::{Settings, SettingsStore}; use db::kvp::KEY_VALUE_STORE; use editor::{scroll::autoscroll::Autoscroll, Cancel, Editor}; @@ -34,7 +34,7 @@ use ui::{h_stack, v_stack, IconElement, Label}; use unicase::UniCase; use util::{maybe, ResultExt, TryFutureExt}; use workspace::{ - dock::{DockPosition, PanelEvent}, + dock::{DockPosition, Panel, PanelEvent}, Workspace, }; @@ -148,7 +148,6 @@ pub enum Event { SplitEntry { entry_id: ProjectEntryId, }, - DockPositionChanged, Focus, NewSearchInDirectory { dir_entry: Entry, @@ -244,16 +243,17 @@ impl ProjectPanel { this.update_visible_entries(None, cx); // Update the dock position when the setting changes. 
- // todo!() - // let mut old_dock_position = this.position(cx); - // cx.observe_global::(move |this, cx| { - // let new_dock_position = this.position(cx); - // if new_dock_position != old_dock_position { - // old_dock_position = new_dock_position; - // cx.emit(Event::DockPositionChanged); - // } - // }) - // .detach(); + let mut old_dock_position = this.position(cx); + ProjectPanelSettings::register(cx); + cx.observe_global::(move |this, cx| { + dbg!("OLA!"); + let new_dock_position = this.position(cx); + if new_dock_position != old_dock_position { + old_dock_position = new_dock_position; + cx.emit(PanelEvent::ChangePosition); + } + }) + .detach(); this }); @@ -1485,7 +1485,7 @@ impl EventEmitter for ProjectPanel {} impl EventEmitter for ProjectPanel {} -impl workspace::dock::Panel for ProjectPanel { +impl Panel for ProjectPanel { fn position(&self, cx: &WindowContext) -> DockPosition { match ProjectPanelSettings::get_global(cx).dock { ProjectPanelDockPosition::Left => DockPosition::Left, diff --git a/crates/settings2/src/settings_file.rs b/crates/settings2/src/settings_file.rs index fc4ad5882e25b6c452a412f107e7535000a6cff8..c28e281895771a398e8a214961687df5002b1ccd 100644 --- a/crates/settings2/src/settings_file.rs +++ b/crates/settings2/src/settings_file.rs @@ -77,6 +77,7 @@ pub fn handle_settings_file_changes( }); cx.spawn(move |mut cx| async move { while let Some(user_settings_content) = user_settings_file_rx.next().await { + eprintln!("settings file changed"); let result = cx.update_global(|store: &mut SettingsStore, cx| { store .set_user_settings(&user_settings_content, cx) diff --git a/crates/workspace2/src/dock.rs b/crates/workspace2/src/dock.rs index 07237d6f62dd3c8ec102fcf979c9da40a7862583..1d6b8b3a2a94062ba6372f7a4cbc0539b0ab483c 100644 --- a/crates/workspace2/src/dock.rs +++ b/crates/workspace2/src/dock.rs @@ -42,7 +42,7 @@ pub trait Panel: FocusableView + EventEmitter { } pub trait PanelHandle: Send + Sync { - fn id(&self) -> EntityId; + fn 
entity_id(&self) -> EntityId; fn persistent_name(&self) -> &'static str; fn position(&self, cx: &WindowContext) -> DockPosition; fn position_is_valid(&self, position: DockPosition, cx: &WindowContext) -> bool; @@ -64,8 +64,8 @@ impl PanelHandle for View where T: Panel, { - fn id(&self) -> EntityId { - self.entity_id() + fn entity_id(&self) -> EntityId { + Entity::entity_id(self) } fn persistent_name(&self) -> &'static str { @@ -256,20 +256,19 @@ impl Dock { } } - // todo!() - // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext) { - // for entry in &mut self.panel_entries { - // if entry.panel.as_any() == panel { - // if zoomed != entry.panel.is_zoomed(cx) { - // entry.panel.set_zoomed(zoomed, cx); - // } - // } else if entry.panel.is_zoomed(cx) { - // entry.panel.set_zoomed(false, cx); - // } - // } + pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext) { + for entry in &mut self.panel_entries { + if entry.panel.entity_id() == panel.entity_id() { + if zoomed != entry.panel.is_zoomed(cx) { + entry.panel.set_zoomed(zoomed, cx); + } + } else if entry.panel.is_zoomed(cx) { + entry.panel.set_zoomed(false, cx); + } + } - // cx.notify(); - // } + cx.notify(); + } pub fn zoom_out(&mut self, cx: &mut ViewContext) { for entry in &mut self.panel_entries { @@ -279,42 +278,91 @@ impl Dock { } } - pub(crate) fn add_panel(&mut self, panel: View, cx: &mut ViewContext) { + pub(crate) fn add_panel( + &mut self, + panel: View, + workspace: WeakView, + cx: &mut ViewContext, + ) { let subscriptions = [ cx.observe(&panel, |_, _, cx| cx.notify()), - cx.subscribe(&panel, |this, panel, event, cx| { - match event { - PanelEvent::ChangePosition => { - //todo!() - // see: Workspace::add_panel_with_extra_event_handler - } - PanelEvent::ZoomIn => { - //todo!() - // see: Workspace::add_panel_with_extra_event_handler - } - PanelEvent::ZoomOut => { - // todo!() - // // see: Workspace::add_panel_with_extra_event_handler - } - 
PanelEvent::Activate => { - if let Some(ix) = this - .panel_entries - .iter() - .position(|entry| entry.panel.id() == panel.id()) - { - this.set_open(true, cx); - this.activate_panel(ix, cx); - //` todo!() - // cx.focus(&panel); + cx.subscribe(&panel, move |this, panel, event, cx| match event { + PanelEvent::ChangePosition => { + let new_position = panel.read(cx).position(cx); + + let Ok(new_dock) = workspace.update(cx, |workspace, cx| { + if panel.is_zoomed(cx) { + workspace.zoomed_position = Some(new_position); } - } - PanelEvent::Close => { - if this.visible_panel().map_or(false, |p| p.id() == panel.id()) { - this.set_open(false, cx); + match new_position { + DockPosition::Left => &workspace.left_dock, + DockPosition::Bottom => &workspace.bottom_dock, + DockPosition::Right => &workspace.right_dock, } + .clone() + }) else { + return; + }; + + let was_visible = this.is_open() + && this.visible_panel().map_or(false, |active_panel| { + active_panel.entity_id() == Entity::entity_id(&panel) + }); + + this.remove_panel(&panel, cx); + + new_dock.update(cx, |new_dock, cx| { + new_dock.add_panel(panel.clone(), workspace.clone(), cx); + if was_visible { + new_dock.set_open(true, cx); + new_dock.activate_panel(this.panels_len() - 1, cx); + } + }); + } + PanelEvent::ZoomIn => { + this.set_panel_zoomed(&panel.to_any(), true, cx); + if !panel.has_focus(cx) { + cx.focus_view(&panel); + } + workspace + .update(cx, |workspace, cx| { + workspace.zoomed = Some(panel.downgrade().into()); + workspace.zoomed_position = Some(panel.read(cx).position(cx)); + }) + .ok(); + } + PanelEvent::ZoomOut => { + this.set_panel_zoomed(&panel.to_any(), false, cx); + workspace + .update(cx, |workspace, cx| { + if workspace.zoomed_position == Some(this.position) { + workspace.zoomed = None; + workspace.zoomed_position = None; + } + cx.notify(); + }) + .ok(); + } + PanelEvent::Activate => { + if let Some(ix) = this + .panel_entries + .iter() + .position(|entry| entry.panel.entity_id() == 
Entity::entity_id(&panel)) + { + this.set_open(true, cx); + this.activate_panel(ix, cx); + cx.focus_view(&panel); + } + } + PanelEvent::Close => { + if this + .visible_panel() + .map_or(false, |p| p.entity_id() == Entity::entity_id(&panel)) + { + this.set_open(false, cx); } - PanelEvent::Focus => todo!(), } + PanelEvent::Focus => todo!(), }), ]; @@ -337,7 +385,7 @@ impl Dock { if let Some(panel_ix) = self .panel_entries .iter() - .position(|entry| entry.panel.id() == panel.id()) + .position(|entry| entry.panel.entity_id() == Entity::entity_id(panel)) { if panel_ix == self.active_panel_index { self.active_panel_index = 0; @@ -398,7 +446,7 @@ impl Dock { pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option { self.panel_entries .iter() - .find(|entry| entry.panel.id() == panel.id()) + .find(|entry| entry.panel.entity_id() == panel.entity_id()) .map(|entry| entry.panel.size(cx)) } diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index dc69280c1ebcf165a7d8d9168e9bcc744194b79e..ae85f0a868de10fd38fc7aedd05e2c0369e4909a 100644 --- a/crates/workspace2/src/workspace2.rs +++ b/crates/workspace2/src/workspace2.rs @@ -831,7 +831,9 @@ impl Workspace { DockPosition::Right => &self.right_dock, }; - dock.update(cx, |dock, cx| dock.add_panel(panel, cx)); + dock.update(cx, |dock, cx| { + dock.add_panel(panel, self.weak_self.clone(), cx) + }); } pub fn status_bar(&self) -> &View { From 6bfe6fa0e10b651a9d1e1b8743ba4399e8643d79 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 13:45:05 -0700 Subject: [PATCH 30/75] Fix image errors * Firstly only log one error per image load, not per frame * Secondly use an Icon not an image for rendering Icons --- crates/gpui2/src/elements/img.rs | 4 ++-- crates/gpui2/src/image_cache.rs | 10 +++++++++- crates/terminal_view2/src/terminal_view.rs | 13 ++++++------- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/crates/gpui2/src/elements/img.rs 
b/crates/gpui2/src/elements/img.rs index 1080135fe16edcabdf6c2176e264bd551be5011d..5376c4001227f952709d3075e8149c0f1fe3008c 100644 --- a/crates/gpui2/src/elements/img.rs +++ b/crates/gpui2/src/elements/img.rs @@ -81,7 +81,7 @@ impl Element for Img { if let Some(data) = image_future .clone() .now_or_never() - .and_then(ResultExt::log_err) + .and_then(|result| result.ok()) { let corner_radii = corner_radii.to_pixels(bounds.size, cx.rem_size()); cx.with_z_index(1, |cx| { @@ -90,7 +90,7 @@ impl Element for Img { }); } else { cx.spawn(|_, mut cx| async move { - if image_future.await.log_err().is_some() { + if image_future.await.ok().is_some() { cx.on_next_frame(|cx| cx.notify()); } }) diff --git a/crates/gpui2/src/image_cache.rs b/crates/gpui2/src/image_cache.rs index 6417f7d5e133991c41593c81bc7088fb46a04129..f80b0f0c2f71a60fa91dbf87a13ffa3b86f43abf 100644 --- a/crates/gpui2/src/image_cache.rs +++ b/crates/gpui2/src/image_cache.rs @@ -2,7 +2,7 @@ use crate::{ImageData, ImageId, SharedString}; use collections::HashMap; use futures::{ future::{BoxFuture, Shared}, - AsyncReadExt, FutureExt, + AsyncReadExt, FutureExt, TryFutureExt, }; use image::ImageError; use parking_lot::Mutex; @@ -88,6 +88,14 @@ impl ImageCache { Ok(Arc::new(ImageData::new(image))) } } + .map_err({ + let uri = uri.clone(); + + move |error| { + log::log!(log::Level::Error, "{:?} {:?}", &uri, &error); + error + } + }) .boxed() .shared(); diff --git a/crates/terminal_view2/src/terminal_view.rs b/crates/terminal_view2/src/terminal_view.rs index 56de1ee7efef0497e691be607f8e56859ecd9c11..27e55602fb8562c793447220e3849e953f177b8a 100644 --- a/crates/terminal_view2/src/terminal_view.rs +++ b/crates/terminal_view2/src/terminal_view.rs @@ -9,11 +9,10 @@ pub mod terminal_panel; // use crate::terminal_element::TerminalElement; use editor::{scroll::autoscroll::Autoscroll, Editor}; use gpui::{ - actions, div, img, red, Action, AnyElement, AppContext, Component, DispatchPhase, Div, - EventEmitter, FocusEvent, 
FocusHandle, Focusable, FocusableComponent, FocusableView, - InputHandler, InteractiveComponent, KeyDownEvent, Keystroke, Model, MouseButton, - ParentComponent, Pixels, Render, SharedString, Styled, Task, View, ViewContext, VisualContext, - WeakView, + actions, div, Action, AnyElement, AppContext, Component, DispatchPhase, Div, EventEmitter, + FocusEvent, FocusHandle, Focusable, FocusableComponent, FocusableView, InputHandler, + InteractiveComponent, KeyDownEvent, Keystroke, Model, MouseButton, ParentComponent, Pixels, + Render, SharedString, Styled, Task, View, ViewContext, VisualContext, WeakView, }; use language::Bias; use persistence::TERMINAL_DB; @@ -32,7 +31,7 @@ use workspace::{ notifications::NotifyResultExt, register_deserializable_item, searchable::{SearchEvent, SearchOptions, SearchableItem}, - ui::{ContextMenu, Label, ListEntry}, + ui::{ContextMenu, Icon, IconElement, Label, ListEntry}, CloseActiveItem, NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId, }; @@ -755,7 +754,7 @@ impl Item for TerminalView { let title = self.terminal().read(cx).title(); div() - .child(img().uri("icons/terminal.svg").bg(red())) + .child(IconElement::new(Icon::Terminal)) .child(title) .render() } From f44f60c6e6b4b8f9f9f9ee9d9072e4aebc9f8424 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Fri, 17 Nov 2023 12:54:16 -0800 Subject: [PATCH 31/75] WIP: --- crates/auto_update/src/auto_update.rs | 4 +--- script/upload-nightly | 2 ++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 36690bcd2555ce9497b85ee6fd7f0396eef7c08b..cf285ac7cfede2b6adeb264fd6dfe566c2e12e0d 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -121,9 +121,7 @@ fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) { if cx.has_global::() { match cx.global::() { ReleaseChannel::Dev => {} - ReleaseChannel::Nightly => cx - .platform() - 
.open_url(&format!("{server_url}/releases/nightly/{current_version}")), + ReleaseChannel::Nightly => {} ReleaseChannel::Preview => cx .platform() .open_url(&format!("{server_url}/releases/preview/{current_version}")), diff --git a/script/upload-nightly b/script/upload-nightly index e98516a7dc20755ea22b697ea8d53fab15626264..56d00dca52e4d2c50699c2c106302a05f8ec245a 100644 --- a/script/upload-nightly +++ b/script/upload-nightly @@ -34,6 +34,8 @@ function putS3 } # Step 3: mkdir for file based on release sha +# :/sha-of- -commit/Zed.dmg + # Step 4: Put Zed.dmg in that directory for file in "$path"/*; do putS3 "$path" "${file##*/}" "nyc-tutorial-space/" From d352a63d9d5d9b0eba4ece72c5eb0cee7cc3ae5b Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 17 Nov 2023 22:54:19 +0200 Subject: [PATCH 32/75] Port new workspace logic to gpui2, uncomment most of the workspace2 tests --- .../src/chat_panel/message_editor.rs | 10 +- crates/project/src/ignore.rs | 1 + crates/project2/src/ignore.rs | 4 - crates/project2/src/project2.rs | 12 +- crates/project2/src/project_settings.rs | 2 + crates/project2/src/worktree.rs | 342 +- crates/project2/src/worktree_tests.rs | 4451 +++++++++-------- crates/project_panel/src/project_panel.rs | 3 +- crates/project_panel2/src/project_panel.rs | 126 +- 9 files changed, 2684 insertions(+), 2267 deletions(-) diff --git a/crates/collab_ui2/src/chat_panel/message_editor.rs b/crates/collab_ui2/src/chat_panel/message_editor.rs index 6dbe3aa204e9edf19d605ef880e52dacf4fe627d..9e6bfb553ebf1afd9c070162b25f589aa012762e 100644 --- a/crates/collab_ui2/src/chat_panel/message_editor.rs +++ b/crates/collab_ui2/src/chat_panel/message_editor.rs @@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration}; const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50); lazy_static! 
{ - static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex( - "@[-_\\w]+", - false, - false, - Default::default(), - Default::default() - ) - .unwrap(); + static ref MENTIONS_SEARCH: SearchQuery = + SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap(); } pub struct MessageEditor { diff --git a/crates/project/src/ignore.rs b/crates/project/src/ignore.rs index ed1ce4006d9ed90269d5d3ca184b0ff9c5fde590..41e5746f13c3b372a186813d5863683acf4b9996 100644 --- a/crates/project/src/ignore.rs +++ b/crates/project/src/ignore.rs @@ -35,6 +35,7 @@ impl IgnoreStack { if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) { return true; } + match self { Self::None => false, Self::All => true, diff --git a/crates/project2/src/ignore.rs b/crates/project2/src/ignore.rs index 8bac08b96c3a7b920328d946723ae423404b529e..41e5746f13c3b372a186813d5863683acf4b9996 100644 --- a/crates/project2/src/ignore.rs +++ b/crates/project2/src/ignore.rs @@ -20,10 +20,6 @@ impl IgnoreStack { Arc::new(Self::All) } - pub fn is_all(&self) -> bool { - matches!(self, IgnoreStack::All) - } - pub fn append(self: Arc, abs_base_path: Arc, ignore: Arc) -> Arc { match self.as_ref() { IgnoreStack::All => self, diff --git a/crates/project2/src/project2.rs b/crates/project2/src/project2.rs index 3cc4e8a2932611379a2c49cd55e99a32d508fea0..3f7c9b7188bcbca247839cf70fe9bb2568e4461d 100644 --- a/crates/project2/src/project2.rs +++ b/crates/project2/src/project2.rs @@ -5640,8 +5640,16 @@ impl Project { .iter() .filter_map(|(_, b)| { let buffer = b.upgrade()?; - let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()); - if let Some(path) = snapshot.file().map(|file| file.path()) { + let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| { + let is_ignored = buffer + .project_path(cx) + .and_then(|path| self.entry_for_path(&path, cx)) + .map_or(false, |entry| entry.is_ignored); + (is_ignored, buffer.snapshot()) + }); + if is_ignored && !query.include_ignored() { + 
return None; + } else if let Some(path) = snapshot.file().map(|file| file.path()) { Some((path.clone(), (buffer, snapshot))) } else { unnamed_files.push(buffer); diff --git a/crates/project2/src/project_settings.rs b/crates/project2/src/project_settings.rs index 028a564b9c0b54534572fbccaae44cce0b1a9693..2a8df47e67a5b7ad7e540e581b055da546c3f7cf 100644 --- a/crates/project2/src/project_settings.rs +++ b/crates/project2/src/project_settings.rs @@ -11,6 +11,8 @@ pub struct ProjectSettings { pub lsp: HashMap, LspSettings>, #[serde(default)] pub git: GitSettings, + #[serde(default)] + pub file_scan_exclusions: Option>, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] diff --git a/crates/project2/src/worktree.rs b/crates/project2/src/worktree.rs index a020e8db4c311a78404169b5a31a23db8563b570..fcb64c40b42d09ec30169946a2526d48aa774a0f 100644 --- a/crates/project2/src/worktree.rs +++ b/crates/project2/src/worktree.rs @@ -1,5 +1,6 @@ use crate::{ - copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions, + copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary, + ProjectEntryId, RemoveOptions, }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context as _, Result}; @@ -25,6 +26,7 @@ use gpui::{ AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext, Task, }; +use itertools::Itertools; use language::{ proto::{ deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending, @@ -39,6 +41,7 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; +use settings::{Settings, SettingsStore}; use smol::channel::{self, Sender}; use std::{ any::Any, @@ -58,7 +61,10 @@ use std::{ time::{Duration, SystemTime}, }; use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; -use util::{paths::HOME, ResultExt}; +use util::{ + paths::{PathMatcher, HOME}, + ResultExt, +}; #[derive(Copy, Clone, 
PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); @@ -73,7 +79,7 @@ pub struct LocalWorktree { scan_requests_tx: channel::Sender, path_prefixes_to_scan_tx: channel::Sender>, is_scanning: (watch::Sender, watch::Receiver), - _background_scanner_task: Task<()>, + _background_scanner_tasks: Vec>, share: Option, diagnostics: HashMap< Arc, @@ -219,6 +225,7 @@ pub struct LocalSnapshot { /// All of the git repositories in the worktree, indexed by the project entry /// id of their parent directory. git_repositories: TreeMap, + file_scan_exclusions: Vec, } struct BackgroundScannerState { @@ -302,17 +309,56 @@ impl Worktree { .await .context("failed to stat worktree path")?; + let closure_fs = Arc::clone(&fs); + let closure_next_entry_id = Arc::clone(&next_entry_id); + let closure_abs_path = abs_path.to_path_buf(); cx.build_model(move |cx: &mut ModelContext| { + cx.observe_global::(move |this, cx| { + if let Self::Local(this) = this { + let new_file_scan_exclusions = + file_scan_exclusions(ProjectSettings::get_global(cx)); + if new_file_scan_exclusions != this.snapshot.file_scan_exclusions { + this.snapshot.file_scan_exclusions = new_file_scan_exclusions; + log::info!( + "Re-scanning directories, new scan exclude files: {:?}", + this.snapshot + .file_scan_exclusions + .iter() + .map(ToString::to_string) + .collect::>() + ); + + let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); + let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = + channel::unbounded(); + this.scan_requests_tx = scan_requests_tx; + this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx; + this._background_scanner_tasks = start_background_scan_tasks( + &closure_abs_path, + this.snapshot(), + scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&closure_next_entry_id), + Arc::clone(&closure_fs), + cx, + ); + this.is_scanning = watch::channel_with(true); + } + } + }) + .detach(); + let root_name = abs_path .file_name() .map_or(String::new(), |f| 
f.to_string_lossy().to_string()); let mut snapshot = LocalSnapshot { + file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)), ignores_by_parent_abs_path: Default::default(), git_repositories: Default::default(), snapshot: Snapshot { id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize), - abs_path: abs_path.clone(), + abs_path: abs_path.to_path_buf().into(), root_name: root_name.clone(), root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(), entries_by_path: Default::default(), @@ -337,61 +383,22 @@ impl Worktree { let (scan_requests_tx, scan_requests_rx) = channel::unbounded(); let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded(); - let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); - - cx.spawn(|this, mut cx| async move { - while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) { - this.update(&mut cx, |this, cx| { - let this = this.as_local_mut().unwrap(); - match state { - ScanState::Started => { - *this.is_scanning.0.borrow_mut() = true; - } - ScanState::Updated { - snapshot, - changes, - barrier, - scanning, - } => { - *this.is_scanning.0.borrow_mut() = scanning; - this.set_snapshot(snapshot, changes, cx); - drop(barrier); - } - } - cx.notify(); - }) - .ok(); - } - }) - .detach(); - - let background_scanner_task = cx.background_executor().spawn({ - let fs = fs.clone(); - let snapshot = snapshot.clone(); - let background = cx.background_executor().clone(); - async move { - let events = fs.watch(&abs_path, Duration::from_millis(100)).await; - BackgroundScanner::new( - snapshot, - next_entry_id, - fs, - scan_states_tx, - background, - scan_requests_rx, - path_prefixes_to_scan_rx, - ) - .run(events) - .await; - } - }); - + let task_snapshot = snapshot.clone(); Worktree::Local(LocalWorktree { snapshot, is_scanning: watch::channel_with(true), share: None, scan_requests_tx, path_prefixes_to_scan_tx, - _background_scanner_task: background_scanner_task, + 
_background_scanner_tasks: start_background_scan_tasks( + &abs_path, + task_snapshot, + scan_requests_rx, + path_prefixes_to_scan_rx, + Arc::clone(&next_entry_id), + Arc::clone(&fs), + cx, + ), diagnostics: Default::default(), diagnostic_summaries: Default::default(), client, @@ -584,6 +591,77 @@ impl Worktree { } } +fn start_background_scan_tasks( + abs_path: &Path, + snapshot: LocalSnapshot, + scan_requests_rx: channel::Receiver, + path_prefixes_to_scan_rx: channel::Receiver>, + next_entry_id: Arc, + fs: Arc, + cx: &mut ModelContext<'_, Worktree>, +) -> Vec> { + let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded(); + let background_scanner = cx.background_executor().spawn({ + let abs_path = abs_path.to_path_buf(); + let background = cx.background_executor().clone(); + async move { + let events = fs.watch(&abs_path, Duration::from_millis(100)).await; + BackgroundScanner::new( + snapshot, + next_entry_id, + fs, + scan_states_tx, + background, + scan_requests_rx, + path_prefixes_to_scan_rx, + ) + .run(events) + .await; + } + }); + let scan_state_updater = cx.spawn(|this, mut cx| async move { + while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) { + this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + match state { + ScanState::Started => { + *this.is_scanning.0.borrow_mut() = true; + } + ScanState::Updated { + snapshot, + changes, + barrier, + scanning, + } => { + *this.is_scanning.0.borrow_mut() = scanning; + this.set_snapshot(snapshot, changes, cx); + drop(barrier); + } + } + cx.notify(); + }) + .ok(); + } + }); + vec![background_scanner, scan_state_updater] +} + +fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec { + project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter() + .sorted() + .filter_map(|pattern| { + PathMatcher::new(pattern) + .map(Some) + .unwrap_or_else(|e| { + log::error!( + "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to 
parsing error: {e:#}" + ); + None + }) + }) + .collect() +} + impl LocalWorktree { pub fn contains_abs_path(&self, path: &Path) -> bool { path.starts_with(&self.abs_path) @@ -1482,7 +1560,7 @@ impl Snapshot { self.entries_by_id.get(&entry_id, &()).is_some() } - pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result { + fn insert_entry(&mut self, entry: proto::Entry) -> Result { let entry = Entry::try_from((&self.root_char_bag, entry))?; let old_entry = self.entries_by_id.insert_or_replace( PathEntry { @@ -2143,6 +2221,12 @@ impl LocalSnapshot { paths.sort_by(|a, b| a.0.cmp(b.0)); paths } + + fn is_abs_path_excluded(&self, abs_path: &Path) -> bool { + self.file_scan_exclusions + .iter() + .any(|exclude_matcher| exclude_matcher.is_match(abs_path)) + } } impl BackgroundScannerState { @@ -2165,7 +2249,7 @@ impl BackgroundScannerState { let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true); let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path); let mut containing_repository = None; - if !ignore_stack.is_all() { + if !ignore_stack.is_abs_path_ignored(&abs_path, true) { if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) { if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) { containing_repository = Some(( @@ -2376,18 +2460,30 @@ impl BackgroundScannerState { // Remove any git repositories whose .git entry no longer exists. 
let snapshot = &mut self.snapshot; - let mut repositories = mem::take(&mut snapshot.git_repositories); - let mut repository_entries = mem::take(&mut snapshot.repository_entries); - repositories.retain(|work_directory_id, _| { - snapshot - .entry_for_id(*work_directory_id) + let mut ids_to_preserve = HashSet::default(); + for (&work_directory_id, entry) in snapshot.git_repositories.iter() { + let exists_in_snapshot = snapshot + .entry_for_id(work_directory_id) .map_or(false, |entry| { snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some() - }) - }); - repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some()); - snapshot.git_repositories = repositories; - snapshot.repository_entries = repository_entries; + }); + if exists_in_snapshot { + ids_to_preserve.insert(work_directory_id); + } else { + let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path); + if snapshot.is_abs_path_excluded(&git_dir_abs_path) + && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None)) + { + ids_to_preserve.insert(work_directory_id); + } + } + } + snapshot + .git_repositories + .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id)); + snapshot + .repository_entries + .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0)); } fn build_git_repository( @@ -3085,7 +3181,7 @@ impl BackgroundScanner { let ignore_stack = state .snapshot .ignore_stack_for_abs_path(&root_abs_path, true); - if ignore_stack.is_all() { + if ignore_stack.is_abs_path_ignored(&root_abs_path, true) { root_entry.is_ignored = true; state.insert_entry(root_entry.clone(), self.fs.as_ref()); } @@ -3222,14 +3318,22 @@ impl BackgroundScanner { return false; }; - let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { - snapshot - .entry_for_path(parent) - .map_or(false, |entry| entry.kind == EntryKind::Dir) - }); - if !parent_dir_is_loaded { - log::debug!("ignoring event {relative_path:?} within unloaded directory"); 
- return false; + if !is_git_related(&abs_path) { + let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| { + snapshot + .entry_for_path(parent) + .map_or(false, |entry| entry.kind == EntryKind::Dir) + }); + if !parent_dir_is_loaded { + log::debug!("ignoring event {relative_path:?} within unloaded directory"); + return false; + } + if snapshot.is_abs_path_excluded(abs_path) { + log::debug!( + "ignoring FS event for path {relative_path:?} within excluded directory" + ); + return false; + } } relative_paths.push(relative_path); @@ -3392,18 +3496,26 @@ impl BackgroundScanner { } async fn scan_dir(&self, job: &ScanJob) -> Result<()> { - log::debug!("scan directory {:?}", job.path); - - let mut ignore_stack = job.ignore_stack.clone(); - let mut new_ignore = None; - let (root_abs_path, root_char_bag, next_entry_id) = { - let snapshot = &self.state.lock().snapshot; - ( - snapshot.abs_path().clone(), - snapshot.root_char_bag, - self.next_entry_id.clone(), - ) - }; + let root_abs_path; + let mut ignore_stack; + let mut new_ignore; + let root_char_bag; + let next_entry_id; + { + let state = self.state.lock(); + let snapshot = &state.snapshot; + root_abs_path = snapshot.abs_path().clone(); + if snapshot.is_abs_path_excluded(&job.abs_path) { + log::error!("skipping excluded directory {:?}", job.path); + return Ok(()); + } + log::debug!("scanning directory {:?}", job.path); + ignore_stack = job.ignore_stack.clone(); + new_ignore = None; + root_char_bag = snapshot.root_char_bag; + next_entry_id = self.next_entry_id.clone(); + drop(state); + } let mut dotgit_path = None; let mut root_canonical_path = None; @@ -3418,18 +3530,8 @@ impl BackgroundScanner { continue; } }; - let child_name = child_abs_path.file_name().unwrap(); let child_path: Arc = job.path.join(child_name).into(); - let child_metadata = match self.fs.metadata(&child_abs_path).await { - Ok(Some(metadata)) => metadata, - Ok(None) => continue, - Err(err) => { - log::error!("error processing {:?}: 
{:?}", child_abs_path, err); - continue; - } - }; - // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored if child_name == *GITIGNORE { match build_gitignore(&child_abs_path, self.fs.as_ref()).await { @@ -3473,6 +3575,26 @@ impl BackgroundScanner { dotgit_path = Some(child_path.clone()); } + { + let mut state = self.state.lock(); + if state.snapshot.is_abs_path_excluded(&child_abs_path) { + let relative_path = job.path.join(child_name); + log::debug!("skipping excluded child entry {relative_path:?}"); + state.remove_path(&relative_path); + continue; + } + drop(state); + } + + let child_metadata = match self.fs.metadata(&child_abs_path).await { + Ok(Some(metadata)) => metadata, + Ok(None) => continue, + Err(err) => { + log::error!("error processing {child_abs_path:?}: {err:?}"); + continue; + } + }; + let mut child_entry = Entry::new( child_path.clone(), &child_metadata, @@ -3653,19 +3775,16 @@ impl BackgroundScanner { self.next_entry_id.as_ref(), state.snapshot.root_char_bag, ); - fs_entry.is_ignored = ignore_stack.is_all(); + let is_dir = fs_entry.is_dir(); + fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir); fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path); - if !fs_entry.is_ignored { - if !fs_entry.is_dir() { - if let Some((work_dir, repo)) = - state.snapshot.local_repo_for_path(&path) - { - if let Ok(repo_path) = path.strip_prefix(work_dir.0) { - let repo_path = RepoPath(repo_path.into()); - let repo = repo.repo_ptr.lock(); - fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); - } + if !is_dir && !fs_entry.is_ignored { + if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) { + if let Ok(repo_path) = path.strip_prefix(work_dir.0) { + let repo_path = RepoPath(repo_path.into()); + let repo = repo.repo_ptr.lock(); + fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime); } } } @@ -3824,8 +3943,7 @@ impl BackgroundScanner { 
ignore_stack.clone() }; - // Scan any directories that were previously ignored and weren't - // previously scanned. + // Scan any directories that were previously ignored and weren't previously scanned. if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() { let state = self.state.lock(); if state.should_scan_directory(&entry) { @@ -4001,6 +4119,12 @@ impl BackgroundScanner { } } +fn is_git_related(abs_path: &Path) -> bool { + abs_path + .components() + .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE) +} + fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag { let mut result = root_char_bag; result.extend( diff --git a/crates/project2/src/worktree_tests.rs b/crates/project2/src/worktree_tests.rs index bf195f24c4cae69b3e0cbc62e584757375490adb..df7307f694cbead126690e6fa270023ff4847926 100644 --- a/crates/project2/src/worktree_tests.rs +++ b/crates/project2/src/worktree_tests.rs @@ -1,2141 +1,2310 @@ -// use crate::{ -// worktree::{Event, Snapshot, WorktreeModelHandle}, -// Entry, EntryKind, PathChange, Worktree, -// }; -// use anyhow::Result; -// use client2::Client; -// use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; -// use git::GITIGNORE; -// use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext}; -// use parking_lot::Mutex; -// use postage::stream::Stream; -// use pretty_assertions::assert_eq; -// use rand::prelude::*; -// use serde_json::json; -// use std::{ -// env, -// fmt::Write, -// mem, -// path::{Path, PathBuf}, -// sync::Arc, -// }; -// use util::{http::FakeHttpClient, test::temp_tree, ResultExt}; - -// #[gpui::test] -// async fn test_traversal(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "a/b\n", -// "a": { -// "b": "", -// "c": "", -// } -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs, -// Default::default(), 
-// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(false) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new(".gitignore"), -// Path::new("a"), -// Path::new("a/c"), -// ] -// ); -// assert_eq!( -// tree.entries(true) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new(".gitignore"), -// Path::new("a"), -// Path::new("a/b"), -// Path::new("a/c"), -// ] -// ); -// }) -// } - -// #[gpui::test] -// async fn test_descendent_entries(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "a": "", -// "b": { -// "c": { -// "d": "" -// }, -// "e": {} -// }, -// "f": "", -// "g": { -// "h": {} -// }, -// "i": { -// "j": { -// "k": "" -// }, -// "l": { - -// } -// }, -// ".gitignore": "i/j\n", -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.descendent_entries(false, false, Path::new("b")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("b/c/d"),] -// ); -// assert_eq!( -// tree.descendent_entries(true, false, Path::new("b")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new("b"), -// Path::new("b/c"), -// Path::new("b/c/d"), -// Path::new("b/e"), -// ] -// ); - -// assert_eq!( -// tree.descendent_entries(false, false, Path::new("g")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// Vec::::new() -// ); -// assert_eq!( -// tree.descendent_entries(true, false, Path::new("g")) -// .map(|entry| 
entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("g"), Path::new("g/h"),] -// ); -// }); - -// // Expand gitignored directory. -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("i/j").into()]) -// }) -// .recv() -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.descendent_entries(false, false, Path::new("i")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// Vec::::new() -// ); -// assert_eq!( -// tree.descendent_entries(false, true, Path::new("i")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("i/j/k")] -// ); -// assert_eq!( -// tree.descendent_entries(true, false, Path::new("i")) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![Path::new("i"), Path::new("i/l"),] -// ); -// }) -// } - -// #[gpui::test(iterations = 10)] -// async fn test_circular_symlinks(executor: Arc, cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "lib": { -// "a": { -// "a.txt": "" -// }, -// "b": { -// "b.txt": "" -// } -// } -// }), -// ) -// .await; -// fs.insert_symlink("/root/lib/a/lib", "..".into()).await; -// fs.insert_symlink("/root/lib/b/lib", "..".into()).await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(false) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new("lib"), -// Path::new("lib/a"), -// Path::new("lib/a/a.txt"), -// Path::new("lib/a/lib"), -// Path::new("lib/b"), -// Path::new("lib/b/b.txt"), -// Path::new("lib/b/lib"), -// ] -// ); -// }); - -// fs.rename( -// Path::new("/root/lib/a/lib"), -// 
Path::new("/root/lib/a/lib-2"), -// Default::default(), -// ) -// .await -// .unwrap(); -// executor.run_until_parked(); -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(false) -// .map(|entry| entry.path.as_ref()) -// .collect::>(), -// vec![ -// Path::new(""), -// Path::new("lib"), -// Path::new("lib/a"), -// Path::new("lib/a/a.txt"), -// Path::new("lib/a/lib-2"), -// Path::new("lib/b"), -// Path::new("lib/b/b.txt"), -// Path::new("lib/b/lib"), -// ] -// ); -// }); -// } - -// #[gpui::test] -// async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "dir1": { -// "deps": { -// // symlinks here -// }, -// "src": { -// "a.rs": "", -// "b.rs": "", -// }, -// }, -// "dir2": { -// "src": { -// "c.rs": "", -// "d.rs": "", -// } -// }, -// "dir3": { -// "deps": {}, -// "src": { -// "e.rs": "", -// "f.rs": "", -// }, -// } -// }), -// ) -// .await; - -// // These symlinks point to directories outside of the worktree's root, dir1. -// fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into()) -// .await; -// fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into()) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root/dir1"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// let tree_updates = Arc::new(Mutex::new(Vec::new())); -// tree.update(cx, |_, cx| { -// let tree_updates = tree_updates.clone(); -// cx.subscribe(&tree, move |_, _, event, _| { -// if let Event::UpdatedEntries(update) = event { -// tree_updates.lock().extend( -// update -// .iter() -// .map(|(path, _, change)| (path.clone(), *change)), -// ); -// } -// }) -// .detach(); -// }); - -// // The symlinked directories are not scanned by default. 
-// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_external)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new("deps"), false), -// (Path::new("deps/dep-dir2"), true), -// (Path::new("deps/dep-dir3"), true), -// (Path::new("src"), false), -// (Path::new("src/a.rs"), false), -// (Path::new("src/b.rs"), false), -// ] -// ); - -// assert_eq!( -// tree.entry_for_path("deps/dep-dir2").unwrap().kind, -// EntryKind::UnloadedDir -// ); -// }); - -// // Expand one of the symlinked directories. -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()]) -// }) -// .recv() -// .await; - -// // The expanded directory's contents are loaded. Subdirectories are -// // not scanned yet. -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_external)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new("deps"), false), -// (Path::new("deps/dep-dir2"), true), -// (Path::new("deps/dep-dir3"), true), -// (Path::new("deps/dep-dir3/deps"), true), -// (Path::new("deps/dep-dir3/src"), true), -// (Path::new("src"), false), -// (Path::new("src/a.rs"), false), -// (Path::new("src/b.rs"), false), -// ] -// ); -// }); -// assert_eq!( -// mem::take(&mut *tree_updates.lock()), -// &[ -// (Path::new("deps/dep-dir3").into(), PathChange::Loaded), -// (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded), -// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded) -// ] -// ); - -// // Expand a subdirectory of one of the symlinked directories. -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()]) -// }) -// .recv() -// .await; - -// // The expanded subdirectory's contents are loaded. 
-// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_external)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new("deps"), false), -// (Path::new("deps/dep-dir2"), true), -// (Path::new("deps/dep-dir3"), true), -// (Path::new("deps/dep-dir3/deps"), true), -// (Path::new("deps/dep-dir3/src"), true), -// (Path::new("deps/dep-dir3/src/e.rs"), true), -// (Path::new("deps/dep-dir3/src/f.rs"), true), -// (Path::new("src"), false), -// (Path::new("src/a.rs"), false), -// (Path::new("src/b.rs"), false), -// ] -// ); -// }); - -// assert_eq!( -// mem::take(&mut *tree_updates.lock()), -// &[ -// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded), -// ( -// Path::new("deps/dep-dir3/src/e.rs").into(), -// PathChange::Loaded -// ), -// ( -// Path::new("deps/dep-dir3/src/f.rs").into(), -// PathChange::Loaded -// ) -// ] -// ); -// } - -// #[gpui::test] -// async fn test_open_gitignored_files(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "node_modules\n", -// "one": { -// "node_modules": { -// "a": { -// "a1.js": "a1", -// "a2.js": "a2", -// }, -// "b": { -// "b1.js": "b1", -// "b2.js": "b2", -// }, -// "c": { -// "c1.js": "c1", -// "c2.js": "c2", -// } -// }, -// }, -// "two": { -// "x.js": "", -// "y.js": "", -// }, -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_ignored)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("one"), false), -// (Path::new("one/node_modules"), 
true), -// (Path::new("two"), false), -// (Path::new("two/x.js"), false), -// (Path::new("two/y.js"), false), -// ] -// ); -// }); - -// // Open a file that is nested inside of a gitignored directory that -// // has not yet been expanded. -// let prev_read_dir_count = fs.read_dir_call_count(); -// let buffer = tree -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx) -// }) -// .await -// .unwrap(); - -// tree.read_with(cx, |tree, cx| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_ignored)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("one"), false), -// (Path::new("one/node_modules"), true), -// (Path::new("one/node_modules/a"), true), -// (Path::new("one/node_modules/b"), true), -// (Path::new("one/node_modules/b/b1.js"), true), -// (Path::new("one/node_modules/b/b2.js"), true), -// (Path::new("one/node_modules/c"), true), -// (Path::new("two"), false), -// (Path::new("two/x.js"), false), -// (Path::new("two/y.js"), false), -// ] -// ); - -// assert_eq!( -// buffer.read(cx).file().unwrap().path().as_ref(), -// Path::new("one/node_modules/b/b1.js") -// ); - -// // Only the newly-expanded directories are scanned. -// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2); -// }); - -// // Open another file in a different subdirectory of the same -// // gitignored directory. 
-// let prev_read_dir_count = fs.read_dir_call_count(); -// let buffer = tree -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx) -// }) -// .await -// .unwrap(); - -// tree.read_with(cx, |tree, cx| { -// assert_eq!( -// tree.entries(true) -// .map(|entry| (entry.path.as_ref(), entry.is_ignored)) -// .collect::>(), -// vec![ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("one"), false), -// (Path::new("one/node_modules"), true), -// (Path::new("one/node_modules/a"), true), -// (Path::new("one/node_modules/a/a1.js"), true), -// (Path::new("one/node_modules/a/a2.js"), true), -// (Path::new("one/node_modules/b"), true), -// (Path::new("one/node_modules/b/b1.js"), true), -// (Path::new("one/node_modules/b/b2.js"), true), -// (Path::new("one/node_modules/c"), true), -// (Path::new("two"), false), -// (Path::new("two/x.js"), false), -// (Path::new("two/y.js"), false), -// ] -// ); - -// assert_eq!( -// buffer.read(cx).file().unwrap().path().as_ref(), -// Path::new("one/node_modules/a/a2.js") -// ); - -// // Only the newly-expanded directory is scanned. -// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1); -// }); - -// // No work happens when files and directories change within an unloaded directory. 
-// let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count(); -// fs.create_dir("/root/one/node_modules/c/lib".as_ref()) -// .await -// .unwrap(); -// cx.foreground().run_until_parked(); -// assert_eq!( -// fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count, -// 0 -// ); -// } - -// #[gpui::test] -// async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "node_modules\n", -// "a": { -// "a.js": "", -// }, -// "b": { -// "b.js": "", -// }, -// "node_modules": { -// "c": { -// "c.js": "", -// }, -// "d": { -// "d.js": "", -// "e": { -// "e1.js": "", -// "e2.js": "", -// }, -// "f": { -// "f1.js": "", -// "f2.js": "", -// } -// }, -// }, -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// // Open a file within the gitignored directory, forcing some of its -// // subdirectories to be read, but not all. -// let read_dir_count_1 = fs.read_dir_call_count(); -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()]) -// }) -// .recv() -// .await; - -// // Those subdirectories are now loaded. 
-// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|e| (e.path.as_ref(), e.is_ignored)) -// .collect::>(), -// &[ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("a"), false), -// (Path::new("a/a.js"), false), -// (Path::new("b"), false), -// (Path::new("b/b.js"), false), -// (Path::new("node_modules"), true), -// (Path::new("node_modules/c"), true), -// (Path::new("node_modules/d"), true), -// (Path::new("node_modules/d/d.js"), true), -// (Path::new("node_modules/d/e"), true), -// (Path::new("node_modules/d/f"), true), -// ] -// ); -// }); -// let read_dir_count_2 = fs.read_dir_call_count(); -// assert_eq!(read_dir_count_2 - read_dir_count_1, 2); - -// // Update the gitignore so that node_modules is no longer ignored, -// // but a subdirectory is ignored -// fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default()) -// .await -// .unwrap(); -// cx.foreground().run_until_parked(); - -// // All of the directories that are no longer ignored are now loaded. -// tree.read_with(cx, |tree, _| { -// assert_eq!( -// tree.entries(true) -// .map(|e| (e.path.as_ref(), e.is_ignored)) -// .collect::>(), -// &[ -// (Path::new(""), false), -// (Path::new(".gitignore"), false), -// (Path::new("a"), false), -// (Path::new("a/a.js"), false), -// (Path::new("b"), false), -// (Path::new("b/b.js"), false), -// // This directory is no longer ignored -// (Path::new("node_modules"), false), -// (Path::new("node_modules/c"), false), -// (Path::new("node_modules/c/c.js"), false), -// (Path::new("node_modules/d"), false), -// (Path::new("node_modules/d/d.js"), false), -// // This subdirectory is now ignored -// (Path::new("node_modules/d/e"), true), -// (Path::new("node_modules/d/f"), false), -// (Path::new("node_modules/d/f/f1.js"), false), -// (Path::new("node_modules/d/f/f2.js"), false), -// ] -// ); -// }); - -// // Each of the newly-loaded directories is scanned only once. 
-// let read_dir_count_3 = fs.read_dir_call_count(); -// assert_eq!(read_dir_count_3 - read_dir_count_2, 2); -// } - -// #[gpui::test(iterations = 10)] -// async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n", -// "tree": { -// ".git": {}, -// ".gitignore": "ignored-dir\n", -// "tracked-dir": { -// "tracked-file1": "", -// "ancestor-ignored-file1": "", -// }, -// "ignored-dir": { -// "ignored-file1": "" -// } -// } -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// "/root/tree".as_ref(), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// tree.read_with(cx, |tree, _| { -// tree.as_local() -// .unwrap() -// .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()]) -// }) -// .recv() -// .await; - -// cx.read(|cx| { -// let tree = tree.read(cx); -// assert!( -// !tree -// .entry_for_path("tracked-dir/tracked-file1") -// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("tracked-dir/ancestor-ignored-file1") -// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("ignored-dir/ignored-file1") -// .unwrap() -// .is_ignored -// ); -// }); - -// fs.create_file( -// "/root/tree/tracked-dir/tracked-file2".as_ref(), -// Default::default(), -// ) -// .await -// .unwrap(); -// fs.create_file( -// "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(), -// Default::default(), -// ) -// .await -// .unwrap(); -// fs.create_file( -// "/root/tree/ignored-dir/ignored-file2".as_ref(), -// Default::default(), -// ) -// .await -// .unwrap(); - -// cx.foreground().run_until_parked(); -// cx.read(|cx| { -// let tree = tree.read(cx); -// assert!( -// !tree -// .entry_for_path("tracked-dir/tracked-file2") 
-// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("tracked-dir/ancestor-ignored-file2") -// .unwrap() -// .is_ignored -// ); -// assert!( -// tree.entry_for_path("ignored-dir/ignored-file2") -// .unwrap() -// .is_ignored -// ); -// assert!(tree.entry_for_path(".git").unwrap().is_ignored); -// }); -// } - -// #[gpui::test] -// async fn test_write_file(cx: &mut TestAppContext) { -// let dir = temp_tree(json!({ -// ".git": {}, -// ".gitignore": "ignored-dir\n", -// "tracked-dir": {}, -// "ignored-dir": {} -// })); - -// let tree = Worktree::local( -// build_client(cx), -// dir.path(), -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; -// tree.flush_fs_events(cx).await; - -// tree.update(cx, |tree, cx| { -// tree.as_local().unwrap().write_file( -// Path::new("tracked-dir/file.txt"), -// "hello".into(), -// Default::default(), -// cx, -// ) -// }) -// .await -// .unwrap(); -// tree.update(cx, |tree, cx| { -// tree.as_local().unwrap().write_file( -// Path::new("ignored-dir/file.txt"), -// "world".into(), -// Default::default(), -// cx, -// ) -// }) -// .await -// .unwrap(); - -// tree.read_with(cx, |tree, _| { -// let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); -// let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); -// assert!(!tracked.is_ignored); -// assert!(ignored.is_ignored); -// }); -// } - -// #[gpui::test(iterations = 30)] -// async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// "b": {}, -// "c": {}, -// "d": {}, -// }), -// ) -// .await; - -// let tree = Worktree::local( -// build_client(cx), -// "/root".as_ref(), -// true, -// fs, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let snapshot1 = tree.update(cx, 
|tree, cx| { -// let tree = tree.as_local_mut().unwrap(); -// let snapshot = Arc::new(Mutex::new(tree.snapshot())); -// let _ = tree.observe_updates(0, cx, { -// let snapshot = snapshot.clone(); -// move |update| { -// snapshot.lock().apply_remote_update(update).unwrap(); -// async { true } -// } -// }); -// snapshot -// }); - -// let entry = tree -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/e".as_ref(), true, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_dir()); - -// cx.foreground().run_until_parked(); -// tree.read_with(cx, |tree, _| { -// assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir); -// }); - -// let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// assert_eq!( -// snapshot1.lock().entries(true).collect::>(), -// snapshot2.entries(true).collect::>() -// ); -// } - -// #[gpui::test] -// async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { -// let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); - -// let fs_fake = FakeFs::new(cx.background()); -// fs_fake -// .insert_tree( -// "/root", -// json!({ -// "a": {}, -// }), -// ) -// .await; - -// let tree_fake = Worktree::local( -// client_fake, -// "/root".as_ref(), -// true, -// fs_fake, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let entry = tree_fake -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/b/c/d.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_fake.read_with(cx, |tree, _| { -// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); -// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); -// assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); -// }); - -// let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); - -// let fs_real 
= Arc::new(RealFs); -// let temp_root = temp_tree(json!({ -// "a": {} -// })); - -// let tree_real = Worktree::local( -// client_real, -// temp_root.path(), -// true, -// fs_real, -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let entry = tree_real -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/b/c/d.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_real.read_with(cx, |tree, _| { -// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); -// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); -// assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); -// }); - -// // Test smallest change -// let entry = tree_real -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("a/b/c/e.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_real.read_with(cx, |tree, _| { -// assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file()); -// }); - -// // Test largest change -// let entry = tree_real -// .update(cx, |tree, cx| { -// tree.as_local_mut() -// .unwrap() -// .create_entry("d/e/f/g.txt".as_ref(), false, cx) -// }) -// .await -// .unwrap(); -// assert!(entry.is_file()); - -// cx.foreground().run_until_parked(); -// tree_real.read_with(cx, |tree, _| { -// assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file()); -// assert!(tree.entry_for_path("d/e/f").unwrap().is_dir()); -// assert!(tree.entry_for_path("d/e/").unwrap().is_dir()); -// assert!(tree.entry_for_path("d/").unwrap().is_dir()); -// }); -// } - -// #[gpui::test(iterations = 100)] -// async fn test_random_worktree_operations_during_initial_scan( -// cx: &mut TestAppContext, -// mut rng: StdRng, -// ) { -// let operations = env::var("OPERATIONS") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(5); -// let 
initial_entries = env::var("INITIAL_ENTRIES") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(20); - -// let root_dir = Path::new("/test"); -// let fs = FakeFs::new(cx.background()) as Arc; -// fs.as_fake().insert_tree(root_dir, json!({})).await; -// for _ in 0..initial_entries { -// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; -// } -// log::info!("generated initial tree"); - -// let worktree = Worktree::local( -// build_client(cx), -// root_dir, -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())]; -// let updates = Arc::new(Mutex::new(Vec::new())); -// worktree.update(cx, |tree, cx| { -// check_worktree_change_events(tree, cx); - -// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { -// let updates = updates.clone(); -// move |update| { -// updates.lock().push(update); -// async { true } -// } -// }); -// }); - -// for _ in 0..operations { -// worktree -// .update(cx, |worktree, cx| { -// randomly_mutate_worktree(worktree, &mut rng, cx) -// }) -// .await -// .log_err(); -// worktree.read_with(cx, |tree, _| { -// tree.as_local().unwrap().snapshot().check_invariants(true) -// }); - -// if rng.gen_bool(0.6) { -// snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())); -// } -// } - -// worktree -// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) -// .await; - -// cx.foreground().run_until_parked(); - -// let final_snapshot = worktree.read_with(cx, |tree, _| { -// let tree = tree.as_local().unwrap(); -// let snapshot = tree.snapshot(); -// snapshot.check_invariants(true); -// snapshot -// }); - -// for (i, snapshot) in snapshots.into_iter().enumerate().rev() { -// let mut updated_snapshot = snapshot.clone(); -// for update in updates.lock().iter() { -// if update.scan_id >= updated_snapshot.scan_id() as u64 { -// updated_snapshot -// 
.apply_remote_update(update.clone()) -// .unwrap(); -// } -// } - -// assert_eq!( -// updated_snapshot.entries(true).collect::>(), -// final_snapshot.entries(true).collect::>(), -// "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}", -// ); -// } -// } - -// #[gpui::test(iterations = 100)] -// async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) { -// let operations = env::var("OPERATIONS") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(40); -// let initial_entries = env::var("INITIAL_ENTRIES") -// .map(|o| o.parse().unwrap()) -// .unwrap_or(20); - -// let root_dir = Path::new("/test"); -// let fs = FakeFs::new(cx.background()) as Arc; -// fs.as_fake().insert_tree(root_dir, json!({})).await; -// for _ in 0..initial_entries { -// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; -// } -// log::info!("generated initial tree"); - -// let worktree = Worktree::local( -// build_client(cx), -// root_dir, -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let updates = Arc::new(Mutex::new(Vec::new())); -// worktree.update(cx, |tree, cx| { -// check_worktree_change_events(tree, cx); - -// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { -// let updates = updates.clone(); -// move |update| { -// updates.lock().push(update); -// async { true } -// } -// }); -// }); - -// worktree -// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) -// .await; - -// fs.as_fake().pause_events(); -// let mut snapshots = Vec::new(); -// let mut mutations_len = operations; -// while mutations_len > 1 { -// if rng.gen_bool(0.2) { -// worktree -// .update(cx, |worktree, cx| { -// randomly_mutate_worktree(worktree, &mut rng, cx) -// }) -// .await -// .log_err(); -// } else { -// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; -// } - -// let buffered_event_count = fs.as_fake().buffered_event_count(); -// if buffered_event_count > 0 && rng.gen_bool(0.3) { 
-// let len = rng.gen_range(0..=buffered_event_count); -// log::info!("flushing {} events", len); -// fs.as_fake().flush_events(len); -// } else { -// randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await; -// mutations_len -= 1; -// } - -// cx.foreground().run_until_parked(); -// if rng.gen_bool(0.2) { -// log::info!("storing snapshot {}", snapshots.len()); -// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// snapshots.push(snapshot); -// } -// } - -// log::info!("quiescing"); -// fs.as_fake().flush_events(usize::MAX); -// cx.foreground().run_until_parked(); - -// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// snapshot.check_invariants(true); -// let expanded_paths = snapshot -// .expanded_entries() -// .map(|e| e.path.clone()) -// .collect::>(); - -// { -// let new_worktree = Worktree::local( -// build_client(cx), -// root_dir, -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); -// new_worktree -// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) -// .await; -// new_worktree -// .update(cx, |tree, _| { -// tree.as_local_mut() -// .unwrap() -// .refresh_entries_for_paths(expanded_paths) -// }) -// .recv() -// .await; -// let new_snapshot = -// new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); -// assert_eq!( -// snapshot.entries_without_ids(true), -// new_snapshot.entries_without_ids(true) -// ); -// } - -// for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() { -// for update in updates.lock().iter() { -// if update.scan_id >= prev_snapshot.scan_id() as u64 { -// prev_snapshot.apply_remote_update(update.clone()).unwrap(); -// } -// } - -// assert_eq!( -// prev_snapshot -// .entries(true) -// .map(ignore_pending_dir) -// .collect::>(), -// snapshot -// .entries(true) -// .map(ignore_pending_dir) -// .collect::>(), -// "wrong updates after snapshot {i}: {updates:#?}", 
-// ); -// } - -// fn ignore_pending_dir(entry: &Entry) -> Entry { -// let mut entry = entry.clone(); -// if entry.kind.is_dir() { -// entry.kind = EntryKind::Dir -// } -// entry -// } -// } - -// // The worktree's `UpdatedEntries` event can be used to follow along with -// // all changes to the worktree's snapshot. -// fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext) { -// let mut entries = tree.entries(true).cloned().collect::>(); -// cx.subscribe(&cx.handle(), move |tree, _, event, _| { -// if let Event::UpdatedEntries(changes) = event { -// for (path, _, change_type) in changes.iter() { -// let entry = tree.entry_for_path(&path).cloned(); -// let ix = match entries.binary_search_by_key(&path, |e| &e.path) { -// Ok(ix) | Err(ix) => ix, -// }; -// match change_type { -// PathChange::Added => entries.insert(ix, entry.unwrap()), -// PathChange::Removed => drop(entries.remove(ix)), -// PathChange::Updated => { -// let entry = entry.unwrap(); -// let existing_entry = entries.get_mut(ix).unwrap(); -// assert_eq!(existing_entry.path, entry.path); -// *existing_entry = entry; -// } -// PathChange::AddedOrUpdated | PathChange::Loaded => { -// let entry = entry.unwrap(); -// if entries.get(ix).map(|e| &e.path) == Some(&entry.path) { -// *entries.get_mut(ix).unwrap() = entry; -// } else { -// entries.insert(ix, entry); -// } -// } -// } -// } - -// let new_entries = tree.entries(true).cloned().collect::>(); -// assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes); -// } -// }) -// .detach(); -// } - -// fn randomly_mutate_worktree( -// worktree: &mut Worktree, -// rng: &mut impl Rng, -// cx: &mut ModelContext, -// ) -> Task> { -// log::info!("mutating worktree"); -// let worktree = worktree.as_local_mut().unwrap(); -// let snapshot = worktree.snapshot(); -// let entry = snapshot.entries(false).choose(rng).unwrap(); - -// match rng.gen_range(0_u32..100) { -// 0..=33 if entry.path.as_ref() != Path::new("") => { -// 
log::info!("deleting entry {:?} ({})", entry.path, entry.id.0); -// worktree.delete_entry(entry.id, cx).unwrap() -// } -// ..=66 if entry.path.as_ref() != Path::new("") => { -// let other_entry = snapshot.entries(false).choose(rng).unwrap(); -// let new_parent_path = if other_entry.is_dir() { -// other_entry.path.clone() -// } else { -// other_entry.path.parent().unwrap().into() -// }; -// let mut new_path = new_parent_path.join(random_filename(rng)); -// if new_path.starts_with(&entry.path) { -// new_path = random_filename(rng).into(); -// } - -// log::info!( -// "renaming entry {:?} ({}) to {:?}", -// entry.path, -// entry.id.0, -// new_path -// ); -// let task = worktree.rename_entry(entry.id, new_path, cx).unwrap(); -// cx.foreground().spawn(async move { -// task.await?; -// Ok(()) -// }) -// } -// _ => { -// let task = if entry.is_dir() { -// let child_path = entry.path.join(random_filename(rng)); -// let is_dir = rng.gen_bool(0.3); -// log::info!( -// "creating {} at {:?}", -// if is_dir { "dir" } else { "file" }, -// child_path, -// ); -// worktree.create_entry(child_path, is_dir, cx) -// } else { -// log::info!("overwriting file {:?} ({})", entry.path, entry.id.0); -// worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx) -// }; -// cx.foreground().spawn(async move { -// task.await?; -// Ok(()) -// }) -// } -// } -// } - -// async fn randomly_mutate_fs( -// fs: &Arc, -// root_path: &Path, -// insertion_probability: f64, -// rng: &mut impl Rng, -// ) { -// log::info!("mutating fs"); -// let mut files = Vec::new(); -// let mut dirs = Vec::new(); -// for path in fs.as_fake().paths(false) { -// if path.starts_with(root_path) { -// if fs.is_file(&path).await { -// files.push(path); -// } else { -// dirs.push(path); -// } -// } -// } - -// if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) { -// let path = dirs.choose(rng).unwrap(); -// let new_path = path.join(random_filename(rng)); - -// if rng.gen() { -// 
log::info!( -// "creating dir {:?}", -// new_path.strip_prefix(root_path).unwrap() -// ); -// fs.create_dir(&new_path).await.unwrap(); -// } else { -// log::info!( -// "creating file {:?}", -// new_path.strip_prefix(root_path).unwrap() -// ); -// fs.create_file(&new_path, Default::default()).await.unwrap(); -// } -// } else if rng.gen_bool(0.05) { -// let ignore_dir_path = dirs.choose(rng).unwrap(); -// let ignore_path = ignore_dir_path.join(&*GITIGNORE); - -// let subdirs = dirs -// .iter() -// .filter(|d| d.starts_with(&ignore_dir_path)) -// .cloned() -// .collect::>(); -// let subfiles = files -// .iter() -// .filter(|d| d.starts_with(&ignore_dir_path)) -// .cloned() -// .collect::>(); -// let files_to_ignore = { -// let len = rng.gen_range(0..=subfiles.len()); -// subfiles.choose_multiple(rng, len) -// }; -// let dirs_to_ignore = { -// let len = rng.gen_range(0..subdirs.len()); -// subdirs.choose_multiple(rng, len) -// }; - -// let mut ignore_contents = String::new(); -// for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) { -// writeln!( -// ignore_contents, -// "{}", -// path_to_ignore -// .strip_prefix(&ignore_dir_path) -// .unwrap() -// .to_str() -// .unwrap() -// ) -// .unwrap(); -// } -// log::info!( -// "creating gitignore {:?} with contents:\n{}", -// ignore_path.strip_prefix(&root_path).unwrap(), -// ignore_contents -// ); -// fs.save( -// &ignore_path, -// &ignore_contents.as_str().into(), -// Default::default(), -// ) -// .await -// .unwrap(); -// } else { -// let old_path = { -// let file_path = files.choose(rng); -// let dir_path = dirs[1..].choose(rng); -// file_path.into_iter().chain(dir_path).choose(rng).unwrap() -// }; - -// let is_rename = rng.gen(); -// if is_rename { -// let new_path_parent = dirs -// .iter() -// .filter(|d| !d.starts_with(old_path)) -// .choose(rng) -// .unwrap(); - -// let overwrite_existing_dir = -// !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3); -// let new_path = if overwrite_existing_dir { -// 
fs.remove_dir( -// &new_path_parent, -// RemoveOptions { -// recursive: true, -// ignore_if_not_exists: true, -// }, -// ) -// .await -// .unwrap(); -// new_path_parent.to_path_buf() -// } else { -// new_path_parent.join(random_filename(rng)) -// }; - -// log::info!( -// "renaming {:?} to {}{:?}", -// old_path.strip_prefix(&root_path).unwrap(), -// if overwrite_existing_dir { -// "overwrite " -// } else { -// "" -// }, -// new_path.strip_prefix(&root_path).unwrap() -// ); -// fs.rename( -// &old_path, -// &new_path, -// fs::RenameOptions { -// overwrite: true, -// ignore_if_exists: true, -// }, -// ) -// .await -// .unwrap(); -// } else if fs.is_file(&old_path).await { -// log::info!( -// "deleting file {:?}", -// old_path.strip_prefix(&root_path).unwrap() -// ); -// fs.remove_file(old_path, Default::default()).await.unwrap(); -// } else { -// log::info!( -// "deleting dir {:?}", -// old_path.strip_prefix(&root_path).unwrap() -// ); -// fs.remove_dir( -// &old_path, -// RemoveOptions { -// recursive: true, -// ignore_if_not_exists: true, -// }, -// ) -// .await -// .unwrap(); -// } -// } -// } - -// fn random_filename(rng: &mut impl Rng) -> String { -// (0..6) -// .map(|_| rng.sample(rand::distributions::Alphanumeric)) -// .map(char::from) -// .collect() -// } - -// #[gpui::test] -// async fn test_rename_work_directory(cx: &mut TestAppContext) { -// let root = temp_tree(json!({ -// "projects": { -// "project1": { -// "a": "", -// "b": "", -// } -// }, - -// })); -// let root_path = root.path(); - -// let tree = Worktree::local( -// build_client(cx), -// root_path, -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// let repo = git_init(&root_path.join("projects/project1")); -// git_add("a", &repo); -// git_commit("init", &repo); -// std::fs::write(root_path.join("projects/project1/a"), "aa").ok(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// 
tree.flush_fs_events(cx).await; - -// cx.read(|cx| { -// let tree = tree.read(cx); -// let (work_dir, _) = tree.repositories().next().unwrap(); -// assert_eq!(work_dir.as_ref(), Path::new("projects/project1")); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project1/a")), -// Some(GitFileStatus::Modified) -// ); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project1/b")), -// Some(GitFileStatus::Added) -// ); -// }); - -// std::fs::rename( -// root_path.join("projects/project1"), -// root_path.join("projects/project2"), -// ) -// .ok(); -// tree.flush_fs_events(cx).await; - -// cx.read(|cx| { -// let tree = tree.read(cx); -// let (work_dir, _) = tree.repositories().next().unwrap(); -// assert_eq!(work_dir.as_ref(), Path::new("projects/project2")); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project2/a")), -// Some(GitFileStatus::Modified) -// ); -// assert_eq!( -// tree.status_for_file(Path::new("projects/project2/b")), -// Some(GitFileStatus::Added) -// ); -// }); -// } - -// #[gpui::test] -// async fn test_git_repository_for_path(cx: &mut TestAppContext) { -// let root = temp_tree(json!({ -// "c.txt": "", -// "dir1": { -// ".git": {}, -// "deps": { -// "dep1": { -// ".git": {}, -// "src": { -// "a.txt": "" -// } -// } -// }, -// "src": { -// "b.txt": "" -// } -// }, -// })); - -// let tree = Worktree::local( -// build_client(cx), -// root.path(), -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; -// tree.flush_fs_events(cx).await; - -// tree.read_with(cx, |tree, _cx| { -// let tree = tree.as_local().unwrap(); - -// assert!(tree.repository_for_path("c.txt".as_ref()).is_none()); - -// let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); -// assert_eq!( -// entry -// .work_directory(tree) -// .map(|directory| directory.as_ref().to_owned()), -// 
Some(Path::new("dir1").to_owned()) -// ); - -// let entry = tree -// .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref()) -// .unwrap(); -// assert_eq!( -// entry -// .work_directory(tree) -// .map(|directory| directory.as_ref().to_owned()), -// Some(Path::new("dir1/deps/dep1").to_owned()) -// ); - -// let entries = tree.files(false, 0); - -// let paths_with_repos = tree -// .entries_with_repositories(entries) -// .map(|(entry, repo)| { -// ( -// entry.path.as_ref(), -// repo.and_then(|repo| { -// repo.work_directory(&tree) -// .map(|work_directory| work_directory.0.to_path_buf()) -// }), -// ) -// }) -// .collect::>(); - -// assert_eq!( -// paths_with_repos, -// &[ -// (Path::new("c.txt"), None), -// ( -// Path::new("dir1/deps/dep1/src/a.txt"), -// Some(Path::new("dir1/deps/dep1").into()) -// ), -// (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())), -// ] -// ); -// }); - -// let repo_update_events = Arc::new(Mutex::new(vec![])); -// tree.update(cx, |_, cx| { -// let repo_update_events = repo_update_events.clone(); -// cx.subscribe(&tree, move |_, _, event, _| { -// if let Event::UpdatedGitRepositories(update) = event { -// repo_update_events.lock().push(update.clone()); -// } -// }) -// .detach(); -// }); - -// std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap(); -// tree.flush_fs_events(cx).await; - -// assert_eq!( -// repo_update_events.lock()[0] -// .iter() -// .map(|e| e.0.clone()) -// .collect::>>(), -// vec![Path::new("dir1").into()] -// ); - -// std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap(); -// tree.flush_fs_events(cx).await; - -// tree.read_with(cx, |tree, _cx| { -// let tree = tree.as_local().unwrap(); - -// assert!(tree -// .repository_for_path("dir1/src/b.txt".as_ref()) -// .is_none()); -// }); -// } - -// #[gpui::test] -// async fn test_git_status(deterministic: Arc, cx: &mut TestAppContext) { -// const IGNORE_RULE: &'static str = "**/target"; - -// let root = temp_tree(json!({ -// 
"project": { -// "a.txt": "a", -// "b.txt": "bb", -// "c": { -// "d": { -// "e.txt": "eee" -// } -// }, -// "f.txt": "ffff", -// "target": { -// "build_file": "???" -// }, -// ".gitignore": IGNORE_RULE -// }, - -// })); - -// const A_TXT: &'static str = "a.txt"; -// const B_TXT: &'static str = "b.txt"; -// const E_TXT: &'static str = "c/d/e.txt"; -// const F_TXT: &'static str = "f.txt"; -// const DOTGITIGNORE: &'static str = ".gitignore"; -// const BUILD_FILE: &'static str = "target/build_file"; -// let project_path = Path::new("project"); - -// // Set up git repository before creating the worktree. -// let work_dir = root.path().join("project"); -// let mut repo = git_init(work_dir.as_path()); -// repo.add_ignore_rule(IGNORE_RULE).unwrap(); -// git_add(A_TXT, &repo); -// git_add(E_TXT, &repo); -// git_add(DOTGITIGNORE, &repo); -// git_commit("Initial commit", &repo); - -// let tree = Worktree::local( -// build_client(cx), -// root.path(), -// true, -// Arc::new(RealFs), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// tree.flush_fs_events(cx).await; -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; -// deterministic.run_until_parked(); - -// // Check that the right git state is observed on startup -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!(snapshot.repositories().count(), 1); -// let (dir, _) = snapshot.repositories().next().unwrap(); -// assert_eq!(dir.as_ref(), Path::new("project")); - -// assert_eq!( -// snapshot.status_for_file(project_path.join(B_TXT)), -// Some(GitFileStatus::Added) -// ); -// assert_eq!( -// snapshot.status_for_file(project_path.join(F_TXT)), -// Some(GitFileStatus::Added) -// ); -// }); - -// // Modify a file in the working copy. -// std::fs::write(work_dir.join(A_TXT), "aa").unwrap(); -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// // The worktree detects that the file's git status has changed. 
-// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!( -// snapshot.status_for_file(project_path.join(A_TXT)), -// Some(GitFileStatus::Modified) -// ); -// }); - -// // Create a commit in the git repository. -// git_add(A_TXT, &repo); -// git_add(B_TXT, &repo); -// git_commit("Committing modified and added", &repo); -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// // The worktree detects that the files' git status have changed. -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!( -// snapshot.status_for_file(project_path.join(F_TXT)), -// Some(GitFileStatus::Added) -// ); -// assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None); -// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); -// }); - -// // Modify files in the working copy and perform git operations on other files. -// git_reset(0, &repo); -// git_remove_index(Path::new(B_TXT), &repo); -// git_stash(&mut repo); -// std::fs::write(work_dir.join(E_TXT), "eeee").unwrap(); -// std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap(); -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// // Check that more complex repo changes are tracked -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); - -// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); -// assert_eq!( -// snapshot.status_for_file(project_path.join(B_TXT)), -// Some(GitFileStatus::Added) -// ); -// assert_eq!( -// snapshot.status_for_file(project_path.join(E_TXT)), -// Some(GitFileStatus::Modified) -// ); -// }); - -// std::fs::remove_file(work_dir.join(B_TXT)).unwrap(); -// std::fs::remove_dir_all(work_dir.join("c")).unwrap(); -// std::fs::write( -// work_dir.join(DOTGITIGNORE), -// [IGNORE_RULE, "f.txt"].join("\n"), -// ) -// .unwrap(); - -// git_add(Path::new(DOTGITIGNORE), &repo); -// git_commit("Committing modified git ignore", 
&repo); - -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// let mut renamed_dir_name = "first_directory/second_directory"; -// const RENAMED_FILE: &'static str = "rf.txt"; - -// std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); -// std::fs::write( -// work_dir.join(renamed_dir_name).join(RENAMED_FILE), -// "new-contents", -// ) -// .unwrap(); - -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); -// assert_eq!( -// snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)), -// Some(GitFileStatus::Added) -// ); -// }); - -// renamed_dir_name = "new_first_directory/second_directory"; - -// std::fs::rename( -// work_dir.join("first_directory"), -// work_dir.join("new_first_directory"), -// ) -// .unwrap(); - -// tree.flush_fs_events(cx).await; -// deterministic.run_until_parked(); - -// tree.read_with(cx, |tree, _cx| { -// let snapshot = tree.snapshot(); - -// assert_eq!( -// snapshot.status_for_file( -// project_path -// .join(Path::new(renamed_dir_name)) -// .join(RENAMED_FILE) -// ), -// Some(GitFileStatus::Added) -// ); -// }); -// } - -// #[gpui::test] -// async fn test_propagate_git_statuses(cx: &mut TestAppContext) { -// let fs = FakeFs::new(cx.background()); -// fs.insert_tree( -// "/root", -// json!({ -// ".git": {}, -// "a": { -// "b": { -// "c1.txt": "", -// "c2.txt": "", -// }, -// "d": { -// "e1.txt": "", -// "e2.txt": "", -// "e3.txt": "", -// } -// }, -// "f": { -// "no-status.txt": "" -// }, -// "g": { -// "h1.txt": "", -// "h2.txt": "" -// }, - -// }), -// ) -// .await; - -// fs.set_status_for_repo_via_git_operation( -// &Path::new("/root/.git"), -// &[ -// (Path::new("a/b/c1.txt"), GitFileStatus::Added), -// (Path::new("a/d/e2.txt"), GitFileStatus::Modified), -// (Path::new("g/h2.txt"), GitFileStatus::Conflict), -// ], -// ); - -// let tree = Worktree::local( -// build_client(cx), -// 
Path::new("/root"), -// true, -// fs.clone(), -// Default::default(), -// &mut cx.to_async(), -// ) -// .await -// .unwrap(); - -// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) -// .await; - -// cx.foreground().run_until_parked(); -// let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); - -// check_propagated_statuses( -// &snapshot, -// &[ -// (Path::new(""), Some(GitFileStatus::Conflict)), -// (Path::new("a"), Some(GitFileStatus::Modified)), -// (Path::new("a/b"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c2.txt"), None), -// (Path::new("a/d"), Some(GitFileStatus::Modified)), -// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), -// (Path::new("f"), None), -// (Path::new("f/no-status.txt"), None), -// (Path::new("g"), Some(GitFileStatus::Conflict)), -// (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), -// ], -// ); - -// check_propagated_statuses( -// &snapshot, -// &[ -// (Path::new("a/b"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c2.txt"), None), -// (Path::new("a/d"), Some(GitFileStatus::Modified)), -// (Path::new("a/d/e1.txt"), None), -// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), -// (Path::new("f"), None), -// (Path::new("f/no-status.txt"), None), -// (Path::new("g"), Some(GitFileStatus::Conflict)), -// ], -// ); - -// check_propagated_statuses( -// &snapshot, -// &[ -// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), -// (Path::new("a/b/c2.txt"), None), -// (Path::new("a/d/e1.txt"), None), -// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), -// (Path::new("f/no-status.txt"), None), -// ], -// ); - -// #[track_caller] -// fn check_propagated_statuses( -// snapshot: &Snapshot, -// expected_statuses: &[(&Path, Option)], -// ) { -// let mut entries = expected_statuses -// .iter() -// .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone()) -// 
.collect::>(); -// snapshot.propagate_git_statuses(&mut entries); -// assert_eq!( -// entries -// .iter() -// .map(|e| (e.path.as_ref(), e.git_status)) -// .collect::>(), -// expected_statuses -// ); -// } -// } - -// fn build_client(cx: &mut TestAppContext) -> Arc { -// let http_client = FakeHttpClient::with_404_response(); -// cx.read(|cx| Client::new(http_client, cx)) -// } - -// #[track_caller] -// fn git_init(path: &Path) -> git2::Repository { -// git2::Repository::init(path).expect("Failed to initialize git repository") -// } - -// #[track_caller] -// fn git_add>(path: P, repo: &git2::Repository) { -// let path = path.as_ref(); -// let mut index = repo.index().expect("Failed to get index"); -// index.add_path(path).expect("Failed to add a.txt"); -// index.write().expect("Failed to write index"); -// } - -// #[track_caller] -// fn git_remove_index(path: &Path, repo: &git2::Repository) { -// let mut index = repo.index().expect("Failed to get index"); -// index.remove_path(path).expect("Failed to add a.txt"); -// index.write().expect("Failed to write index"); -// } - -// #[track_caller] -// fn git_commit(msg: &'static str, repo: &git2::Repository) { -// use git2::Signature; - -// let signature = Signature::now("test", "test@zed.dev").unwrap(); -// let oid = repo.index().unwrap().write_tree().unwrap(); -// let tree = repo.find_tree(oid).unwrap(); -// if let Some(head) = repo.head().ok() { -// let parent_obj = head.peel(git2::ObjectType::Commit).unwrap(); - -// let parent_commit = parent_obj.as_commit().unwrap(); - -// repo.commit( -// Some("HEAD"), -// &signature, -// &signature, -// msg, -// &tree, -// &[parent_commit], -// ) -// .expect("Failed to commit with parent"); -// } else { -// repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[]) -// .expect("Failed to commit"); -// } -// } - -// #[track_caller] -// fn git_stash(repo: &mut git2::Repository) { -// use git2::Signature; - -// let signature = Signature::now("test", "test@zed.dev").unwrap(); 
-// repo.stash_save(&signature, "N/A", None) -// .expect("Failed to stash"); -// } - -// #[track_caller] -// fn git_reset(offset: usize, repo: &git2::Repository) { -// let head = repo.head().expect("Couldn't get repo head"); -// let object = head.peel(git2::ObjectType::Commit).unwrap(); -// let commit = object.as_commit().unwrap(); -// let new_head = commit -// .parents() -// .inspect(|parnet| { -// parnet.message(); -// }) -// .skip(offset) -// .next() -// .expect("Not enough history"); -// repo.reset(&new_head.as_object(), git2::ResetType::Soft, None) -// .expect("Could not reset"); -// } - -// #[allow(dead_code)] -// #[track_caller] -// fn git_status(repo: &git2::Repository) -> collections::HashMap { -// repo.statuses(None) -// .unwrap() -// .iter() -// .map(|status| (status.path().unwrap().to_string(), status.status())) -// .collect() -// } +use crate::{ + project_settings::ProjectSettings, + worktree::{Event, Snapshot, WorktreeModelHandle}, + Entry, EntryKind, PathChange, Project, Worktree, +}; +use anyhow::Result; +use client::Client; +use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions}; +use git::GITIGNORE; +use gpui::{ModelContext, Task, TestAppContext}; +use parking_lot::Mutex; +use postage::stream::Stream; +use pretty_assertions::assert_eq; +use rand::prelude::*; +use serde_json::json; +use settings::SettingsStore; +use std::{ + env, + fmt::Write, + mem, + path::{Path, PathBuf}, + sync::Arc, +}; +use util::{http::FakeHttpClient, test::temp_tree, ResultExt}; + +#[gpui::test] +async fn test_traversal(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "a/b\n", + "a": { + "b": "", + "c": "", + } + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + 
.await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(false) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new(".gitignore"), + Path::new("a"), + Path::new("a/c"), + ] + ); + assert_eq!( + tree.entries(true) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new(".gitignore"), + Path::new("a"), + Path::new("a/b"), + Path::new("a/c"), + ] + ); + }) +} + +#[gpui::test] +async fn test_descendent_entries(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "a": "", + "b": { + "c": { + "d": "" + }, + "e": {} + }, + "f": "", + "g": { + "h": {} + }, + "i": { + "j": { + "k": "" + }, + "l": { + + } + }, + ".gitignore": "i/j\n", + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.descendent_entries(false, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("b/c/d"),] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("b")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new("b"), + Path::new("b/c"), + Path::new("b/c/d"), + Path::new("b/e"), + ] + ); + + assert_eq!( + tree.descendent_entries(false, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + Vec::::new() + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("g")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("g"), Path::new("g/h"),] + ); + }); + + // Expand gitignored directory. 
+ tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("i/j").into()]) + }) + .recv() + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.descendent_entries(false, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + Vec::::new() + ); + assert_eq!( + tree.descendent_entries(false, true, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("i/j/k")] + ); + assert_eq!( + tree.descendent_entries(true, false, Path::new("i")) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![Path::new("i"), Path::new("i/l"),] + ); + }) +} + +#[gpui::test(iterations = 10)] +async fn test_circular_symlinks(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "lib": { + "a": { + "a.txt": "" + }, + "b": { + "b.txt": "" + } + } + }), + ) + .await; + fs.insert_symlink("/root/lib/a/lib", "..".into()).await; + fs.insert_symlink("/root/lib/b/lib", "..".into()).await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(false) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new("lib"), + Path::new("lib/a"), + Path::new("lib/a/a.txt"), + Path::new("lib/a/lib"), + Path::new("lib/b"), + Path::new("lib/b/b.txt"), + Path::new("lib/b/lib"), + ] + ); + }); + + fs.rename( + Path::new("/root/lib/a/lib"), + Path::new("/root/lib/a/lib-2"), + Default::default(), + ) + .await + .unwrap(); + cx.executor().run_until_parked(); + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(false) + .map(|entry| entry.path.as_ref()) + .collect::>(), + vec![ + Path::new(""), + Path::new("lib"), + 
Path::new("lib/a"), + Path::new("lib/a/a.txt"), + Path::new("lib/a/lib-2"), + Path::new("lib/b"), + Path::new("lib/b/b.txt"), + Path::new("lib/b/lib"), + ] + ); + }); +} + +#[gpui::test] +async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "dir1": { + "deps": { + // symlinks here + }, + "src": { + "a.rs": "", + "b.rs": "", + }, + }, + "dir2": { + "src": { + "c.rs": "", + "d.rs": "", + } + }, + "dir3": { + "deps": {}, + "src": { + "e.rs": "", + "f.rs": "", + }, + } + }), + ) + .await; + + // These symlinks point to directories outside of the worktree's root, dir1. + fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into()) + .await; + fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into()) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root/dir1"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + let tree_updates = Arc::new(Mutex::new(Vec::new())); + tree.update(cx, |_, cx| { + let tree_updates = tree_updates.clone(); + cx.subscribe(&tree, move |_, _, event, _| { + if let Event::UpdatedEntries(update) = event { + tree_updates.lock().extend( + update + .iter() + .map(|(path, _, change)| (path.clone(), *change)), + ); + } + }) + .detach(); + }); + + // The symlinked directories are not scanned by default. 
+ tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_external)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new("deps"), false), + (Path::new("deps/dep-dir2"), true), + (Path::new("deps/dep-dir3"), true), + (Path::new("src"), false), + (Path::new("src/a.rs"), false), + (Path::new("src/b.rs"), false), + ] + ); + + assert_eq!( + tree.entry_for_path("deps/dep-dir2").unwrap().kind, + EntryKind::UnloadedDir + ); + }); + + // Expand one of the symlinked directories. + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()]) + }) + .recv() + .await; + + // The expanded directory's contents are loaded. Subdirectories are + // not scanned yet. + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_external)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new("deps"), false), + (Path::new("deps/dep-dir2"), true), + (Path::new("deps/dep-dir3"), true), + (Path::new("deps/dep-dir3/deps"), true), + (Path::new("deps/dep-dir3/src"), true), + (Path::new("src"), false), + (Path::new("src/a.rs"), false), + (Path::new("src/b.rs"), false), + ] + ); + }); + assert_eq!( + mem::take(&mut *tree_updates.lock()), + &[ + (Path::new("deps/dep-dir3").into(), PathChange::Loaded), + (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded), + (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded) + ] + ); + + // Expand a subdirectory of one of the symlinked directories. + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()]) + }) + .recv() + .await; + + // The expanded subdirectory's contents are loaded. 
+ tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_external)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new("deps"), false), + (Path::new("deps/dep-dir2"), true), + (Path::new("deps/dep-dir3"), true), + (Path::new("deps/dep-dir3/deps"), true), + (Path::new("deps/dep-dir3/src"), true), + (Path::new("deps/dep-dir3/src/e.rs"), true), + (Path::new("deps/dep-dir3/src/f.rs"), true), + (Path::new("src"), false), + (Path::new("src/a.rs"), false), + (Path::new("src/b.rs"), false), + ] + ); + }); + + assert_eq!( + mem::take(&mut *tree_updates.lock()), + &[ + (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded), + ( + Path::new("deps/dep-dir3/src/e.rs").into(), + PathChange::Loaded + ), + ( + Path::new("deps/dep-dir3/src/f.rs").into(), + PathChange::Loaded + ) + ] + ); +} + +#[gpui::test] +async fn test_open_gitignored_files(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "node_modules\n", + "one": { + "node_modules": { + "a": { + "a1.js": "a1", + "a2.js": "a2", + }, + "b": { + "b1.js": "b1", + "b2.js": "b2", + }, + "c": { + "c1.js": "c1", + "c2.js": "c2", + } + }, + }, + "two": { + "x.js": "", + "y.js": "", + }, + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_ignored)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new(".gitignore"), false), + (Path::new("one"), false), + (Path::new("one/node_modules"), true), + (Path::new("two"), false), + (Path::new("two/x.js"), false), + (Path::new("two/y.js"), false), + ] + ); + }); + + // Open a file that 
is nested inside of a gitignored directory that + // has not yet been expanded. + let prev_read_dir_count = fs.read_dir_call_count(); + let buffer = tree + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx) + }) + .await + .unwrap(); + + tree.read_with(cx, |tree, cx| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_ignored)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new(".gitignore"), false), + (Path::new("one"), false), + (Path::new("one/node_modules"), true), + (Path::new("one/node_modules/a"), true), + (Path::new("one/node_modules/b"), true), + (Path::new("one/node_modules/b/b1.js"), true), + (Path::new("one/node_modules/b/b2.js"), true), + (Path::new("one/node_modules/c"), true), + (Path::new("two"), false), + (Path::new("two/x.js"), false), + (Path::new("two/y.js"), false), + ] + ); + + assert_eq!( + buffer.read(cx).file().unwrap().path().as_ref(), + Path::new("one/node_modules/b/b1.js") + ); + + // Only the newly-expanded directories are scanned. + assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2); + }); + + // Open another file in a different subdirectory of the same + // gitignored directory. 
+ let prev_read_dir_count = fs.read_dir_call_count(); + let buffer = tree + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx) + }) + .await + .unwrap(); + + tree.read_with(cx, |tree, cx| { + assert_eq!( + tree.entries(true) + .map(|entry| (entry.path.as_ref(), entry.is_ignored)) + .collect::>(), + vec![ + (Path::new(""), false), + (Path::new(".gitignore"), false), + (Path::new("one"), false), + (Path::new("one/node_modules"), true), + (Path::new("one/node_modules/a"), true), + (Path::new("one/node_modules/a/a1.js"), true), + (Path::new("one/node_modules/a/a2.js"), true), + (Path::new("one/node_modules/b"), true), + (Path::new("one/node_modules/b/b1.js"), true), + (Path::new("one/node_modules/b/b2.js"), true), + (Path::new("one/node_modules/c"), true), + (Path::new("two"), false), + (Path::new("two/x.js"), false), + (Path::new("two/y.js"), false), + ] + ); + + assert_eq!( + buffer.read(cx).file().unwrap().path().as_ref(), + Path::new("one/node_modules/a/a2.js") + ); + + // Only the newly-expanded directory is scanned. + assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1); + }); + + // No work happens when files and directories change within an unloaded directory. 
+ let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count(); + fs.create_dir("/root/one/node_modules/c/lib".as_ref()) + .await + .unwrap(); + cx.executor().run_until_parked(); + assert_eq!( + fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count, + 0 + ); +} + +#[gpui::test] +async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "node_modules\n", + "a": { + "a.js": "", + }, + "b": { + "b.js": "", + }, + "node_modules": { + "c": { + "c.js": "", + }, + "d": { + "d.js": "", + "e": { + "e1.js": "", + "e2.js": "", + }, + "f": { + "f1.js": "", + "f2.js": "", + } + }, + }, + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + // Open a file within the gitignored directory, forcing some of its + // subdirectories to be read, but not all. + let read_dir_count_1 = fs.read_dir_call_count(); + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()]) + }) + .recv() + .await; + + // Those subdirectories are now loaded. 
+ tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|e| (e.path.as_ref(), e.is_ignored)) + .collect::>(), + &[ + (Path::new(""), false), + (Path::new(".gitignore"), false), + (Path::new("a"), false), + (Path::new("a/a.js"), false), + (Path::new("b"), false), + (Path::new("b/b.js"), false), + (Path::new("node_modules"), true), + (Path::new("node_modules/c"), true), + (Path::new("node_modules/d"), true), + (Path::new("node_modules/d/d.js"), true), + (Path::new("node_modules/d/e"), true), + (Path::new("node_modules/d/f"), true), + ] + ); + }); + let read_dir_count_2 = fs.read_dir_call_count(); + assert_eq!(read_dir_count_2 - read_dir_count_1, 2); + + // Update the gitignore so that node_modules is no longer ignored, + // but a subdirectory is ignored + fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default()) + .await + .unwrap(); + cx.executor().run_until_parked(); + + // All of the directories that are no longer ignored are now loaded. + tree.read_with(cx, |tree, _| { + assert_eq!( + tree.entries(true) + .map(|e| (e.path.as_ref(), e.is_ignored)) + .collect::>(), + &[ + (Path::new(""), false), + (Path::new(".gitignore"), false), + (Path::new("a"), false), + (Path::new("a/a.js"), false), + (Path::new("b"), false), + (Path::new("b/b.js"), false), + // This directory is no longer ignored + (Path::new("node_modules"), false), + (Path::new("node_modules/c"), false), + (Path::new("node_modules/c/c.js"), false), + (Path::new("node_modules/d"), false), + (Path::new("node_modules/d/d.js"), false), + // This subdirectory is now ignored + (Path::new("node_modules/d/e"), true), + (Path::new("node_modules/d/f"), false), + (Path::new("node_modules/d/f/f1.js"), false), + (Path::new("node_modules/d/f/f2.js"), false), + ] + ); + }); + + // Each of the newly-loaded directories is scanned only once. 
+ let read_dir_count_3 = fs.read_dir_call_count(); + assert_eq!(read_dir_count_3 - read_dir_count_2, 2); +} + +#[gpui::test(iterations = 10)] +async fn test_rescan_with_gitignore(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); + }); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n", + "tree": { + ".git": {}, + ".gitignore": "ignored-dir\n", + "tracked-dir": { + "tracked-file1": "", + "ancestor-ignored-file1": "", + }, + "ignored-dir": { + "ignored-file1": "" + } + } + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + "/root/tree".as_ref(), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.read_with(cx, |tree, _| { + tree.as_local() + .unwrap() + .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()]) + }) + .recv() + .await; + + cx.read(|cx| { + let tree = tree.read(cx); + assert!( + !tree + .entry_for_path("tracked-dir/tracked-file1") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("tracked-dir/ancestor-ignored-file1") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("ignored-dir/ignored-file1") + .unwrap() + .is_ignored + ); + }); + + fs.create_file( + "/root/tree/tracked-dir/tracked-file2".as_ref(), + Default::default(), + ) + .await + .unwrap(); + fs.create_file( + "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(), + Default::default(), + ) + .await + .unwrap(); + fs.create_file( + "/root/tree/ignored-dir/ignored-file2".as_ref(), + Default::default(), + ) + .await + .unwrap(); + + cx.executor().run_until_parked(); + cx.read(|cx| { + let tree = tree.read(cx); + assert!( + !tree + 
.entry_for_path("tracked-dir/tracked-file2") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("tracked-dir/ancestor-ignored-file2") + .unwrap() + .is_ignored + ); + assert!( + tree.entry_for_path("ignored-dir/ignored-file2") + .unwrap() + .is_ignored + ); + assert!(tree.entry_for_path(".git").unwrap().is_ignored); + }); +} + +#[gpui::test] +async fn test_write_file(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let dir = temp_tree(json!({ + ".git": {}, + ".gitignore": "ignored-dir\n", + "tracked-dir": {}, + "ignored-dir": {} + })); + + let tree = Worktree::local( + build_client(cx), + dir.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + tree.update(cx, |tree, cx| { + tree.as_local().unwrap().write_file( + Path::new("tracked-dir/file.txt"), + "hello".into(), + Default::default(), + cx, + ) + }) + .await + .unwrap(); + tree.update(cx, |tree, cx| { + tree.as_local().unwrap().write_file( + Path::new("ignored-dir/file.txt"), + "world".into(), + Default::default(), + cx, + ) + }) + .await + .unwrap(); + + tree.read_with(cx, |tree, _| { + let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap(); + let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap(); + assert!(!tracked.is_ignored); + assert!(ignored.is_ignored); + }); +} + +#[gpui::test] +async fn test_file_scan_exclusions(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let dir = temp_tree(json!({ + ".gitignore": "**/target\n/node_modules\n", + "target": { + "index": "blah2" + }, + "node_modules": { + ".DS_Store": "", + "prettier": { + "package.json": "{}", + }, + }, + "src": { + ".DS_Store": "", + "foo": { + "foo.rs": "mod another;\n", + "another.rs": "// another", + }, + "bar": { + "bar.rs": "// bar", + }, + "lib.rs": "mod foo;\nmod bar;\n", + }, + 
".DS_Store": "", + })); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]); + }); + }); + }); + + let tree = Worktree::local( + build_client(cx), + dir.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "src/foo/foo.rs", + "src/foo/another.rs", + "node_modules/.DS_Store", + "src/.DS_Store", + ".DS_Store", + ], + &["target", "node_modules"], + &["src/lib.rs", "src/bar/bar.rs", ".gitignore"], + ) + }); + + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/node_modules/**".to_string()]); + }); + }); + }); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + tree.read_with(cx, |tree, _| { + check_worktree_entries( + tree, + &[ + "node_modules/prettier/package.json", + "node_modules/.DS_Store", + "node_modules", + ], + &["target"], + &[ + ".gitignore", + "src/lib.rs", + "src/bar/bar.rs", + "src/foo/foo.rs", + "src/foo/another.rs", + "src/.DS_Store", + ".DS_Store", + ], + ) + }); +} + +#[gpui::test(iterations = 30)] +async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + "b": {}, + "c": {}, + "d": {}, + }), + ) + .await; + + let tree = Worktree::local( + build_client(cx), + "/root".as_ref(), + true, + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let snapshot1 = tree.update(cx, |tree, cx| { + let tree = tree.as_local_mut().unwrap(); + let snapshot = 
Arc::new(Mutex::new(tree.snapshot())); + let _ = tree.observe_updates(0, cx, { + let snapshot = snapshot.clone(); + move |update| { + snapshot.lock().apply_remote_update(update).unwrap(); + async { true } + } + }); + snapshot + }); + + let entry = tree + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/e".as_ref(), true, cx) + }) + .await + .unwrap(); + assert!(entry.is_dir()); + + cx.executor().run_until_parked(); + tree.read_with(cx, |tree, _| { + assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir); + }); + + let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot()); + assert_eq!( + snapshot1.lock().entries(true).collect::>(), + snapshot2.entries(true).collect::>() + ); +} + +#[gpui::test] +async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); + + let fs_fake = FakeFs::new(cx.background_executor.clone()); + fs_fake + .insert_tree( + "/root", + json!({ + "a": {}, + }), + ) + .await; + + let tree_fake = Worktree::local( + client_fake, + "/root".as_ref(), + true, + fs_fake, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let entry = tree_fake + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/b/c/d.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_fake.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); + assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); + assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); + }); + + let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx)); + + let fs_real = Arc::new(RealFs); + let temp_root = temp_tree(json!({ + "a": {} + })); + + let tree_real = Worktree::local( + client_real, + temp_root.path(), + true, 
+ fs_real, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let entry = tree_real + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/b/c/d.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_real.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file()); + assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir()); + assert!(tree.entry_for_path("a/b/").unwrap().is_dir()); + }); + + // Test smallest change + let entry = tree_real + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("a/b/c/e.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_real.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file()); + }); + + // Test largest change + let entry = tree_real + .update(cx, |tree, cx| { + tree.as_local_mut() + .unwrap() + .create_entry("d/e/f/g.txt".as_ref(), false, cx) + }) + .await + .unwrap(); + assert!(entry.is_file()); + + cx.executor().run_until_parked(); + tree_real.read_with(cx, |tree, _| { + assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file()); + assert!(tree.entry_for_path("d/e/f").unwrap().is_dir()); + assert!(tree.entry_for_path("d/e/").unwrap().is_dir()); + assert!(tree.entry_for_path("d/").unwrap().is_dir()); + }); +} + +#[gpui::test(iterations = 100)] +async fn test_random_worktree_operations_during_initial_scan( + cx: &mut TestAppContext, + mut rng: StdRng, +) { + init_test(cx); + let operations = env::var("OPERATIONS") + .map(|o| o.parse().unwrap()) + .unwrap_or(5); + let initial_entries = env::var("INITIAL_ENTRIES") + .map(|o| o.parse().unwrap()) + .unwrap_or(20); + + let root_dir = Path::new("/test"); + let fs = FakeFs::new(cx.background_executor.clone()) as Arc; + fs.as_fake().insert_tree(root_dir, json!({})).await; + for _ in 0..initial_entries { + 
randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + } + log::info!("generated initial tree"); + + let worktree = Worktree::local( + build_client(cx), + root_dir, + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())]; + let updates = Arc::new(Mutex::new(Vec::new())); + worktree.update(cx, |tree, cx| { + check_worktree_change_events(tree, cx); + + let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { + let updates = updates.clone(); + move |update| { + updates.lock().push(update); + async { true } + } + }); + }); + + for _ in 0..operations { + worktree + .update(cx, |worktree, cx| { + randomly_mutate_worktree(worktree, &mut rng, cx) + }) + .await + .log_err(); + worktree.read_with(cx, |tree, _| { + tree.as_local().unwrap().snapshot().check_invariants(true) + }); + + if rng.gen_bool(0.6) { + snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())); + } + } + + worktree + .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) + .await; + + cx.executor().run_until_parked(); + + let final_snapshot = worktree.read_with(cx, |tree, _| { + let tree = tree.as_local().unwrap(); + let snapshot = tree.snapshot(); + snapshot.check_invariants(true); + snapshot + }); + + for (i, snapshot) in snapshots.into_iter().enumerate().rev() { + let mut updated_snapshot = snapshot.clone(); + for update in updates.lock().iter() { + if update.scan_id >= updated_snapshot.scan_id() as u64 { + updated_snapshot + .apply_remote_update(update.clone()) + .unwrap(); + } + } + + assert_eq!( + updated_snapshot.entries(true).collect::>(), + final_snapshot.entries(true).collect::>(), + "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}", + ); + } +} + +#[gpui::test(iterations = 100)] +async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) { + init_test(cx); + let operations = 
env::var("OPERATIONS") + .map(|o| o.parse().unwrap()) + .unwrap_or(40); + let initial_entries = env::var("INITIAL_ENTRIES") + .map(|o| o.parse().unwrap()) + .unwrap_or(20); + + let root_dir = Path::new("/test"); + let fs = FakeFs::new(cx.background_executor.clone()) as Arc; + fs.as_fake().insert_tree(root_dir, json!({})).await; + for _ in 0..initial_entries { + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + } + log::info!("generated initial tree"); + + let worktree = Worktree::local( + build_client(cx), + root_dir, + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let updates = Arc::new(Mutex::new(Vec::new())); + worktree.update(cx, |tree, cx| { + check_worktree_change_events(tree, cx); + + let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, { + let updates = updates.clone(); + move |update| { + updates.lock().push(update); + async { true } + } + }); + }); + + worktree + .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) + .await; + + fs.as_fake().pause_events(); + let mut snapshots = Vec::new(); + let mut mutations_len = operations; + while mutations_len > 1 { + if rng.gen_bool(0.2) { + worktree + .update(cx, |worktree, cx| { + randomly_mutate_worktree(worktree, &mut rng, cx) + }) + .await + .log_err(); + } else { + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + } + + let buffered_event_count = fs.as_fake().buffered_event_count(); + if buffered_event_count > 0 && rng.gen_bool(0.3) { + let len = rng.gen_range(0..=buffered_event_count); + log::info!("flushing {} events", len); + fs.as_fake().flush_events(len); + } else { + randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await; + mutations_len -= 1; + } + + cx.executor().run_until_parked(); + if rng.gen_bool(0.2) { + log::info!("storing snapshot {}", snapshots.len()); + let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); + snapshots.push(snapshot); + } + } + + log::info!("quiescing"); + 
fs.as_fake().flush_events(usize::MAX); + cx.executor().run_until_parked(); + + let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); + snapshot.check_invariants(true); + let expanded_paths = snapshot + .expanded_entries() + .map(|e| e.path.clone()) + .collect::>(); + + { + let new_worktree = Worktree::local( + build_client(cx), + root_dir, + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + new_worktree + .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete()) + .await; + new_worktree + .update(cx, |tree, _| { + tree.as_local_mut() + .unwrap() + .refresh_entries_for_paths(expanded_paths) + }) + .recv() + .await; + let new_snapshot = + new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()); + assert_eq!( + snapshot.entries_without_ids(true), + new_snapshot.entries_without_ids(true) + ); + } + + for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() { + for update in updates.lock().iter() { + if update.scan_id >= prev_snapshot.scan_id() as u64 { + prev_snapshot.apply_remote_update(update.clone()).unwrap(); + } + } + + assert_eq!( + prev_snapshot + .entries(true) + .map(ignore_pending_dir) + .collect::>(), + snapshot + .entries(true) + .map(ignore_pending_dir) + .collect::>(), + "wrong updates after snapshot {i}: {updates:#?}", + ); + } + + fn ignore_pending_dir(entry: &Entry) -> Entry { + let mut entry = entry.clone(); + if entry.kind.is_dir() { + entry.kind = EntryKind::Dir + } + entry + } +} + +// The worktree's `UpdatedEntries` event can be used to follow along with +// all changes to the worktree's snapshot. 
+fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext) { + let mut entries = tree.entries(true).cloned().collect::>(); + cx.subscribe(&cx.handle(), move |tree, _, event, _| { + if let Event::UpdatedEntries(changes) = event { + for (path, _, change_type) in changes.iter() { + let entry = tree.entry_for_path(&path).cloned(); + let ix = match entries.binary_search_by_key(&path, |e| &e.path) { + Ok(ix) | Err(ix) => ix, + }; + match change_type { + PathChange::Added => entries.insert(ix, entry.unwrap()), + PathChange::Removed => drop(entries.remove(ix)), + PathChange::Updated => { + let entry = entry.unwrap(); + let existing_entry = entries.get_mut(ix).unwrap(); + assert_eq!(existing_entry.path, entry.path); + *existing_entry = entry; + } + PathChange::AddedOrUpdated | PathChange::Loaded => { + let entry = entry.unwrap(); + if entries.get(ix).map(|e| &e.path) == Some(&entry.path) { + *entries.get_mut(ix).unwrap() = entry; + } else { + entries.insert(ix, entry); + } + } + } + } + + let new_entries = tree.entries(true).cloned().collect::>(); + assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes); + } + }) + .detach(); +} + +fn randomly_mutate_worktree( + worktree: &mut Worktree, + rng: &mut impl Rng, + cx: &mut ModelContext, +) -> Task> { + log::info!("mutating worktree"); + let worktree = worktree.as_local_mut().unwrap(); + let snapshot = worktree.snapshot(); + let entry = snapshot.entries(false).choose(rng).unwrap(); + + match rng.gen_range(0_u32..100) { + 0..=33 if entry.path.as_ref() != Path::new("") => { + log::info!("deleting entry {:?} ({})", entry.path, entry.id.0); + worktree.delete_entry(entry.id, cx).unwrap() + } + ..=66 if entry.path.as_ref() != Path::new("") => { + let other_entry = snapshot.entries(false).choose(rng).unwrap(); + let new_parent_path = if other_entry.is_dir() { + other_entry.path.clone() + } else { + other_entry.path.parent().unwrap().into() + }; + let mut new_path = 
new_parent_path.join(random_filename(rng)); + if new_path.starts_with(&entry.path) { + new_path = random_filename(rng).into(); + } + + log::info!( + "renaming entry {:?} ({}) to {:?}", + entry.path, + entry.id.0, + new_path + ); + let task = worktree.rename_entry(entry.id, new_path, cx).unwrap(); + cx.background_executor().spawn(async move { + task.await?; + Ok(()) + }) + } + _ => { + let task = if entry.is_dir() { + let child_path = entry.path.join(random_filename(rng)); + let is_dir = rng.gen_bool(0.3); + log::info!( + "creating {} at {:?}", + if is_dir { "dir" } else { "file" }, + child_path, + ); + worktree.create_entry(child_path, is_dir, cx) + } else { + log::info!("overwriting file {:?} ({})", entry.path, entry.id.0); + worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx) + }; + cx.background_executor().spawn(async move { + task.await?; + Ok(()) + }) + } + } +} + +async fn randomly_mutate_fs( + fs: &Arc, + root_path: &Path, + insertion_probability: f64, + rng: &mut impl Rng, +) { + log::info!("mutating fs"); + let mut files = Vec::new(); + let mut dirs = Vec::new(); + for path in fs.as_fake().paths(false) { + if path.starts_with(root_path) { + if fs.is_file(&path).await { + files.push(path); + } else { + dirs.push(path); + } + } + } + + if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) { + let path = dirs.choose(rng).unwrap(); + let new_path = path.join(random_filename(rng)); + + if rng.gen() { + log::info!( + "creating dir {:?}", + new_path.strip_prefix(root_path).unwrap() + ); + fs.create_dir(&new_path).await.unwrap(); + } else { + log::info!( + "creating file {:?}", + new_path.strip_prefix(root_path).unwrap() + ); + fs.create_file(&new_path, Default::default()).await.unwrap(); + } + } else if rng.gen_bool(0.05) { + let ignore_dir_path = dirs.choose(rng).unwrap(); + let ignore_path = ignore_dir_path.join(&*GITIGNORE); + + let subdirs = dirs + .iter() + .filter(|d| d.starts_with(&ignore_dir_path)) + 
.cloned() + .collect::>(); + let subfiles = files + .iter() + .filter(|d| d.starts_with(&ignore_dir_path)) + .cloned() + .collect::>(); + let files_to_ignore = { + let len = rng.gen_range(0..=subfiles.len()); + subfiles.choose_multiple(rng, len) + }; + let dirs_to_ignore = { + let len = rng.gen_range(0..subdirs.len()); + subdirs.choose_multiple(rng, len) + }; + + let mut ignore_contents = String::new(); + for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) { + writeln!( + ignore_contents, + "{}", + path_to_ignore + .strip_prefix(&ignore_dir_path) + .unwrap() + .to_str() + .unwrap() + ) + .unwrap(); + } + log::info!( + "creating gitignore {:?} with contents:\n{}", + ignore_path.strip_prefix(&root_path).unwrap(), + ignore_contents + ); + fs.save( + &ignore_path, + &ignore_contents.as_str().into(), + Default::default(), + ) + .await + .unwrap(); + } else { + let old_path = { + let file_path = files.choose(rng); + let dir_path = dirs[1..].choose(rng); + file_path.into_iter().chain(dir_path).choose(rng).unwrap() + }; + + let is_rename = rng.gen(); + if is_rename { + let new_path_parent = dirs + .iter() + .filter(|d| !d.starts_with(old_path)) + .choose(rng) + .unwrap(); + + let overwrite_existing_dir = + !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3); + let new_path = if overwrite_existing_dir { + fs.remove_dir( + &new_path_parent, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + .unwrap(); + new_path_parent.to_path_buf() + } else { + new_path_parent.join(random_filename(rng)) + }; + + log::info!( + "renaming {:?} to {}{:?}", + old_path.strip_prefix(&root_path).unwrap(), + if overwrite_existing_dir { + "overwrite " + } else { + "" + }, + new_path.strip_prefix(&root_path).unwrap() + ); + fs.rename( + &old_path, + &new_path, + fs::RenameOptions { + overwrite: true, + ignore_if_exists: true, + }, + ) + .await + .unwrap(); + } else if fs.is_file(&old_path).await { + log::info!( + "deleting file {:?}", + 
old_path.strip_prefix(&root_path).unwrap() + ); + fs.remove_file(old_path, Default::default()).await.unwrap(); + } else { + log::info!( + "deleting dir {:?}", + old_path.strip_prefix(&root_path).unwrap() + ); + fs.remove_dir( + &old_path, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + .unwrap(); + } + } +} + +fn random_filename(rng: &mut impl Rng) -> String { + (0..6) + .map(|_| rng.sample(rand::distributions::Alphanumeric)) + .map(char::from) + .collect() +} + +#[gpui::test] +async fn test_rename_work_directory(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let root = temp_tree(json!({ + "projects": { + "project1": { + "a": "", + "b": "", + } + }, + + })); + let root_path = root.path(); + + let tree = Worktree::local( + build_client(cx), + root_path, + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let repo = git_init(&root_path.join("projects/project1")); + git_add("a", &repo); + git_commit("init", &repo); + std::fs::write(root_path.join("projects/project1/a"), "aa").ok(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + tree.flush_fs_events(cx).await; + + cx.read(|cx| { + let tree = tree.read(cx); + let (work_dir, _) = tree.repositories().next().unwrap(); + assert_eq!(work_dir.as_ref(), Path::new("projects/project1")); + assert_eq!( + tree.status_for_file(Path::new("projects/project1/a")), + Some(GitFileStatus::Modified) + ); + assert_eq!( + tree.status_for_file(Path::new("projects/project1/b")), + Some(GitFileStatus::Added) + ); + }); + + std::fs::rename( + root_path.join("projects/project1"), + root_path.join("projects/project2"), + ) + .ok(); + tree.flush_fs_events(cx).await; + + cx.read(|cx| { + let tree = tree.read(cx); + let (work_dir, _) = tree.repositories().next().unwrap(); + assert_eq!(work_dir.as_ref(), Path::new("projects/project2")); + assert_eq!( + 
tree.status_for_file(Path::new("projects/project2/a")), + Some(GitFileStatus::Modified) + ); + assert_eq!( + tree.status_for_file(Path::new("projects/project2/b")), + Some(GitFileStatus::Added) + ); + }); +} + +#[gpui::test] +async fn test_git_repository_for_path(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + let root = temp_tree(json!({ + "c.txt": "", + "dir1": { + ".git": {}, + "deps": { + "dep1": { + ".git": {}, + "src": { + "a.txt": "" + } + } + }, + "src": { + "b.txt": "" + } + }, + })); + + let tree = Worktree::local( + build_client(cx), + root.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let tree = tree.as_local().unwrap(); + + assert!(tree.repository_for_path("c.txt".as_ref()).is_none()); + + let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap(); + assert_eq!( + entry + .work_directory(tree) + .map(|directory| directory.as_ref().to_owned()), + Some(Path::new("dir1").to_owned()) + ); + + let entry = tree + .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref()) + .unwrap(); + assert_eq!( + entry + .work_directory(tree) + .map(|directory| directory.as_ref().to_owned()), + Some(Path::new("dir1/deps/dep1").to_owned()) + ); + + let entries = tree.files(false, 0); + + let paths_with_repos = tree + .entries_with_repositories(entries) + .map(|(entry, repo)| { + ( + entry.path.as_ref(), + repo.and_then(|repo| { + repo.work_directory(&tree) + .map(|work_directory| work_directory.0.to_path_buf()) + }), + ) + }) + .collect::>(); + + assert_eq!( + paths_with_repos, + &[ + (Path::new("c.txt"), None), + ( + Path::new("dir1/deps/dep1/src/a.txt"), + Some(Path::new("dir1/deps/dep1").into()) + ), + (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())), + ] + ); + }); + + let repo_update_events = 
Arc::new(Mutex::new(vec![])); + tree.update(cx, |_, cx| { + let repo_update_events = repo_update_events.clone(); + cx.subscribe(&tree, move |_, _, event, _| { + if let Event::UpdatedGitRepositories(update) = event { + repo_update_events.lock().push(update.clone()); + } + }) + .detach(); + }); + + std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap(); + tree.flush_fs_events(cx).await; + + assert_eq!( + repo_update_events.lock()[0] + .iter() + .map(|e| e.0.clone()) + .collect::>>(), + vec![Path::new("dir1").into()] + ); + + std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _cx| { + let tree = tree.as_local().unwrap(); + + assert!(tree + .repository_for_path("dir1/src/b.txt".as_ref()) + .is_none()); + }); +} + +#[gpui::test] +async fn test_git_status(cx: &mut TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + const IGNORE_RULE: &'static str = "**/target"; + + let root = temp_tree(json!({ + "project": { + "a.txt": "a", + "b.txt": "bb", + "c": { + "d": { + "e.txt": "eee" + } + }, + "f.txt": "ffff", + "target": { + "build_file": "???" + }, + ".gitignore": IGNORE_RULE + }, + + })); + + const A_TXT: &'static str = "a.txt"; + const B_TXT: &'static str = "b.txt"; + const E_TXT: &'static str = "c/d/e.txt"; + const F_TXT: &'static str = "f.txt"; + const DOTGITIGNORE: &'static str = ".gitignore"; + const BUILD_FILE: &'static str = "target/build_file"; + let project_path = Path::new("project"); + + // Set up git repository before creating the worktree. 
+ let work_dir = root.path().join("project"); + let mut repo = git_init(work_dir.as_path()); + repo.add_ignore_rule(IGNORE_RULE).unwrap(); + git_add(A_TXT, &repo); + git_add(E_TXT, &repo); + git_add(DOTGITIGNORE, &repo); + git_commit("Initial commit", &repo); + + let tree = Worktree::local( + build_client(cx), + root.path(), + true, + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + cx.executor().run_until_parked(); + + // Check that the right git state is observed on startup + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!(snapshot.repositories().count(), 1); + let (dir, _) = snapshot.repositories().next().unwrap(); + assert_eq!(dir.as_ref(), Path::new("project")); + + assert_eq!( + snapshot.status_for_file(project_path.join(B_TXT)), + Some(GitFileStatus::Added) + ); + assert_eq!( + snapshot.status_for_file(project_path.join(F_TXT)), + Some(GitFileStatus::Added) + ); + }); + + // Modify a file in the working copy. + std::fs::write(work_dir.join(A_TXT), "aa").unwrap(); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // The worktree detects that the file's git status has changed. + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!( + snapshot.status_for_file(project_path.join(A_TXT)), + Some(GitFileStatus::Modified) + ); + }); + + // Create a commit in the git repository. + git_add(A_TXT, &repo); + git_add(B_TXT, &repo); + git_commit("Committing modified and added", &repo); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // The worktree detects that the files' git status have changed. 
+ tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!( + snapshot.status_for_file(project_path.join(F_TXT)), + Some(GitFileStatus::Added) + ); + assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None); + assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); + }); + + // Modify files in the working copy and perform git operations on other files. + git_reset(0, &repo); + git_remove_index(Path::new(B_TXT), &repo); + git_stash(&mut repo); + std::fs::write(work_dir.join(E_TXT), "eeee").unwrap(); + std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap(); + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + // Check that more complex repo changes are tracked + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + + assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None); + assert_eq!( + snapshot.status_for_file(project_path.join(B_TXT)), + Some(GitFileStatus::Added) + ); + assert_eq!( + snapshot.status_for_file(project_path.join(E_TXT)), + Some(GitFileStatus::Modified) + ); + }); + + std::fs::remove_file(work_dir.join(B_TXT)).unwrap(); + std::fs::remove_dir_all(work_dir.join("c")).unwrap(); + std::fs::write( + work_dir.join(DOTGITIGNORE), + [IGNORE_RULE, "f.txt"].join("\n"), + ) + .unwrap(); + + git_add(Path::new(DOTGITIGNORE), &repo); + git_commit("Committing modified git ignore", &repo); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + let mut renamed_dir_name = "first_directory/second_directory"; + const RENAMED_FILE: &'static str = "rf.txt"; + + std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap(); + std::fs::write( + work_dir.join(renamed_dir_name).join(RENAMED_FILE), + "new-contents", + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + assert_eq!( + 
snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)), + Some(GitFileStatus::Added) + ); + }); + + renamed_dir_name = "new_first_directory/second_directory"; + + std::fs::rename( + work_dir.join("first_directory"), + work_dir.join("new_first_directory"), + ) + .unwrap(); + + tree.flush_fs_events(cx).await; + cx.executor().run_until_parked(); + + tree.read_with(cx, |tree, _cx| { + let snapshot = tree.snapshot(); + + assert_eq!( + snapshot.status_for_file( + project_path + .join(Path::new(renamed_dir_name)) + .join(RENAMED_FILE) + ), + Some(GitFileStatus::Added) + ); + }); +} + +#[gpui::test] +async fn test_propagate_git_statuses(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root", + json!({ + ".git": {}, + "a": { + "b": { + "c1.txt": "", + "c2.txt": "", + }, + "d": { + "e1.txt": "", + "e2.txt": "", + "e3.txt": "", + } + }, + "f": { + "no-status.txt": "" + }, + "g": { + "h1.txt": "", + "h2.txt": "" + }, + + }), + ) + .await; + + fs.set_status_for_repo_via_git_operation( + &Path::new("/root/.git"), + &[ + (Path::new("a/b/c1.txt"), GitFileStatus::Added), + (Path::new("a/d/e2.txt"), GitFileStatus::Modified), + (Path::new("g/h2.txt"), GitFileStatus::Conflict), + ], + ); + + let tree = Worktree::local( + build_client(cx), + Path::new("/root"), + true, + fs.clone(), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + cx.executor().run_until_parked(); + let snapshot = tree.read_with(cx, |tree, _| tree.snapshot()); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new(""), Some(GitFileStatus::Conflict)), + (Path::new("a"), Some(GitFileStatus::Modified)), + (Path::new("a/b"), Some(GitFileStatus::Added)), + (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), + (Path::new("a/b/c2.txt"), None), + (Path::new("a/d"), Some(GitFileStatus::Modified)), + 
(Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), + (Path::new("f"), None), + (Path::new("f/no-status.txt"), None), + (Path::new("g"), Some(GitFileStatus::Conflict)), + (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("a/b"), Some(GitFileStatus::Added)), + (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), + (Path::new("a/b/c2.txt"), None), + (Path::new("a/d"), Some(GitFileStatus::Modified)), + (Path::new("a/d/e1.txt"), None), + (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), + (Path::new("f"), None), + (Path::new("f/no-status.txt"), None), + (Path::new("g"), Some(GitFileStatus::Conflict)), + ], + ); + + check_propagated_statuses( + &snapshot, + &[ + (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)), + (Path::new("a/b/c2.txt"), None), + (Path::new("a/d/e1.txt"), None), + (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)), + (Path::new("f/no-status.txt"), None), + ], + ); + + #[track_caller] + fn check_propagated_statuses( + snapshot: &Snapshot, + expected_statuses: &[(&Path, Option)], + ) { + let mut entries = expected_statuses + .iter() + .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone()) + .collect::>(); + snapshot.propagate_git_statuses(&mut entries); + assert_eq!( + entries + .iter() + .map(|e| (e.path.as_ref(), e.git_status)) + .collect::>(), + expected_statuses + ); + } +} + +fn build_client(cx: &mut TestAppContext) -> Arc { + let http_client = FakeHttpClient::with_404_response(); + cx.read(|cx| Client::new(http_client, cx)) +} + +#[track_caller] +fn git_init(path: &Path) -> git2::Repository { + git2::Repository::init(path).expect("Failed to initialize git repository") +} + +#[track_caller] +fn git_add>(path: P, repo: &git2::Repository) { + let path = path.as_ref(); + let mut index = repo.index().expect("Failed to get index"); + index.add_path(path).expect("Failed to add a.txt"); + index.write().expect("Failed to write index"); +} + 
+#[track_caller] +fn git_remove_index(path: &Path, repo: &git2::Repository) { + let mut index = repo.index().expect("Failed to get index"); + index.remove_path(path).expect("Failed to add a.txt"); + index.write().expect("Failed to write index"); +} + +#[track_caller] +fn git_commit(msg: &'static str, repo: &git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + let oid = repo.index().unwrap().write_tree().unwrap(); + let tree = repo.find_tree(oid).unwrap(); + if let Some(head) = repo.head().ok() { + let parent_obj = head.peel(git2::ObjectType::Commit).unwrap(); + + let parent_commit = parent_obj.as_commit().unwrap(); + + repo.commit( + Some("HEAD"), + &signature, + &signature, + msg, + &tree, + &[parent_commit], + ) + .expect("Failed to commit with parent"); + } else { + repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[]) + .expect("Failed to commit"); + } +} + +#[track_caller] +fn git_stash(repo: &mut git2::Repository) { + use git2::Signature; + + let signature = Signature::now("test", "test@zed.dev").unwrap(); + repo.stash_save(&signature, "N/A", None) + .expect("Failed to stash"); +} + +#[track_caller] +fn git_reset(offset: usize, repo: &git2::Repository) { + let head = repo.head().expect("Couldn't get repo head"); + let object = head.peel(git2::ObjectType::Commit).unwrap(); + let commit = object.as_commit().unwrap(); + let new_head = commit + .parents() + .inspect(|parnet| { + parnet.message(); + }) + .skip(offset) + .next() + .expect("Not enough history"); + repo.reset(&new_head.as_object(), git2::ResetType::Soft, None) + .expect("Could not reset"); +} + +#[allow(dead_code)] +#[track_caller] +fn git_status(repo: &git2::Repository) -> collections::HashMap { + repo.statuses(None) + .unwrap() + .iter() + .map(|status| (status.path().unwrap().to_string(), status.status())) + .collect() +} + +#[track_caller] +fn check_worktree_entries( + tree: &Worktree, + expected_excluded_paths: &[&str], + 
expected_ignored_paths: &[&str], + expected_tracked_paths: &[&str], +) { + for path in expected_excluded_paths { + let entry = tree.entry_for_path(path); + assert!( + entry.is_none(), + "expected path '{path}' to be excluded, but got entry: {entry:?}", + ); + } + for path in expected_ignored_paths { + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'")); + assert!( + entry.is_ignored, + "expected path '{path}' to be ignored, but got entry: {entry:?}", + ); + } + for path in expected_tracked_paths { + let entry = tree + .entry_for_path(path) + .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'")); + assert!( + !entry.is_ignored, + "expected path '{path}' to be tracked, but got entry: {entry:?}", + ); + } +} + +fn init_test(cx: &mut gpui::TestAppContext) { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + Project::init_settings(cx); + }); +} diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index e43423073c2831ee1ce2a90803693a8b8c5e5a84..eb124bfca28840f4b99a3b022abbbee33611fc0e 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -3046,8 +3046,7 @@ mod tests { workspace::init_settings(cx); client::init_settings(cx); Project::init_settings(cx); - }); - cx.update(|cx| { + cx.update_global::(|store, cx| { store.update_user_settings::(cx, |project_settings| { project_settings.file_scan_exclusions = Some(Vec::new()); diff --git a/crates/project_panel2/src/project_panel.rs b/crates/project_panel2/src/project_panel.rs index 7a455fe8ce67c174258414db350a81a7a6b9909b..6702a17957c1cf6dd9c4854b387ee4de2bee3140 100644 --- a/crates/project_panel2/src/project_panel.rs +++ b/crates/project_panel2/src/project_panel.rs @@ -1571,7 +1571,7 @@ mod tests { use super::*; use gpui::{TestAppContext, View, VisualTestContext, WindowHandle}; use 
pretty_assertions::assert_eq; - use project::FakeFs; + use project::{project_settings::ProjectSettings, FakeFs}; use serde_json::json; use settings::SettingsStore; use std::{ @@ -1672,6 +1672,124 @@ mod tests { ); } + #[gpui::test] + async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = + Some(vec!["**/.git".to_string(), "**/4/**".to_string()]); + }); + }); + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/root1", + json!({ + ".dockerignore": "", + ".git": { + "HEAD": "", + }, + "a": { + "0": { "q": "", "r": "", "s": "" }, + "1": { "t": "", "u": "" }, + "2": { "v": "", "w": "", "x": "", "y": "" }, + }, + "b": { + "3": { "Q": "" }, + "4": { "R": "", "S": "", "T": "", "U": "" }, + }, + "C": { + "5": {}, + "6": { "V": "", "W": "" }, + "7": { "X": "" }, + "8": { "Y": {}, "Z": "" } + } + }), + ) + .await; + fs.insert_tree( + "/root2", + json!({ + "d": { + "4": "" + }, + "e": {} + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await; + let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let panel = workspace + .update(cx, |workspace, cx| ProjectPanel::new(workspace, cx)) + .unwrap(); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " > b", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root1/b", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b <== selected", + " > 3", + " > C", + " .dockerignore", + "v root2", + " > d", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/d", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ 
+ "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v root2", + " v d <== selected", + " > e", + ] + ); + + toggle_expand_dir(&panel, "root2/e", cx); + assert_eq!( + visible_entries_as_strings(&panel, 0..50, cx), + &[ + "v root1", + " > a", + " v b", + " > 3", + " > C", + " .dockerignore", + "v root2", + " v d", + " v e <== selected", + ] + ); + } + #[gpui::test(iterations = 30)] async fn test_editing_files(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -2792,6 +2910,12 @@ mod tests { workspace::init_settings(cx); client::init_settings(cx); Project::init_settings(cx); + + cx.update_global::(|store, cx| { + store.update_user_settings::(cx, |project_settings| { + project_settings.file_scan_exclusions = Some(Vec::new()); + }); + }); }); } From ec3bb0f090cb1333f7406b1af44d970939357648 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 17 Nov 2023 14:11:39 -0700 Subject: [PATCH 33/75] Focus workspace on launch Fixes action dispatch when no focus is there. --- crates/zed2/src/zed2.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/zed2/src/zed2.rs b/crates/zed2/src/zed2.rs index 84cacccb5adb597b4b5fcd7650e11681b5ab7216..e7be98d45b5e18c20d8e5d357ce8db4f5480a07b 100644 --- a/crates/zed2/src/zed2.rs +++ b/crates/zed2/src/zed2.rs @@ -10,8 +10,8 @@ pub use assets::*; use collections::VecDeque; use editor::{Editor, MultiBuffer}; use gpui::{ - actions, point, px, AppContext, Context, PromptLevel, TitlebarOptions, ViewContext, - VisualContext, WindowBounds, WindowKind, WindowOptions, + actions, point, px, AppContext, Context, FocusableView, PromptLevel, TitlebarOptions, + ViewContext, VisualContext, WindowBounds, WindowKind, WindowOptions, }; pub use only_instance::*; pub use open_listener::*; @@ -425,6 +425,8 @@ pub fn initialize_workspace(app_state: Arc, cx: &mut AppContext) { } } }); + + workspace.focus_handle(cx).focus(cx); //todo!() // load_default_keymap(cx); }) From f4eb219c7539ba80dbee09e7096584ab2e7ed88d Mon 
Sep 17 00:00:00 2001 From: Julia Date: Tue, 14 Nov 2023 19:25:55 -0500 Subject: [PATCH 34/75] Get diagnostics view almost building in the zed2 world Co-Authored-By: Nathan Sobo --- Cargo.lock | 28 + Cargo.toml | 1 + crates/diagnostics2/Cargo.toml | 43 + crates/diagnostics2/src/diagnostics.rs | 1653 +++++++++++++++++ crates/diagnostics2/src/items.rs | 251 +++ .../src/project_diagnostics_settings.rs | 28 + crates/diagnostics2/src/toolbar_controls.rs | 123 ++ crates/editor2/src/editor.rs | 41 +- crates/editor2/src/editor_tests.rs | 10 +- crates/editor2/src/element.rs | 8 +- crates/editor2/src/items.rs | 19 +- crates/editor2/src/scroll.rs | 6 +- .../editor2/src/test/editor_test_context.rs | 3 +- crates/go_to_line2/src/go_to_line.rs | 6 +- crates/gpui2/src/element.rs | 9 + crates/gpui2/src/view.rs | 22 +- crates/gpui2/src/window.rs | 12 + crates/picker2/src/picker2.rs | 4 +- crates/project_panel2/src/project_panel.rs | 5 +- crates/ui2/src/components/icon.rs | 38 +- crates/workspace2/src/pane.rs | 10 +- crates/workspace2/src/workspace2.rs | 24 +- 22 files changed, 2252 insertions(+), 92 deletions(-) create mode 100644 crates/diagnostics2/Cargo.toml create mode 100644 crates/diagnostics2/src/diagnostics.rs create mode 100644 crates/diagnostics2/src/items.rs create mode 100644 crates/diagnostics2/src/project_diagnostics_settings.rs create mode 100644 crates/diagnostics2/src/toolbar_controls.rs diff --git a/Cargo.lock b/Cargo.lock index a7eb358ddb372127ea4e5d0c431d93782e4fd2b0..b52108977aeeaa6c6611eb52f45aca44c6812b84 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2573,6 +2573,34 @@ dependencies = [ "workspace", ] +[[package]] +name = "diagnostics2" +version = "0.1.0" +dependencies = [ + "anyhow", + "client2", + "collections", + "editor2", + "futures 0.3.28", + "gpui2", + "language2", + "log", + "lsp2", + "postage", + "project2", + "schemars", + "serde", + "serde_derive", + "serde_json", + "settings2", + "smallvec", + "theme2", + "ui2", + "unindent", + "util", + "workspace2", 
+] + [[package]] name = "diff" version = "0.1.13" diff --git a/Cargo.toml b/Cargo.toml index 6b29b1812759e7d702df950f733b33ed892a4f04..f66cfae280268fadf6020122b0170723f76f6e75 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,6 +31,7 @@ members = [ "crates/refineable", "crates/refineable/derive_refineable", "crates/diagnostics", + "crates/diagnostics2", "crates/drag_and_drop", "crates/editor", "crates/feature_flags", diff --git a/crates/diagnostics2/Cargo.toml b/crates/diagnostics2/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..45d40489424643b5c717e65609a8f272160baea1 --- /dev/null +++ b/crates/diagnostics2/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "diagnostics2" +version = "0.1.0" +edition = "2021" +publish = false + +[lib] +path = "src/diagnostics.rs" +doctest = false + +[dependencies] +collections = { path = "../collections" } +editor = { package = "editor2", path = "../editor2" } +gpui = { package = "gpui2", path = "../gpui2" } +ui = { package = "ui2", path = "../ui2" } +language = { package = "language2", path = "../language2" } +lsp = { package = "lsp2", path = "../lsp2" } +project = { package = "project2", path = "../project2" } +settings = { package = "settings2", path = "../settings2" } +theme = { package = "theme2", path = "../theme2" } +util = { path = "../util" } +workspace = { package = "workspace2", path = "../workspace2" } + +log.workspace = true +anyhow.workspace = true +futures.workspace = true +schemars.workspace = true +serde.workspace = true +serde_derive.workspace = true +smallvec.workspace = true +postage.workspace = true + +[dev-dependencies] +client = { package = "client2", path = "../client2", features = ["test-support"] } +editor = { package = "editor2", path = "../editor2", features = ["test-support"] } +language = { package = "language2", path = "../language2", features = ["test-support"] } +lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] } +gpui = { package = "gpui2", path = 
"../gpui2", features = ["test-support"] } +workspace = { package = "workspace2", path = "../workspace2", features = ["test-support"] } +theme = { package = "theme2", path = "../theme2", features = ["test-support"] } + +serde_json.workspace = true +unindent.workspace = true diff --git a/crates/diagnostics2/src/diagnostics.rs b/crates/diagnostics2/src/diagnostics.rs new file mode 100644 index 0000000000000000000000000000000000000000..d9a417b60ac0431731c0e50ad785168b0848b612 --- /dev/null +++ b/crates/diagnostics2/src/diagnostics.rs @@ -0,0 +1,1653 @@ +pub mod items; +mod project_diagnostics_settings; +mod toolbar_controls; + +use anyhow::{Context as _, Result}; +use collections::{HashMap, HashSet}; +use editor::{ + diagnostic_block_renderer, + display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock}, + highlight_diagnostic_message, + scroll::autoscroll::Autoscroll, + Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset, +}; +use futures::future::try_join_all; +use gpui::{ + actions, div, AnyElement, AnyView, AppContext, Component, Context, Div, EventEmitter, + FocusHandle, Model, ParentComponent, Render, SharedString, Styled, Subscription, Task, View, + ViewContext, VisualContext, WeakView, +}; +use language::{ + Anchor, Bias, Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, Point, Selection, + SelectionGoal, +}; +use lsp::LanguageServerId; +use project::{DiagnosticSummary, Project, ProjectPath}; +use project_diagnostics_settings::ProjectDiagnosticsSettings; +use settings::Settings; +use std::{ + any::{Any, TypeId}, + borrow::Cow, + cmp::Ordering, + mem, + ops::Range, + path::PathBuf, + sync::Arc, +}; +use theme::ThemeSettings; +pub use toolbar_controls::ToolbarControls; +use ui::Label; +use util::TryFutureExt; +use workspace::{ + item::{BreadcrumbText, Item, ItemEvent, ItemHandle}, + ItemNavHistory, Pane, ToolbarItemLocation, Workspace, +}; + +actions!(Deploy, ToggleWarnings); + +const CONTEXT_LINE_COUNT: u32 = 1; + 
+pub fn init(cx: &mut AppContext) { + ProjectDiagnosticsSettings::register(cx); + // todo!() + // cx.add_action(ProjectDiagnosticsEditor::deploy); + // cx.add_action(ProjectDiagnosticsEditor::toggle_warnings); + // items::init(cx); +} + +struct ProjectDiagnosticsEditor { + project: Model, + workspace: WeakView, + focus_handle: FocusHandle, + editor: View, + summary: DiagnosticSummary, + excerpts: Model, + path_states: Vec, + paths_to_update: HashMap>, + current_diagnostics: HashMap>, + include_warnings: bool, + _subscriptions: Vec, +} + +struct PathState { + path: ProjectPath, + diagnostic_groups: Vec, +} + +#[derive(Clone, Debug, PartialEq)] +struct Jump { + path: ProjectPath, + position: Point, + anchor: Anchor, +} + +struct DiagnosticGroupState { + language_server_id: LanguageServerId, + primary_diagnostic: DiagnosticEntry, + primary_excerpt_ix: usize, + excerpts: Vec, + blocks: HashSet, + block_count: usize, +} + +impl EventEmitter for ProjectDiagnosticsEditor {} + +impl Render for ProjectDiagnosticsEditor { + type Element = Div; + + fn render(&mut self, cx: &mut ViewContext) -> Self::Element { + div().size_full().bg(gpui::red()) + } +} + +// impl View for ProjectDiagnosticsEditor { +// fn ui_name() -> &'static str { +// "ProjectDiagnosticsEditor" +// } + +// fn render(&mut self, cx: &mut ViewContext) -> AnyElement { +// if self.path_states.is_empty() { +// let theme = &theme::current(cx).project_diagnostics; +// PaneBackdrop::new( +// cx.view_id(), +// Label::new("No problems in workspace", theme.empty_message.clone()) +// .aligned() +// .contained() +// .with_style(theme.container) +// .into_any(), +// ) +// .into_any() +// } else { +// ChildView::new(&self.editor, cx).into_any() +// } +// } + +// fn focus_in(&mut self, _: AnyView, cx: &mut ViewContext) { +// if cx.is_self_focused() && !self.path_states.is_empty() { +// cx.focus(&self.editor); +// } +// } + +// fn debug_json(&self, cx: &AppContext) -> serde_json::Value { +// let project = 
self.project.read(cx); +// json!({ +// "project": json!({ +// "language_servers": project.language_server_statuses().collect::>(), +// "summary": project.diagnostic_summary(cx), +// }), +// "summary": self.summary, +// "paths_to_update": self.paths_to_update.iter().map(|(server_id, paths)| +// (server_id.0, paths.into_iter().map(|path| path.path.to_string_lossy()).collect::>()) +// ).collect::>(), +// "current_diagnostics": self.current_diagnostics.iter().map(|(server_id, paths)| +// (server_id.0, paths.into_iter().map(|path| path.path.to_string_lossy()).collect::>()) +// ).collect::>(), +// "paths_states": self.path_states.iter().map(|state| +// json!({ +// "path": state.path.path.to_string_lossy(), +// "groups": state.diagnostic_groups.iter().map(|group| +// json!({ +// "block_count": group.blocks.len(), +// "excerpt_count": group.excerpts.len(), +// }) +// ).collect::>(), +// }) +// ).collect::>(), +// }) +// } +// } + +impl ProjectDiagnosticsEditor { + fn new( + project_handle: Model, + workspace: WeakView, + cx: &mut ViewContext, + ) -> Self { + let project_event_subscription = + cx.subscribe(&project_handle, |this, _, event, cx| match event { + project::Event::DiskBasedDiagnosticsFinished { language_server_id } => { + log::debug!("Disk based diagnostics finished for server {language_server_id}"); + this.update_excerpts(Some(*language_server_id), cx); + } + project::Event::DiagnosticsUpdated { + language_server_id, + path, + } => { + log::debug!("Adding path {path:?} to update for server {language_server_id}"); + this.paths_to_update + .entry(*language_server_id) + .or_default() + .insert(path.clone()); + if this.editor.read(cx).selections.all::(cx).is_empty() + && !this.is_dirty(cx) + { + this.update_excerpts(Some(*language_server_id), cx); + } + } + _ => {} + }); + + let excerpts = cx.build_model(|cx| MultiBuffer::new(project_handle.read(cx).replica_id())); + let editor = cx.build_view(|cx| { + let mut editor = + Editor::for_multibuffer(excerpts.clone(), 
Some(project_handle.clone()), cx); + editor.set_vertical_scroll_margin(5, cx); + editor + }); + let editor_event_subscription = + cx.subscribe(&editor, |this, _editor, event: &EditorEvent, cx| { + Self::emit_item_event_for_editor_event(event, cx); + if event == &EditorEvent::Focused && this.path_states.is_empty() { + cx.focus(&this.focus_handle); + } + }); + + let project = project_handle.read(cx); + let summary = project.diagnostic_summary(cx); + let mut this = Self { + project: project_handle, + summary, + workspace, + excerpts, + focus_handle: cx.focus_handle(), + editor, + path_states: Default::default(), + paths_to_update: HashMap::default(), + include_warnings: ProjectDiagnosticsSettings::get_global(cx).include_warnings, + current_diagnostics: HashMap::default(), + _subscriptions: vec![project_event_subscription, editor_event_subscription], + }; + this.update_excerpts(None, cx); + this + } + + fn emit_item_event_for_editor_event(event: &EditorEvent, cx: &mut ViewContext) { + match event { + EditorEvent::Closed => cx.emit(ItemEvent::CloseItem), + EditorEvent::Saved | EditorEvent::TitleChanged => { + cx.emit(ItemEvent::UpdateTab); + cx.emit(ItemEvent::UpdateBreadcrumbs); + } + EditorEvent::Reparsed => { + cx.emit(ItemEvent::UpdateBreadcrumbs); + } + EditorEvent::SelectionsChanged { local } if *local => { + cx.emit(ItemEvent::UpdateBreadcrumbs); + } + EditorEvent::DirtyChanged => { + cx.emit(ItemEvent::UpdateTab); + } + EditorEvent::BufferEdited => { + cx.emit(ItemEvent::Edit); + cx.emit(ItemEvent::UpdateBreadcrumbs); + } + _ => {} + } + } + + fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext) { + if let Some(existing) = workspace.item_of_type::(cx) { + workspace.activate_item(&existing, cx); + } else { + let workspace_handle = cx.view().downgrade(); + let diagnostics = cx.build_view(|cx| { + ProjectDiagnosticsEditor::new(workspace.project().clone(), workspace_handle, cx) + }); + workspace.add_item(Box::new(diagnostics), cx); + } + } + + fn 
toggle_warnings(&mut self, _: &ToggleWarnings, cx: &mut ViewContext) { + self.include_warnings = !self.include_warnings; + self.paths_to_update = self.current_diagnostics.clone(); + self.update_excerpts(None, cx); + cx.notify(); + } + + fn update_excerpts( + &mut self, + language_server_id: Option, + cx: &mut ViewContext, + ) { + log::debug!("Updating excerpts for server {language_server_id:?}"); + let mut paths_to_recheck = HashSet::default(); + let mut new_summaries: HashMap> = self + .project + .read(cx) + .diagnostic_summaries(cx) + .fold(HashMap::default(), |mut summaries, (path, server_id, _)| { + summaries.entry(server_id).or_default().insert(path); + summaries + }); + let mut old_diagnostics = if let Some(language_server_id) = language_server_id { + new_summaries.retain(|server_id, _| server_id == &language_server_id); + self.paths_to_update.retain(|server_id, paths| { + if server_id == &language_server_id { + paths_to_recheck.extend(paths.drain()); + false + } else { + true + } + }); + let mut old_diagnostics = HashMap::default(); + if let Some(new_paths) = new_summaries.get(&language_server_id) { + if let Some(old_paths) = self + .current_diagnostics + .insert(language_server_id, new_paths.clone()) + { + old_diagnostics.insert(language_server_id, old_paths); + } + } else { + if let Some(old_paths) = self.current_diagnostics.remove(&language_server_id) { + old_diagnostics.insert(language_server_id, old_paths); + } + } + old_diagnostics + } else { + paths_to_recheck.extend(self.paths_to_update.drain().flat_map(|(_, paths)| paths)); + mem::replace(&mut self.current_diagnostics, new_summaries.clone()) + }; + for (server_id, new_paths) in new_summaries { + match old_diagnostics.remove(&server_id) { + Some(mut old_paths) => { + paths_to_recheck.extend( + new_paths + .into_iter() + .filter(|new_path| !old_paths.remove(new_path)), + ); + paths_to_recheck.extend(old_paths); + } + None => paths_to_recheck.extend(new_paths), + } + } + 
paths_to_recheck.extend(old_diagnostics.into_iter().flat_map(|(_, paths)| paths)); + + if paths_to_recheck.is_empty() { + log::debug!("No paths to recheck for language server {language_server_id:?}"); + return; + } + log::debug!( + "Rechecking {} paths for language server {:?}", + paths_to_recheck.len(), + language_server_id + ); + let project = self.project.clone(); + cx.spawn(|this, mut cx| { + async move { + let _: Vec<()> = try_join_all(paths_to_recheck.into_iter().map(|path| { + let mut cx = cx.clone(); + let project = project.clone(); + let this = this.clone(); + async move { + let buffer = project + .update(&mut cx, |project, cx| project.open_buffer(path.clone(), cx))? + .await + .with_context(|| format!("opening buffer for path {path:?}"))?; + this.update(&mut cx, |this, cx| { + this.populate_excerpts(path, language_server_id, buffer, cx); + }) + .context("missing project")?; + anyhow::Ok(()) + } + })) + .await + .context("rechecking diagnostics for paths")?; + + this.update(&mut cx, |this, cx| { + this.summary = this.project.read(cx).diagnostic_summary(cx); + cx.emit(ItemEvent::UpdateTab); + cx.emit(ItemEvent::UpdateBreadcrumbs); + })?; + anyhow::Ok(()) + } + .log_err() + }) + .detach(); + } + + fn populate_excerpts( + &mut self, + path: ProjectPath, + language_server_id: Option, + buffer: Model, + cx: &mut ViewContext, + ) { + let was_empty = self.path_states.is_empty(); + let snapshot = buffer.read(cx).snapshot(); + let path_ix = match self.path_states.binary_search_by_key(&&path, |e| &e.path) { + Ok(ix) => ix, + Err(ix) => { + self.path_states.insert( + ix, + PathState { + path: path.clone(), + diagnostic_groups: Default::default(), + }, + ); + ix + } + }; + + let mut prev_excerpt_id = if path_ix > 0 { + let prev_path_last_group = &self.path_states[path_ix - 1] + .diagnostic_groups + .last() + .unwrap(); + prev_path_last_group.excerpts.last().unwrap().clone() + } else { + ExcerptId::min() + }; + + let path_state = &mut self.path_states[path_ix]; + let 
mut groups_to_add = Vec::new(); + let mut group_ixs_to_remove = Vec::new(); + let mut blocks_to_add = Vec::new(); + let mut blocks_to_remove = HashSet::default(); + let mut first_excerpt_id = None; + let max_severity = if self.include_warnings { + DiagnosticSeverity::WARNING + } else { + DiagnosticSeverity::ERROR + }; + let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| { + let mut old_groups = path_state.diagnostic_groups.iter().enumerate().peekable(); + let mut new_groups = snapshot + .diagnostic_groups(language_server_id) + .into_iter() + .filter(|(_, group)| { + group.entries[group.primary_ix].diagnostic.severity <= max_severity + }) + .peekable(); + loop { + let mut to_insert = None; + let mut to_remove = None; + let mut to_keep = None; + match (old_groups.peek(), new_groups.peek()) { + (None, None) => break, + (None, Some(_)) => to_insert = new_groups.next(), + (Some((_, old_group)), None) => { + if language_server_id.map_or(true, |id| id == old_group.language_server_id) + { + to_remove = old_groups.next(); + } else { + to_keep = old_groups.next(); + } + } + (Some((_, old_group)), Some((_, new_group))) => { + let old_primary = &old_group.primary_diagnostic; + let new_primary = &new_group.entries[new_group.primary_ix]; + match compare_diagnostics(old_primary, new_primary, &snapshot) { + Ordering::Less => { + if language_server_id + .map_or(true, |id| id == old_group.language_server_id) + { + to_remove = old_groups.next(); + } else { + to_keep = old_groups.next(); + } + } + Ordering::Equal => { + to_keep = old_groups.next(); + new_groups.next(); + } + Ordering::Greater => to_insert = new_groups.next(), + } + } + } + + if let Some((language_server_id, group)) = to_insert { + let mut group_state = DiagnosticGroupState { + language_server_id, + primary_diagnostic: group.entries[group.primary_ix].clone(), + primary_excerpt_ix: 0, + excerpts: Default::default(), + blocks: Default::default(), + block_count: 0, + }; + let mut pending_range: 
Option<(Range, usize)> = None; + let mut is_first_excerpt_for_group = true; + for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() { + let resolved_entry = entry.map(|e| e.resolve::(&snapshot)); + if let Some((range, start_ix)) = &mut pending_range { + if let Some(entry) = resolved_entry.as_ref() { + if entry.range.start.row + <= range.end.row + 1 + CONTEXT_LINE_COUNT * 2 + { + range.end = range.end.max(entry.range.end); + continue; + } + } + + let excerpt_start = + Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0); + let excerpt_end = snapshot.clip_point( + Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX), + Bias::Left, + ); + let excerpt_id = excerpts + .insert_excerpts_after( + prev_excerpt_id, + buffer.clone(), + [ExcerptRange { + context: excerpt_start..excerpt_end, + primary: Some(range.clone()), + }], + excerpts_cx, + ) + .pop() + .unwrap(); + + prev_excerpt_id = excerpt_id.clone(); + first_excerpt_id.get_or_insert_with(|| prev_excerpt_id.clone()); + group_state.excerpts.push(excerpt_id.clone()); + let header_position = (excerpt_id.clone(), language::Anchor::MIN); + + if is_first_excerpt_for_group { + is_first_excerpt_for_group = false; + let mut primary = + group.entries[group.primary_ix].diagnostic.clone(); + primary.message = + primary.message.split('\n').next().unwrap().to_string(); + group_state.block_count += 1; + blocks_to_add.push(BlockProperties { + position: header_position, + height: 2, + style: BlockStyle::Sticky, + render: diagnostic_header_renderer(primary), + disposition: BlockDisposition::Above, + }); + } + + for entry in &group.entries[*start_ix..ix] { + let mut diagnostic = entry.diagnostic.clone(); + if diagnostic.is_primary { + group_state.primary_excerpt_ix = group_state.excerpts.len() - 1; + diagnostic.message = + entry.diagnostic.message.split('\n').skip(1).collect(); + } + + if !diagnostic.message.is_empty() { + group_state.block_count += 1; + blocks_to_add.push(BlockProperties { + 
position: (excerpt_id.clone(), entry.range.start), + height: diagnostic.message.matches('\n').count() as u8 + 1, + style: BlockStyle::Fixed, + render: diagnostic_block_renderer(diagnostic, true), + disposition: BlockDisposition::Below, + }); + } + } + + pending_range.take(); + } + + if let Some(entry) = resolved_entry { + pending_range = Some((entry.range.clone(), ix)); + } + } + + groups_to_add.push(group_state); + } else if let Some((group_ix, group_state)) = to_remove { + excerpts.remove_excerpts(group_state.excerpts.iter().copied(), excerpts_cx); + group_ixs_to_remove.push(group_ix); + blocks_to_remove.extend(group_state.blocks.iter().copied()); + } else if let Some((_, group)) = to_keep { + prev_excerpt_id = group.excerpts.last().unwrap().clone(); + first_excerpt_id.get_or_insert_with(|| prev_excerpt_id.clone()); + } + } + + excerpts.snapshot(excerpts_cx) + }); + + self.editor.update(cx, |editor, cx| { + editor.remove_blocks(blocks_to_remove, None, cx); + let block_ids = editor.insert_blocks( + blocks_to_add.into_iter().map(|block| { + let (excerpt_id, text_anchor) = block.position; + BlockProperties { + position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor), + height: block.height, + style: block.style, + render: block.render, + disposition: block.disposition, + } + }), + Some(Autoscroll::fit()), + cx, + ); + + let mut block_ids = block_ids.into_iter(); + for group_state in &mut groups_to_add { + group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect(); + } + }); + + for ix in group_ixs_to_remove.into_iter().rev() { + path_state.diagnostic_groups.remove(ix); + } + path_state.diagnostic_groups.extend(groups_to_add); + path_state.diagnostic_groups.sort_unstable_by(|a, b| { + let range_a = &a.primary_diagnostic.range; + let range_b = &b.primary_diagnostic.range; + range_a + .start + .cmp(&range_b.start, &snapshot) + .then_with(|| range_a.end.cmp(&range_b.end, &snapshot)) + }); + + if path_state.diagnostic_groups.is_empty() 
{ + self.path_states.remove(path_ix); + } + + self.editor.update(cx, |editor, cx| { + let groups; + let mut selections; + let new_excerpt_ids_by_selection_id; + if was_empty { + groups = self.path_states.first()?.diagnostic_groups.as_slice(); + new_excerpt_ids_by_selection_id = [(0, ExcerptId::min())].into_iter().collect(); + selections = vec![Selection { + id: 0, + start: 0, + end: 0, + reversed: false, + goal: SelectionGoal::None, + }]; + } else { + groups = self.path_states.get(path_ix)?.diagnostic_groups.as_slice(); + new_excerpt_ids_by_selection_id = + editor.change_selections(Some(Autoscroll::fit()), cx, |s| s.refresh()); + selections = editor.selections.all::(cx); + } + + // If any selection has lost its position, move it to start of the next primary diagnostic. + let snapshot = editor.snapshot(cx); + for selection in &mut selections { + if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) { + let group_ix = match groups.binary_search_by(|probe| { + probe + .excerpts + .last() + .unwrap() + .cmp(new_excerpt_id, &snapshot.buffer_snapshot) + }) { + Ok(ix) | Err(ix) => ix, + }; + if let Some(group) = groups.get(group_ix) { + let offset = excerpts_snapshot + .anchor_in_excerpt( + group.excerpts[group.primary_excerpt_ix].clone(), + group.primary_diagnostic.range.start, + ) + .to_offset(&excerpts_snapshot); + selection.start = offset; + selection.end = offset; + } + } + } + editor.change_selections(None, cx, |s| { + s.select(selections); + }); + Some(()) + }); + + if self.path_states.is_empty() { + if self.editor.focus_handle(cx).is_focused(cx) { + cx.focus(&self.focus_handle); + } + } else if self.focus_handle.is_focused(cx) { + let focus_handle = self.editor.focus_handle(cx); + cx.focus(&focus_handle); + } + cx.notify(); + } +} + +impl Item for ProjectDiagnosticsEditor { + fn focus_handle(&self) -> FocusHandle { + self.focus_handle.clone() + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + self.editor.update(cx, |editor, cx| 
editor.deactivated(cx)); + } + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { + self.editor + .update(cx, |editor, cx| editor.navigate(data, cx)) + } + + fn tab_tooltip_text(&self, _: &AppContext) -> Option { + Some("Project Diagnostics".into()) + } + + fn tab_content(&self, _detail: Option, cx: &AppContext) -> AnyElement { + render_summary(&self.summary) + } + + fn for_each_project_item( + &self, + cx: &AppContext, + f: &mut dyn FnMut(gpui::EntityId, &dyn project::Item), + ) { + self.editor.for_each_project_item(cx, f) + } + + fn is_singleton(&self, _: &AppContext) -> bool { + false + } + + fn set_nav_history(&mut self, nav_history: ItemNavHistory, cx: &mut ViewContext) { + self.editor.update(cx, |editor, _| { + editor.set_nav_history(Some(nav_history)); + }); + } + + fn clone_on_split( + &self, + _workspace_id: workspace::WorkspaceId, + cx: &mut ViewContext, + ) -> Option> + where + Self: Sized, + { + Some(cx.build_view(|cx| { + ProjectDiagnosticsEditor::new(self.project.clone(), self.workspace.clone(), cx) + })) + } + + fn is_dirty(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).is_dirty(cx) + } + + fn has_conflict(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).has_conflict(cx) + } + + fn can_save(&self, _: &AppContext) -> bool { + true + } + + fn save(&mut self, project: Model, cx: &mut ViewContext) -> Task> { + self.editor.save(project, cx) + } + + fn save_as( + &mut self, + _: Model, + _: PathBuf, + _: &mut ViewContext, + ) -> Task> { + unreachable!() + } + + fn reload(&mut self, project: Model, cx: &mut ViewContext) -> Task> { + self.editor.reload(project, cx) + } + + fn act_as_type<'a>( + &'a self, + type_id: TypeId, + self_handle: &'a View, + _: &'a AppContext, + ) -> Option { + if type_id == TypeId::of::() { + Some(self_handle.to_any()) + } else if type_id == TypeId::of::() { + Some(self.editor.to_any()) + } else { + None + } + } + + fn breadcrumb_location(&self) -> ToolbarItemLocation { + 
ToolbarItemLocation::PrimaryLeft { flex: None } + } + + fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option> { + self.editor.breadcrumbs(theme, cx) + } + + fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx)); + } + + fn serialized_item_kind() -> Option<&'static str> { + Some("diagnostics") + } + + fn deserialize( + project: Model, + workspace: WeakView, + _workspace_id: workspace::WorkspaceId, + _item_id: workspace::ItemId, + cx: &mut ViewContext, + ) -> Task>> { + Task::ready(Ok(cx.build_view(|cx| Self::new(project, workspace, cx)))) + } +} + +fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock { + let (message, highlights) = highlight_diagnostic_message(Vec::new(), &diagnostic.message); + Arc::new(move |cx| { + let settings = ThemeSettings::get_global(cx); + div().render() + // let font_size = (style.text_scale_factor * settings.buffer_font_size(cx)).round(); + // let icon = if diagnostic.severity == DiagnosticSeverity::ERROR { + // Svg::new("icons/error.svg").with_color(theme.error_diagnostic.message.text.color) + // } else { + // Svg::new("icons/warning.svg").with_color(theme.warning_diagnostic.message.text.color) + // }; + + // Flex::row() + // .with_child( + // icon.constrained() + // .with_width(icon_width) + // .aligned() + // .contained() + // .with_margin_right(cx.gutter_padding), + // ) + // .with_children(diagnostic.source.as_ref().map(|source| { + // Label::new( + // format!("{source}: "), + // style.source.label.clone().with_font_size(font_size), + // ) + // .contained() + // .with_style(style.message.container) + // .aligned() + // })) + // .with_child( + // Label::new( + // message.clone(), + // style.message.label.clone().with_font_size(font_size), + // ) + // .with_highlights(highlights.clone()) + // .contained() + // .with_style(style.message.container) + // .aligned(), + // ) + // 
.with_children(diagnostic.code.clone().map(|code| { + // Label::new(code, style.code.text.clone().with_font_size(font_size)) + // .contained() + // .with_style(style.code.container) + // .aligned() + // })) + // .contained() + // .with_style(style.container) + // .with_padding_left(cx.gutter_padding) + // .with_padding_right(cx.gutter_padding) + // .expanded() + // .into_any_named("diagnostic header") + }) +} + +pub(crate) fn render_summary(summary: &DiagnosticSummary) -> AnyElement { + if summary.error_count == 0 && summary.warning_count == 0 { + Label::new("No problems").render() + } else { + div() + .bg(gpui::red()) + .child(Label::new("TODO Show warnings/errors")) + .render() + // Flex::row() + // .with_child( + // Svg::new("icons/error.svg") + // .with_color(text_style.color) + // .constrained() + // .with_width(icon_width) + // .aligned() + // .contained() + // .with_margin_right(icon_spacing), + // ) + // .with_child( + // Label::new( + // summary.error_count.to_string(), + // LabelStyle { + // text: text_style.clone(), + // highlight_text: None, + // }, + // ) + // .aligned(), + // ) + // .with_child( + // Svg::new("icons/warning.svg") + // .with_color(text_style.color) + // .constrained() + // .with_width(icon_width) + // .aligned() + // .contained() + // .with_margin_left(summary_spacing) + // .with_margin_right(icon_spacing), + // ) + // .with_child( + // Label::new( + // summary.warning_count.to_string(), + // LabelStyle { + // text: text_style.clone(), + // highlight_text: None, + // }, + // ) + // .aligned(), + // ) + // .into_any() + } +} + +fn compare_diagnostics( + lhs: &DiagnosticEntry, + rhs: &DiagnosticEntry, + snapshot: &language::BufferSnapshot, +) -> Ordering { + lhs.range + .start + .to_offset(snapshot) + .cmp(&rhs.range.start.to_offset(snapshot)) + .then_with(|| { + lhs.range + .end + .to_offset(snapshot) + .cmp(&rhs.range.end.to_offset(snapshot)) + }) + .then_with(|| lhs.diagnostic.message.cmp(&rhs.diagnostic.message)) +} + +#[cfg(test)] 
+mod tests { + use super::*; + use editor::{ + display_map::{BlockContext, TransformBlock}, + DisplayPoint, + }; + use gpui::{px, TestAppContext, WindowContext}; + use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16, Unclipped}; + use project::FakeFs; + use serde_json::json; + use settings::SettingsStore; + use unindent::Unindent as _; + + #[gpui::test] + async fn test_diagnostics(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/test", + json!({ + "consts.rs": " + const a: i32 = 'a'; + const b: i32 = c; + " + .unindent(), + + "main.rs": " + fn main() { + let x = vec![]; + let y = vec![]; + a(x); + b(y); + // comment 1 + // comment 2 + c(y); + d(x); + } + " + .unindent(), + }), + ) + .await; + + let language_server_id = LanguageServerId(0); + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let workspace = window.root(cx); + + // Create some diagnostics + project.update(cx, |project, cx| { + project + .update_diagnostic_entries( + language_server_id, + PathBuf::from("/test/main.rs"), + None, + vec![ + DiagnosticEntry { + range: Unclipped(PointUtf16::new(1, 8))..Unclipped(PointUtf16::new(1, 9)), + diagnostic: Diagnostic { + message: + "move occurs because `x` has type `Vec`, which does not implement the `Copy` trait" + .to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(2, 8))..Unclipped(PointUtf16::new(2, 9)), + diagnostic: Diagnostic { + message: + "move occurs because `y` has type `Vec`, which does not implement the `Copy` trait" + .to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: 
Unclipped(PointUtf16::new(3, 6))..Unclipped(PointUtf16::new(3, 7)), + diagnostic: Diagnostic { + message: "value moved here".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(4, 6))..Unclipped(PointUtf16::new(4, 7)), + diagnostic: Diagnostic { + message: "value moved here".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(7, 6))..Unclipped(PointUtf16::new(7, 7)), + diagnostic: Diagnostic { + message: "use of moved value\nvalue used here after move".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(8, 6))..Unclipped(PointUtf16::new(8, 7)), + diagnostic: Diagnostic { + message: "use of moved value\nvalue used here after move".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + ], + cx, + ) + .unwrap(); + }); + + // Open the project diagnostics view while there are already diagnostics. 
+ let view = window.build_view(cx, |cx| { + ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx) + }); + + view.next_notification(cx).await; + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (15, "collapsed context".into()), + (16, "diagnostic header".into()), + (25, "collapsed context".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + // + // main.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + " let x = vec![];\n", + " let y = vec![];\n", + "\n", // supporting diagnostic + " a(x);\n", + " b(y);\n", + "\n", // supporting diagnostic + " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + "\n", // context ellipsis + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", + "\n", // context ellipsis + " c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" + ) + ); + + // Cursor is at the first diagnostic + view.editor.update(cx, |editor, cx| { + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(12, 6)..DisplayPoint::new(12, 6)] + ); + }); + }); + + // Diagnostics are added for another earlier path. 
+ project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(language_server_id, cx); + project + .update_diagnostic_entries( + language_server_id, + PathBuf::from("/test/consts.rs"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)), + diagnostic: Diagnostic { + message: "mismatched types\nexpected `usize`, found `char`".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(language_server_id, cx); + }); + + view.next_notification(cx).await; + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (7, "path header block".into()), + (9, "diagnostic header".into()), + (22, "collapsed context".into()), + (23, "diagnostic header".into()), + (32, "collapsed context".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + // + // consts.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "const a: i32 = 'a';\n", + "\n", // supporting diagnostic + "const b: i32 = c;\n", + // + // main.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + " let x = vec![];\n", + " let y = vec![];\n", + "\n", // supporting diagnostic + " a(x);\n", + " b(y);\n", + "\n", // supporting diagnostic + " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // filename + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", + "\n", // context ellipsis + " 
c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" + ) + ); + + // Cursor keeps its position. + view.editor.update(cx, |editor, cx| { + assert_eq!( + editor.selections.display_ranges(cx), + [DisplayPoint::new(19, 6)..DisplayPoint::new(19, 6)] + ); + }); + }); + + // Diagnostics are added to the first path + project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(language_server_id, cx); + project + .update_diagnostic_entries( + language_server_id, + PathBuf::from("/test/consts.rs"), + None, + vec![ + DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 15)) + ..Unclipped(PointUtf16::new(0, 15)), + diagnostic: Diagnostic { + message: "mismatched types\nexpected `usize`, found `char`" + .to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: Unclipped(PointUtf16::new(1, 15)) + ..Unclipped(PointUtf16::new(1, 15)), + diagnostic: Diagnostic { + message: "unresolved name `c`".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, + ], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(language_server_id, cx); + }); + + view.next_notification(cx).await; + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (7, "collapsed context".into()), + (8, "diagnostic header".into()), + (13, "path header block".into()), + (15, "diagnostic header".into()), + (28, "collapsed context".into()), + (29, "diagnostic header".into()), + (38, "collapsed context".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + // + // consts.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "const a: i32 = 'a';\n", + "\n", // 
supporting diagnostic + "const b: i32 = c;\n", + "\n", // context ellipsis + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "const a: i32 = 'a';\n", + "const b: i32 = c;\n", + "\n", // supporting diagnostic + // + // main.rs + // + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + " let x = vec![];\n", + " let y = vec![];\n", + "\n", // supporting diagnostic + " a(x);\n", + " b(y);\n", + "\n", // supporting diagnostic + " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + "\n", // context ellipsis + // diagnostic group 2 + "\n", // primary message + "\n", // filename + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", + "\n", // context ellipsis + " c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" + ) + ); + }); + } + + #[gpui::test] + async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/test", + json!({ + "main.js": " + a(); + b(); + c(); + d(); + e(); + ".unindent() + }), + ) + .await; + + let server_id_1 = LanguageServerId(100); + let server_id_2 = LanguageServerId(101); + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + let workspace = window.root(cx); + + let view = window.build_view(cx, |cx| { + ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx) + }); + + // Two language servers start updating diagnostics + project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(server_id_1, cx); + project.disk_based_diagnostics_started(server_id_2, cx); + project + .update_diagnostic_entries( + server_id_1, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: 
Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)), + diagnostic: Diagnostic { + message: "error 1".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + }); + + // The first language server finishes + project.update(cx, |project, cx| { + project.disk_based_diagnostics_finished(server_id_1, cx); + }); + + // Only the first language server's diagnostics are shown. + cx.executor().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // + "b();", + ) + ); + }); + + // The second language server finishes + project.update(cx, |project, cx| { + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)), + diagnostic: Diagnostic { + message: "warning 1".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 2, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_2, cx); + }); + + // Both language server's diagnostics are shown. 
+ cx.executor().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (6, "collapsed context".into()), + (7, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // location + "b();\n", // + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "a();\n", // context + "b();\n", // + "c();", // context + ) + ); + }); + + // Both language servers start updating diagnostics, and the first server finishes. + project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(server_id_1, cx); + project.disk_based_diagnostics_started(server_id_2, cx); + project + .update_diagnostic_entries( + server_id_1, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)), + diagnostic: Diagnostic { + message: "warning 2".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.rs"), + None, + vec![], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_1, cx); + }); + + // Only the first language server's diagnostics are updated. 
+ cx.executor().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (7, "collapsed context".into()), + (8, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // location + "b();\n", // + "c();\n", // context + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "b();\n", // context + "c();\n", // + "d();", // context + ) + ); + }); + + // The second language server finishes. + project.update(cx, |project, cx| { + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)), + diagnostic: Diagnostic { + message: "warning 2".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_2, cx); + }); + + // Both language servers' diagnostics are updated. 
+ cx.executor().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (7, "collapsed context".into()), + (8, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "b();\n", // location + "c();\n", // + "d();\n", // context + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "c();\n", // context + "d();\n", // + "e();", // context + ) + ); + }); + } + + fn init_test(cx: &mut TestAppContext) { + cx.update(|cx| { + cx.set_global(SettingsStore::test(cx)); + theme::init(cx); + language::init(cx); + client::init_settings(cx); + workspace::init_settings(cx); + Project::init_settings(cx); + crate::init(cx); + }); + } + + fn editor_blocks(editor: &View, cx: &mut WindowContext) -> Vec<(u32, SharedString)> { + editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(cx); + snapshot + .blocks_in_range(0..snapshot.max_point().row()) + .enumerate() + .filter_map(|(ix, (row, block))| { + let name = match block { + TransformBlock::Custom(block) => block + .render(&mut BlockContext { + view_context: cx, + anchor_x: px(0.), + gutter_padding: px(0.), + gutter_width: px(0.), + line_height: px(0.), + em_width: px(0.), + block_id: ix, + }) + .element_id()? + .try_into() + .expect("All blocks must have string ID"), + + TransformBlock::ExcerptHeader { + starts_new_buffer, .. 
+ } => { + if *starts_new_buffer { + "path header block".into() + } else { + "collapsed context".into() + } + } + }; + + Some((row, name)) + }) + .collect() + }) + } +} diff --git a/crates/diagnostics2/src/items.rs b/crates/diagnostics2/src/items.rs new file mode 100644 index 0000000000000000000000000000000000000000..23532673037934dc8d67e6274b2e969738f0622e --- /dev/null +++ b/crates/diagnostics2/src/items.rs @@ -0,0 +1,251 @@ +use collections::HashSet; +use editor::{Editor, GoToDiagnostic}; +use gpui::{ + div, serde_json, AppContext, CursorStyle, Div, Entity, EventEmitter, MouseButton, Render, + Styled, Subscription, View, ViewContext, WeakView, +}; +use language::Diagnostic; +use lsp::LanguageServerId; +use workspace::{item::ItemHandle, StatusItemView, ToolbarItemEvent, Workspace}; + +use crate::ProjectDiagnosticsEditor; + +// todo!() +// pub fn init(cx: &mut AppContext) { +// cx.add_action(DiagnosticIndicator::go_to_next_diagnostic); +// } + +pub struct DiagnosticIndicator { + summary: project::DiagnosticSummary, + active_editor: Option>, + workspace: WeakView, + current_diagnostic: Option, + in_progress_checks: HashSet, + _observe_active_editor: Option, +} + +impl Render for DiagnosticIndicator { + type Element = Div; + + fn render(&mut self, cx: &mut ViewContext) -> Self::Element { + div().size_full().bg(gpui::red()) + } +} + +impl DiagnosticIndicator { + pub fn new(workspace: &Workspace, cx: &mut ViewContext) -> Self { + let project = workspace.project(); + cx.subscribe(project, |this, project, event, cx| match event { + project::Event::DiskBasedDiagnosticsStarted { language_server_id } => { + this.in_progress_checks.insert(*language_server_id); + cx.notify(); + } + project::Event::DiskBasedDiagnosticsFinished { language_server_id } + | project::Event::LanguageServerRemoved(language_server_id) => { + this.summary = project.read(cx).diagnostic_summary(cx); + this.in_progress_checks.remove(language_server_id); + cx.notify(); + } + 
project::Event::DiagnosticsUpdated { .. } => { + this.summary = project.read(cx).diagnostic_summary(cx); + cx.notify(); + } + _ => {} + }) + .detach(); + Self { + summary: project.read(cx).diagnostic_summary(cx), + in_progress_checks: project + .read(cx) + .language_servers_running_disk_based_diagnostics() + .collect(), + active_editor: None, + workspace: workspace.weak_handle(), + current_diagnostic: None, + _observe_active_editor: None, + } + } + + fn go_to_next_diagnostic(&mut self, _: &GoToDiagnostic, cx: &mut ViewContext) { + if let Some(editor) = self.active_editor.as_ref().and_then(|e| e.upgrade()) { + editor.update(cx, |editor, cx| { + editor.go_to_diagnostic_impl(editor::Direction::Next, cx); + }) + } + } + + fn update(&mut self, editor: View, cx: &mut ViewContext) { + let editor = editor.read(cx); + let buffer = editor.buffer().read(cx); + let cursor_position = editor.selections.newest::(cx).head(); + let new_diagnostic = buffer + .snapshot(cx) + .diagnostics_in_range::<_, usize>(cursor_position..cursor_position, false) + .filter(|entry| !entry.range.is_empty()) + .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) + .map(|entry| entry.diagnostic); + if new_diagnostic != self.current_diagnostic { + self.current_diagnostic = new_diagnostic; + cx.notify(); + } + } +} + +// todo: is this nessesary anymore? 
+impl EventEmitter for DiagnosticIndicator {} + +// impl View for DiagnosticIndicator { +// fn ui_name() -> &'static str { +// "DiagnosticIndicator" +// } + +// fn render(&mut self, cx: &mut ViewContext) -> AnyElement { +// enum Summary {} +// enum Message {} + +// let tooltip_style = theme::current(cx).tooltip.clone(); +// let in_progress = !self.in_progress_checks.is_empty(); +// let mut element = Flex::row().with_child( +// MouseEventHandler::new::(0, cx, |state, cx| { +// let theme = theme::current(cx); +// let style = theme +// .workspace +// .status_bar +// .diagnostic_summary +// .style_for(state); + +// let mut summary_row = Flex::row(); +// if self.summary.error_count > 0 { +// summary_row.add_child( +// Svg::new("icons/error.svg") +// .with_color(style.icon_color_error) +// .constrained() +// .with_width(style.icon_width) +// .aligned() +// .contained() +// .with_margin_right(style.icon_spacing), +// ); +// summary_row.add_child( +// Label::new(self.summary.error_count.to_string(), style.text.clone()) +// .aligned(), +// ); +// } + +// if self.summary.warning_count > 0 { +// summary_row.add_child( +// Svg::new("icons/warning.svg") +// .with_color(style.icon_color_warning) +// .constrained() +// .with_width(style.icon_width) +// .aligned() +// .contained() +// .with_margin_right(style.icon_spacing) +// .with_margin_left(if self.summary.error_count > 0 { +// style.summary_spacing +// } else { +// 0. 
+// }), +// ); +// summary_row.add_child( +// Label::new(self.summary.warning_count.to_string(), style.text.clone()) +// .aligned(), +// ); +// } + +// if self.summary.error_count == 0 && self.summary.warning_count == 0 { +// summary_row.add_child( +// Svg::new("icons/check_circle.svg") +// .with_color(style.icon_color_ok) +// .constrained() +// .with_width(style.icon_width) +// .aligned() +// .into_any_named("ok-icon"), +// ); +// } + +// summary_row +// .constrained() +// .with_height(style.height) +// .contained() +// .with_style(if self.summary.error_count > 0 { +// style.container_error +// } else if self.summary.warning_count > 0 { +// style.container_warning +// } else { +// style.container_ok +// }) +// }) +// .with_cursor_style(CursorStyle::PointingHand) +// .on_click(MouseButton::Left, |_, this, cx| { +// if let Some(workspace) = this.workspace.upgrade(cx) { +// workspace.update(cx, |workspace, cx| { +// ProjectDiagnosticsEditor::deploy(workspace, &Default::default(), cx) +// }) +// } +// }) +// .with_tooltip::( +// 0, +// "Project Diagnostics", +// Some(Box::new(crate::Deploy)), +// tooltip_style, +// cx, +// ) +// .aligned() +// .into_any(), +// ); + +// let style = &theme::current(cx).workspace.status_bar; +// let item_spacing = style.item_spacing; + +// if in_progress { +// element.add_child( +// Label::new("Checking…", style.diagnostic_message.default.text.clone()) +// .aligned() +// .contained() +// .with_margin_left(item_spacing), +// ); +// } else if let Some(diagnostic) = &self.current_diagnostic { +// let message_style = style.diagnostic_message.clone(); +// element.add_child( +// MouseEventHandler::new::(1, cx, |state, _| { +// Label::new( +// diagnostic.message.split('\n').next().unwrap().to_string(), +// message_style.style_for(state).text.clone(), +// ) +// .aligned() +// .contained() +// .with_margin_left(item_spacing) +// }) +// .with_cursor_style(CursorStyle::PointingHand) +// .on_click(MouseButton::Left, |_, this, cx| { +// 
this.go_to_next_diagnostic(&Default::default(), cx) +// }), +// ); +// } + +// element.into_any_named("diagnostic indicator") +// } + +// fn debug_json(&self, _: &gpui::AppContext) -> serde_json::Value { +// serde_json::json!({ "summary": self.summary }) +// } +// } + +impl StatusItemView for DiagnosticIndicator { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + cx: &mut ViewContext, + ) { + if let Some(editor) = active_pane_item.and_then(|item| item.downcast::()) { + self.active_editor = Some(editor.downgrade()); + self._observe_active_editor = Some(cx.observe(&editor, Self::update)); + self.update(editor, cx); + } else { + self.active_editor = None; + self.current_diagnostic = None; + self._observe_active_editor = None; + } + cx.notify(); + } +} diff --git a/crates/diagnostics2/src/project_diagnostics_settings.rs b/crates/diagnostics2/src/project_diagnostics_settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..f762d2b1e626f8be1394ccecf2b2d683ca7ea437 --- /dev/null +++ b/crates/diagnostics2/src/project_diagnostics_settings.rs @@ -0,0 +1,28 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +#[derive(Deserialize, Debug)] +pub struct ProjectDiagnosticsSettings { + pub include_warnings: bool, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct ProjectDiagnosticsSettingsContent { + include_warnings: Option, +} + +impl settings::Settings for ProjectDiagnosticsSettings { + const KEY: Option<&'static str> = Some("diagnostics"); + type FileContent = ProjectDiagnosticsSettingsContent; + + fn load( + default_value: &Self::FileContent, + user_values: &[&Self::FileContent], + _cx: &mut gpui::AppContext, + ) -> anyhow::Result + where + Self: Sized, + { + Self::load_via_json_merge(default_value, user_values) + } +} diff --git a/crates/diagnostics2/src/toolbar_controls.rs b/crates/diagnostics2/src/toolbar_controls.rs new file mode 100644 index 
0000000000000000000000000000000000000000..e60ded4016302518f43448270c080af26e7498fb --- /dev/null +++ b/crates/diagnostics2/src/toolbar_controls.rs @@ -0,0 +1,123 @@ +use crate::{ProjectDiagnosticsEditor, ToggleWarnings}; +use gpui::{ + div, Action, CursorStyle, Div, Entity, EventEmitter, MouseButton, ParentComponent, Render, + View, ViewContext, WeakView, +}; +use ui::{Icon, IconButton, StyledExt}; +use workspace::{item::ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView}; + +pub struct ToolbarControls { + editor: Option>, +} + +impl Render for ToolbarControls { + type Element = Div; + + fn render(&mut self, cx: &mut ViewContext) -> Self::Element { + div() + .h_flex() + .child(IconButton::new("toggle-warnings", Icon::Warning).on_click(|view, cx| todo!())) + } +} + +impl EventEmitter for ToolbarControls {} + +// impl View for ToolbarControls { +// fn ui_name() -> &'static str { +// "ToolbarControls" +// } + +// fn render(&mut self, cx: &mut ViewContext) -> AnyElement { +// let include_warnings = self +// .editor +// .as_ref() +// .and_then(|editor| editor.upgrade(cx)) +// .map(|editor| editor.read(cx).include_warnings) +// .unwrap_or(false); +// let tooltip = if include_warnings { +// "Exclude Warnings".into() +// } else { +// "Include Warnings".into() +// }; +// Flex::row() +// .with_child(render_toggle_button( +// 0, +// "icons/warning.svg", +// include_warnings, +// (tooltip, Some(Box::new(ToggleWarnings))), +// cx, +// move |this, cx| { +// if let Some(editor) = this.editor.and_then(|editor| editor.upgrade(cx)) { +// editor.update(cx, |editor, cx| { +// editor.toggle_warnings(&Default::default(), cx) +// }); +// } +// }, +// )) +// .into_any() +// } +// } + +impl ToolbarItemView for ToolbarControls { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn ItemHandle>, + _: &mut ViewContext, + ) -> ToolbarItemLocation { + if let Some(pane_item) = active_pane_item.as_ref() { + if let Some(editor) = pane_item.downcast::() { + 
self.editor = Some(editor.downgrade()); + ToolbarItemLocation::PrimaryRight { flex: None } + } else { + ToolbarItemLocation::Hidden + } + } else { + ToolbarItemLocation::Hidden + } + } +} + +impl ToolbarControls { + pub fn new() -> Self { + ToolbarControls { editor: None } + } +} + +// fn render_toggle_button< +// F: 'static + Fn(&mut ToolbarControls, &mut EventContext), +// >( +// index: usize, +// icon: &'static str, +// toggled: bool, +// tooltip: (String, Option>), +// cx: &mut ViewContext, +// on_click: F, +// ) -> AnyElement { +// enum Button {} + +// let theme = theme::current(cx); +// let (tooltip_text, action) = tooltip; + +// MouseEventHandler::new::(index, cx, |mouse_state, _| { +// let style = theme +// .workspace +// .toolbar +// .toggleable_tool +// .in_state(toggled) +// .style_for(mouse_state); +// Svg::new(icon) +// .with_color(style.color) +// .constrained() +// .with_width(style.icon_width) +// .aligned() +// .constrained() +// .with_width(style.button_width) +// .with_height(style.button_width) +// .contained() +// .with_style(style.container) +// }) +// .with_cursor_style(CursorStyle::PointingHand) +// .on_click(MouseButton::Left, move |_, view, cx| on_click(view, cx)) +// .with_tooltip::