Merge branch 'main' into derive-element-redux

Created by Conrad Irwin

Change summary

.github/actions/check_formatting/action.yml                    |   15 
.github/actions/run_tests/action.yml                           |   34 
.github/workflows/ci.yml                                       |   37 
.github/workflows/release_nightly.yml                          |   98 
Cargo.lock                                                     |   82 
Cargo.toml                                                     |    2 
assets/icons/warning.svg                                       |    7 
assets/settings/default.json                                   |   13 
crates/assistant/src/assistant_panel.rs                        |   14 
crates/auto_update/src/auto_update.rs                          |   39 
crates/auto_update2/Cargo.toml                                 |   29 
crates/auto_update2/src/auto_update.rs                         |  406 
crates/auto_update2/src/update_notification.rs                 |   87 
crates/call/src/call.rs                                        |   23 
crates/call2/src/call2.rs                                      |   23 
crates/client/Cargo.toml                                       |    1 
crates/client/src/client.rs                                    |   24 
crates/client/src/telemetry.rs                                 |  152 
crates/client2/Cargo.toml                                      |    1 
crates/client2/src/client2.rs                                  |   22 
crates/client2/src/telemetry.rs                                |  152 
crates/collab/src/tests/integration_tests.rs                   |    2 
crates/collab/src/tests/random_project_collaboration_tests.rs  |    3 
crates/collab2/src/tests/integration_tests.rs                  |    2 
crates/collab2/src/tests/random_project_collaboration_tests.rs |    3 
crates/collab_ui/src/chat_panel/message_editor.rs              |   10 
crates/collab_ui2/src/chat_panel/message_editor.rs             |   10 
crates/command_palette2/src/command_palette.rs                 |   12 
crates/diagnostics2/Cargo.toml                                 |   43 
crates/diagnostics2/src/diagnostics.rs                         | 1572 +
crates/diagnostics2/src/items.rs                               |  151 
crates/diagnostics2/src/project_diagnostics_settings.rs        |   28 
crates/diagnostics2/src/toolbar_controls.rs                    |   66 
crates/editor/src/editor.rs                                    |   14 
crates/editor2/src/editor.rs                                   |   66 
crates/editor2/src/editor_tests.rs                             |   10 
crates/editor2/src/element.rs                                  |    4 
crates/editor2/src/items.rs                                    |   19 
crates/editor2/src/scroll.rs                                   |    6 
crates/editor2/src/test/editor_test_context.rs                 |    3 
crates/file_finder2/src/file_finder.rs                         |   15 
crates/go_to_line2/src/go_to_line.rs                           |   20 
crates/gpui2/Cargo.toml                                        |    2 
crates/gpui2/src/app.rs                                        |    4 
crates/gpui2/src/app/async_context.rs                          |   11 
crates/gpui2/src/app/entity_map.rs                             |   11 
crates/gpui2/src/app/test_context.rs                           |   37 
crates/gpui2/src/elements/div.rs                               |    5 
crates/gpui2/src/elements/img.rs                               |    4 
crates/gpui2/src/elements/text.rs                              |   30 
crates/gpui2/src/gpui2.rs                                      |    4 
crates/gpui2/src/image_cache.rs                                |   10 
crates/gpui2/src/platform/mac/window.rs                        |    5 
crates/gpui2/src/taffy.rs                                      |   46 
crates/gpui2/src/view.rs                                       |   16 
crates/gpui2/src/window.rs                                     |   58 
crates/journal2/Cargo.toml                                     |    4 
crates/picker2/src/picker2.rs                                  |    4 
crates/project/src/ignore.rs                                   |    4 
crates/project/src/project.rs                                  |   32 
crates/project/src/project_settings.rs                         |    2 
crates/project/src/project_tests.rs                            |   16 
crates/project/src/search.rs                                   |   20 
crates/project/src/worktree.rs                                 |  346 
crates/project/src/worktree_tests.rs                           |  172 
crates/project2/src/ignore.rs                                  |    4 
crates/project2/src/project2.rs                                |   32 
crates/project2/src/project_settings.rs                        |    2 
crates/project2/src/project_tests.rs                           |   16 
crates/project2/src/search.rs                                  |   20 
crates/project2/src/worktree.rs                                |  342 
crates/project2/src/worktree_tests.rs                          | 4451 ++-
crates/project_panel/src/project_panel.rs                      |  125 
crates/project_panel2/src/project_panel.rs                     |  159 
crates/rpc/proto/zed.proto                                     |    1 
crates/rpc2/proto/zed.proto                                    |    1 
crates/search/src/buffer_search.rs                             |    2 
crates/search/src/project_search.rs                            |   26 
crates/search/src/search.rs                                    |   24 
crates/settings2/src/settings_file.rs                          |    1 
crates/storybook2/src/stories/text.rs                          |   51 
crates/storybook3/src/storybook3.rs                            |   18 
crates/terminal_view2/src/terminal_view.rs                     |   17 
crates/theme2/src/styles/players.rs                            |    2 
crates/theme2/src/theme2.rs                                    |    2 
crates/ui2/Cargo.toml                                          |    2 
crates/ui2/src/components/context_menu.rs                      |  133 
crates/ui2/src/components/icon.rs                              |   36 
crates/ui2/src/components/keybinding.rs                        |   12 
crates/ui2/src/components/list.rs                              |   62 
crates/ui2/src/lib.rs                                          |    6 
crates/ui2/src/static_data.rs                                  |    8 
crates/util/src/channel.rs                                     |   24 
crates/util/src/paths.rs                                       |   32 
crates/workspace2/src/dock.rs                                  |  183 
crates/workspace2/src/notifications.rs                         |    5 
crates/workspace2/src/pane.rs                                  |   12 
crates/workspace2/src/status_bar.rs                            |    2 
crates/workspace2/src/workspace2.rs                            |   21 
crates/zed/Cargo.toml                                          |   10 
crates/zed/contents/nightly/embedded.provisionprofile          |    0 
crates/zed/src/main.rs                                         |    9 
crates/zed/src/only_instance.rs                                |    2 
crates/zed2/Cargo.toml                                         |   16 
crates/zed2/build.rs                                           |   12 
crates/zed2/contents/nightly/embedded.provisionprofile         |    0 
crates/zed2/src/main.rs                                        |   20 
crates/zed2/src/only_instance.rs                               |    2 
crates/zed2/src/zed2.rs                                        |   29 
script/bump-nightly                                            |   11 
script/bump-zed-minor-versions                                 |    5 
script/bump-zed-patch-version                                  |    5 
script/bundle                                                  |   33 
script/deploy                                                  |    7 
script/deploy-migration                                        |    9 
script/upload-nightly                                          |   37 
script/what-is-deployed                                        |    7 
117 files changed, 7,260 insertions(+), 2,951 deletions(-)

Detailed changes

.github/actions/check_formatting/action.yml 🔗

@@ -0,0 +1,15 @@
+name: 'Check formatting'
+description: 'Checks code formatting using cargo fmt'
+
+runs:
+  using: "composite"
+  steps:
+    - name: Install Rust
+      shell: bash -euxo pipefail {0}
+      run: |
+        rustup set profile minimal
+        rustup update stable
+
+    - name: cargo fmt
+      shell: bash -euxo pipefail {0}
+      run: cargo fmt --all -- --check

.github/actions/run_tests/action.yml 🔗

@@ -0,0 +1,34 @@
+name: "Run tests"
+description: "Runs the tests"
+
+runs:
+  using: "composite"
+  steps:
+    - name: Install Rust
+      shell: bash -euxo pipefail {0}
+      run: |
+        rustup set profile minimal
+        rustup update stable
+        rustup target add wasm32-wasi
+        cargo install cargo-nextest
+
+    - name: Install Node
+      uses: actions/setup-node@v3
+      with:
+        node-version: "18"
+
+    - name: Limit target directory size
+      shell: bash -euxo pipefail {0}
+      run: script/clear-target-dir-if-larger-than 70
+
+    - name: Run check
+      env:
+        RUSTFLAGS: -D warnings
+      shell: bash -euxo pipefail {0}
+      run: cargo check --tests --workspace
+
+    - name: Run tests
+      env:
+        RUSTFLAGS: -D warnings
+      shell: bash -euxo pipefail {0}
+      run: cargo nextest run --workspace --no-fail-fast

.github/workflows/ci.yml 🔗

@@ -23,19 +23,14 @@ jobs:
       - self-hosted
       - test
     steps:
-      - name: Install Rust
-        run: |
-          rustup set profile minimal
-          rustup update stable
-
       - name: Checkout repo
         uses: actions/checkout@v3
         with:
           clean: false
           submodules: "recursive"
 
-      - name: cargo fmt
-        run: cargo fmt --all -- --check
+      - name: Run rustfmt
+        uses: ./.github/actions/check_formatting
 
   tests:
     name: Run tests
@@ -43,35 +38,15 @@ jobs:
       - self-hosted
       - test
     needs: rustfmt
-    env:
-      RUSTFLAGS: -D warnings
     steps:
-      - name: Install Rust
-        run: |
-          rustup set profile minimal
-          rustup update stable
-          rustup target add wasm32-wasi
-          cargo install cargo-nextest
-
-      - name: Install Node
-        uses: actions/setup-node@v3
-        with:
-          node-version: "18"
-
       - name: Checkout repo
         uses: actions/checkout@v3
         with:
           clean: false
           submodules: "recursive"
 
-      - name: Limit target directory size
-        run: script/clear-target-dir-if-larger-than 70
-
-      - name: Run check
-        run: cargo check --workspace
-
       - name: Run tests
-        run: cargo nextest run --workspace --no-fail-fast
+        uses: ./.github/actions/run_tests
 
       - name: Build collab
         run: cargo build -p collab
@@ -130,6 +105,8 @@ jobs:
               expected_tag_name="v${version}";;
             preview)
               expected_tag_name="v${version}-pre";;
+            nightly)
+              expected_tag_name="v${version}-nightly";;
             *)
               echo "can't publish a release on channel ${channel}"
               exit 1;;
@@ -154,7 +131,9 @@ jobs:
 
       - uses: softprops/action-gh-release@v1
         name: Upload app bundle to release
-        if: ${{ env.RELEASE_CHANNEL }}
+        # TODO kb seems that zed.dev relies on GitHub releases for release version tracking.
+        # Find alternatives for `nightly` or just go on with more releases?
+        if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
         with:
           draft: true
           prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}

.github/workflows/release_nightly.yml 🔗

@@ -0,0 +1,98 @@
+name: Release Nightly
+
+on:
+  schedule:
+    # Fire every night at 1:00am
+    - cron: "0 1 * * *"
+  push:
+    tags:
+      - "nightly"
+
+env:
+  CARGO_TERM_COLOR: always
+  CARGO_INCREMENTAL: 0
+  RUST_BACKTRACE: 1
+
+jobs:
+  rustfmt:
+    name: Check formatting
+    runs-on:
+      - self-hosted
+      - test
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v3
+        with:
+          clean: false
+          submodules: "recursive"
+
+      - name: Run rustfmt
+        uses: ./.github/actions/check_formatting
+
+  tests:
+    name: Run tests
+    runs-on:
+      - self-hosted
+      - test
+    needs: rustfmt
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v3
+        with:
+          clean: false
+          submodules: "recursive"
+
+      - name: Run tests
+        uses: ./.github/actions/run_tests
+
+  bundle:
+    name: Bundle app
+    runs-on:
+      - self-hosted
+      - bundle
+    needs: tests
+    env:
+      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
+      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
+      APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
+      APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
+      DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
+      DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
+    steps:
+      - name: Install Rust
+        run: |
+          rustup set profile minimal
+          rustup update stable
+          rustup target add aarch64-apple-darwin
+          rustup target add x86_64-apple-darwin
+          rustup target add wasm32-wasi
+
+      - name: Install Node
+        uses: actions/setup-node@v3
+        with:
+          node-version: "18"
+
+      - name: Checkout repo
+        uses: actions/checkout@v3
+        with:
+          clean: false
+          submodules: "recursive"
+
+      - name: Limit target directory size
+        run: script/clear-target-dir-if-larger-than 70
+
+      - name: Set release channel to nightly
+        run: |
+          set -eu
+          version=$(git rev-parse --short HEAD)
+          echo "Publishing version: ${version} on release channel nightly"
+          echo "nightly" > crates/zed/RELEASE_CHANNEL
+
+      - name: Generate license file
+        run: script/generate-licenses
+
+      - name: Create app bundle
+        run: script/bundle -2
+
+      - name: Upload Zed Nightly
+        run: script/upload-nightly

Cargo.lock 🔗

@@ -724,6 +724,30 @@ dependencies = [
  "workspace",
 ]
 
+[[package]]
+name = "auto_update2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "client2",
+ "db2",
+ "gpui2",
+ "isahc",
+ "lazy_static",
+ "log",
+ "menu2",
+ "project2",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "settings2",
+ "smol",
+ "tempdir",
+ "theme2",
+ "util",
+ "workspace2",
+]
+
 [[package]]
 name = "autocfg"
 version = "1.1.0"
@@ -1526,6 +1550,7 @@ dependencies = [
  "anyhow",
  "async-recursion 0.3.2",
  "async-tungstenite",
+ "chrono",
  "collections",
  "db",
  "feature_flags",
@@ -1562,6 +1587,7 @@ dependencies = [
  "anyhow",
  "async-recursion 0.3.2",
  "async-tungstenite",
+ "chrono",
  "collections",
  "db2",
  "feature_flags2",
@@ -2614,6 +2640,34 @@ dependencies = [
  "workspace",
 ]
 
+[[package]]
+name = "diagnostics2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "client2",
+ "collections",
+ "editor2",
+ "futures 0.3.28",
+ "gpui2",
+ "language2",
+ "log",
+ "lsp2",
+ "postage",
+ "project2",
+ "schemars",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "settings2",
+ "smallvec",
+ "theme2",
+ "ui2",
+ "unindent",
+ "util",
+ "workspace2",
+]
+
 [[package]]
 name = "diff"
 version = "0.1.13"
@@ -3759,7 +3813,7 @@ dependencies = [
  "smol",
  "sqlez",
  "sum_tree",
- "taffy",
+ "taffy 0.3.11 (git+https://github.com/DioxusLabs/taffy?rev=4fb530bdd71609bb1d3f76c6a8bde1ba82805d5e)",
  "thiserror",
  "time",
  "tiny-skia",
@@ -3824,7 +3878,7 @@ dependencies = [
  "smol",
  "sqlez",
  "sum_tree",
- "taffy",
+ "taffy 0.3.11 (git+https://github.com/DioxusLabs/taffy?rev=1876f72bee5e376023eaa518aa7b8a34c769bd1b)",
  "thiserror",
  "time",
  "tiny-skia",
@@ -3859,6 +3913,12 @@ version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "eec1c01eb1de97451ee0d60de7d81cf1e72aabefb021616027f3d1c3ec1c723c"
 
+[[package]]
+name = "grid"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1df00eed8d1f0db937f6be10e46e8072b0671accb504cf0f959c5c52c679f5b9"
+
 [[package]]
 name = "h2"
 version = "0.3.21"
@@ -4486,7 +4546,7 @@ dependencies = [
  "anyhow",
  "chrono",
  "dirs 4.0.0",
- "editor",
+ "editor2",
  "gpui2",
  "log",
  "schemars",
@@ -9053,13 +9113,24 @@ dependencies = [
  "winx",
 ]
 
+[[package]]
+name = "taffy"
+version = "0.3.11"
+source = "git+https://github.com/DioxusLabs/taffy?rev=1876f72bee5e376023eaa518aa7b8a34c769bd1b#1876f72bee5e376023eaa518aa7b8a34c769bd1b"
+dependencies = [
+ "arrayvec 0.7.4",
+ "grid 0.11.0",
+ "num-traits",
+ "slotmap",
+]
+
 [[package]]
 name = "taffy"
 version = "0.3.11"
 source = "git+https://github.com/DioxusLabs/taffy?rev=4fb530bdd71609bb1d3f76c6a8bde1ba82805d5e#4fb530bdd71609bb1d3f76c6a8bde1ba82805d5e"
 dependencies = [
  "arrayvec 0.7.4",
- "grid",
+ "grid 0.10.0",
  "num-traits",
  "slotmap",
 ]
@@ -11543,6 +11614,7 @@ dependencies = [
  "async-recursion 0.3.2",
  "async-tar",
  "async-trait",
+ "auto_update2",
  "backtrace",
  "call2",
  "chrono",
@@ -11554,6 +11626,7 @@ dependencies = [
  "copilot2",
  "ctor",
  "db2",
+ "diagnostics2",
  "editor2",
  "env_logger 0.9.3",
  "feature_flags2",
@@ -11571,7 +11644,6 @@ dependencies = [
  "isahc",
  "journal2",
  "language2",
- "language_tools",
  "lazy_static",
  "libc",
  "log",

Cargo.toml 🔗

@@ -6,6 +6,7 @@ members = [
     "crates/audio",
     "crates/audio2",
     "crates/auto_update",
+    "crates/auto_update2",
     "crates/breadcrumbs",
     "crates/call",
     "crates/call2",
@@ -32,6 +33,7 @@ members = [
     "crates/refineable",
     "crates/refineable/derive_refineable",
     "crates/diagnostics",
+    "crates/diagnostics2",
     "crates/drag_and_drop",
     "crates/editor",
     "crates/feature_flags",

assets/icons/warning.svg 🔗

@@ -1,6 +1 @@
-<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M2.45563 12.3438H11.5444C11.9137 12.3438 12.1556 11.9571 11.994 11.625L10.2346 8.00952C9.77174 7.05841 8.89104 6.37821 7.85383 6.17077C7.29019 6.05804 6.70981 6.05804 6.14617 6.17077C5.10896 6.37821 4.22826 7.05841 3.76542 8.00952L2.00603 11.625C1.84442 11.9571 2.08628 12.3438 2.45563 12.3438Z" fill="#001A33" fill-opacity="0.157"/>
-<path d="M9.5 6.5L11.994 11.625C12.1556 11.9571 11.9137 12.3438 11.5444 12.3438H2.45563C2.08628 12.3438 1.84442 11.9571 2.00603 11.625L4.5 6.5" stroke="#11181C" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M7 7L7 2" stroke="#11181C" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
-<circle cx="7" cy="9.24219" r="0.75" fill="#11181C"/>
-</svg>
+<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-alert-triangle"><path d="m21.73 18-8-14a2 2 0 0 0-3.48 0l-8 14A2 2 0 0 0 4 21h16a2 2 0 0 0 1.73-3Z"/><path d="M12 9v4"/><path d="M12 17h.01"/></svg>

assets/settings/default.json 🔗

@@ -268,6 +268,19 @@
     // Whether to show warnings or not by default.
     "include_warnings": true
   },
+  // Add files or globs of files that will be excluded by Zed entirely:
+  // they will be skipped during FS scan(s), file tree and file search
+  // will lack the corresponding file entries.
+  "file_scan_exclusions": [
+    "**/.git",
+    "**/.svn",
+    "**/.hg",
+    "**/CVS",
+    "**/.DS_Store",
+    "**/Thumbs.db",
+    "**/.classpath",
+    "**/.settings"
+  ],
   // Git gutter behavior configuration.
   "git": {
     // Control whether the git gutter is shown. May take 2 values:

crates/assistant/src/assistant_panel.rs 🔗

@@ -15,7 +15,7 @@ use ai::{
 use ai::prompts::repository_context::PromptCodeSnippet;
 use anyhow::{anyhow, Result};
 use chrono::{DateTime, Local};
-use client::{telemetry::AssistantKind, ClickhouseEvent, TelemetrySettings};
+use client::{telemetry::AssistantKind, TelemetrySettings};
 use collections::{hash_map, HashMap, HashSet, VecDeque};
 use editor::{
     display_map::{
@@ -3803,12 +3803,12 @@ fn report_assistant_event(
         .default_open_ai_model
         .clone();
 
-    let event = ClickhouseEvent::Assistant {
-        conversation_id,
-        kind: assistant_kind,
-        model: model.full_name(),
-    };
     let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
 
-    telemetry.report_clickhouse_event(event, telemetry_settings)
+    telemetry.report_assistant_event(
+        telemetry_settings,
+        conversation_id,
+        assistant_kind,
+        model.full_name(),
+    )
 }

crates/auto_update/src/auto_update.rs 🔗

@@ -118,14 +118,18 @@ fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
         let auto_updater = auto_updater.read(cx);
         let server_url = &auto_updater.server_url;
         let current_version = auto_updater.current_version;
-        let latest_release_url = if cx.has_global::<ReleaseChannel>()
-            && *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview
-        {
-            format!("{server_url}/releases/preview/{current_version}")
-        } else {
-            format!("{server_url}/releases/stable/{current_version}")
-        };
-        cx.platform().open_url(&latest_release_url);
+        if cx.has_global::<ReleaseChannel>() {
+            match cx.global::<ReleaseChannel>() {
+                ReleaseChannel::Dev => {}
+                ReleaseChannel::Nightly => {}
+                ReleaseChannel::Preview => cx
+                    .platform()
+                    .open_url(&format!("{server_url}/releases/preview/{current_version}")),
+                ReleaseChannel::Stable => cx
+                    .platform()
+                    .open_url(&format!("{server_url}/releases/stable/{current_version}")),
+            }
+        }
     }
 }
 
@@ -224,22 +228,19 @@ impl AutoUpdater {
             )
         });
 
-        let preview_param = cx.read(|cx| {
+        let mut url_string = format!(
+            "{server_url}/api/releases/latest?token={ZED_SECRET_CLIENT_TOKEN}&asset=Zed.dmg"
+        );
+        cx.read(|cx| {
             if cx.has_global::<ReleaseChannel>() {
-                if *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview {
-                    return "&preview=1";
+                if let Some(param) = cx.global::<ReleaseChannel>().release_query_param() {
+                    url_string += "&";
+                    url_string += param;
                 }
             }
-            ""
         });
 
-        let mut response = client
-            .get(
-                &format!("{server_url}/api/releases/latest?token={ZED_SECRET_CLIENT_TOKEN}&asset=Zed.dmg{preview_param}"),
-                Default::default(),
-                true,
-            )
-            .await?;
+        let mut response = client.get(&url_string, Default::default(), true).await?;
 
         let mut body = Vec::new();
         response

crates/auto_update2/Cargo.toml 🔗

@@ -0,0 +1,29 @@
+[package]
+name = "auto_update2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/auto_update.rs"
+doctest = false
+
+[dependencies]
+db = { package = "db2", path = "../db2" }
+client = { package = "client2", path = "../client2" }
+gpui = { package = "gpui2", path = "../gpui2" }
+menu = { package = "menu2", path = "../menu2" }
+project = { package = "project2", path = "../project2" }
+settings = { package = "settings2", path = "../settings2" }
+theme = { package = "theme2", path = "../theme2" }
+workspace = { package = "workspace2", path = "../workspace2" }
+util = { path = "../util" }
+anyhow.workspace = true
+isahc.workspace = true
+lazy_static.workspace = true
+log.workspace = true
+serde.workspace = true
+serde_derive.workspace = true
+serde_json.workspace = true
+smol.workspace = true
+tempdir.workspace = true

crates/auto_update2/src/auto_update.rs 🔗

@@ -0,0 +1,406 @@
+mod update_notification;
+
+use anyhow::{anyhow, Context, Result};
+use client::{Client, TelemetrySettings, ZED_APP_PATH, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN};
+use db::kvp::KEY_VALUE_STORE;
+use db::RELEASE_CHANNEL;
+use gpui::{
+    actions, AppContext, AsyncAppContext, Context as _, Model, ModelContext, SemanticVersion, Task,
+    ViewContext, VisualContext,
+};
+use isahc::AsyncBody;
+use serde::Deserialize;
+use serde_derive::Serialize;
+use smol::io::AsyncReadExt;
+
+use settings::{Settings, SettingsStore};
+use smol::{fs::File, process::Command};
+use std::{ffi::OsString, sync::Arc, time::Duration};
+use update_notification::UpdateNotification;
+use util::channel::{AppCommitSha, ReleaseChannel};
+use util::http::HttpClient;
+use workspace::Workspace;
+
+const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
+const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60);
+
+//todo!(remove CheckThatAutoUpdaterWorks)
+actions!(
+    Check,
+    DismissErrorMessage,
+    ViewReleaseNotes,
+    CheckThatAutoUpdaterWorks
+);
+
+#[derive(Serialize)]
+struct UpdateRequestBody {
+    installation_id: Option<Arc<str>>,
+    release_channel: Option<&'static str>,
+    telemetry: bool,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum AutoUpdateStatus {
+    Idle,
+    Checking,
+    Downloading,
+    Installing,
+    Updated,
+    Errored,
+}
+
+pub struct AutoUpdater {
+    status: AutoUpdateStatus,
+    current_version: SemanticVersion,
+    http_client: Arc<dyn HttpClient>,
+    pending_poll: Option<Task<Option<()>>>,
+    server_url: String,
+}
+
+#[derive(Deserialize)]
+struct JsonRelease {
+    version: String,
+    url: String,
+}
+
+struct AutoUpdateSetting(bool);
+
+impl Settings for AutoUpdateSetting {
+    const KEY: Option<&'static str> = Some("auto_update");
+
+    type FileContent = Option<bool>;
+
+    fn load(
+        default_value: &Option<bool>,
+        user_values: &[&Option<bool>],
+        _: &mut AppContext,
+    ) -> Result<Self> {
+        Ok(Self(
+            Self::json_merge(default_value, user_values)?.ok_or_else(Self::missing_default)?,
+        ))
+    }
+}
+
+pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut AppContext) {
+    AutoUpdateSetting::register(cx);
+
+    cx.observe_new_views(|workspace: &mut Workspace, _cx| {
+        workspace
+            .register_action(|_, action: &Check, cx| check(action, cx))
+            .register_action(|_, _action: &CheckThatAutoUpdaterWorks, cx| {
+                let prompt = cx.prompt(gpui::PromptLevel::Info, "It does!", &["Ok"]);
+                cx.spawn(|_, _cx| async move {
+                    prompt.await.ok();
+                })
+                .detach();
+            });
+    })
+    .detach();
+
+    if let Some(version) = *ZED_APP_VERSION {
+        let auto_updater = cx.build_model(|cx| {
+            let updater = AutoUpdater::new(version, http_client, server_url);
+
+            let mut update_subscription = AutoUpdateSetting::get_global(cx)
+                .0
+                .then(|| updater.start_polling(cx));
+
+            cx.observe_global::<SettingsStore>(move |updater, cx| {
+                if AutoUpdateSetting::get_global(cx).0 {
+                    if update_subscription.is_none() {
+                        update_subscription = Some(updater.start_polling(cx))
+                    }
+                } else {
+                    update_subscription.take();
+                }
+            })
+            .detach();
+
+            updater
+        });
+        cx.set_global(Some(auto_updater));
+        //todo!(action)
+        // cx.add_global_action(view_release_notes);
+        // cx.add_action(UpdateNotification::dismiss);
+    }
+}
+
+pub fn check(_: &Check, cx: &mut AppContext) {
+    if let Some(updater) = AutoUpdater::get(cx) {
+        updater.update(cx, |updater, cx| updater.poll(cx));
+    }
+}
+
+fn _view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
+    if let Some(auto_updater) = AutoUpdater::get(cx) {
+        let auto_updater = auto_updater.read(cx);
+        let server_url = &auto_updater.server_url;
+        let current_version = auto_updater.current_version;
+        if cx.has_global::<ReleaseChannel>() {
+            match cx.global::<ReleaseChannel>() {
+                ReleaseChannel::Dev => {}
+                ReleaseChannel::Nightly => {}
+                ReleaseChannel::Preview => {
+                    cx.open_url(&format!("{server_url}/releases/preview/{current_version}"))
+                }
+                ReleaseChannel::Stable => {
+                    cx.open_url(&format!("{server_url}/releases/stable/{current_version}"))
+                }
+            }
+        }
+    }
+}
+
+pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
+    let updater = AutoUpdater::get(cx)?;
+    let version = updater.read(cx).current_version;
+    let should_show_notification = updater.read(cx).should_show_update_notification(cx);
+
+    cx.spawn(|workspace, mut cx| async move {
+        let should_show_notification = should_show_notification.await?;
+        if should_show_notification {
+            workspace.update(&mut cx, |workspace, cx| {
+                workspace.show_notification(0, cx, |cx| {
+                    cx.build_view(|_| UpdateNotification::new(version))
+                });
+                updater
+                    .read(cx)
+                    .set_should_show_update_notification(false, cx)
+                    .detach_and_log_err(cx);
+            })?;
+        }
+        anyhow::Ok(())
+    })
+    .detach();
+
+    None
+}
+
+impl AutoUpdater {
+    pub fn get(cx: &mut AppContext) -> Option<Model<Self>> {
+        cx.default_global::<Option<Model<Self>>>().clone()
+    }
+
+    fn new(
+        current_version: SemanticVersion,
+        http_client: Arc<dyn HttpClient>,
+        server_url: String,
+    ) -> Self {
+        Self {
+            status: AutoUpdateStatus::Idle,
+            current_version,
+            http_client,
+            server_url,
+            pending_poll: None,
+        }
+    }
+
+    pub fn start_polling(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+        cx.spawn(|this, mut cx| async move {
+            loop {
+                this.update(&mut cx, |this, cx| this.poll(cx))?;
+                cx.background_executor().timer(POLL_INTERVAL).await;
+            }
+        })
+    }
+
+    pub fn poll(&mut self, cx: &mut ModelContext<Self>) {
+        if self.pending_poll.is_some() || self.status == AutoUpdateStatus::Updated {
+            return;
+        }
+
+        self.status = AutoUpdateStatus::Checking;
+        cx.notify();
+
+        self.pending_poll = Some(cx.spawn(|this, mut cx| async move {
+            let result = Self::update(this.upgrade()?, cx.clone()).await;
+            this.update(&mut cx, |this, cx| {
+                this.pending_poll = None;
+                if let Err(error) = result {
+                    log::error!("auto-update failed: error:{:?}", error);
+                    this.status = AutoUpdateStatus::Errored;
+                    cx.notify();
+                }
+            })
+            .ok()
+        }));
+    }
+
+    pub fn status(&self) -> AutoUpdateStatus {
+        self.status
+    }
+
+    pub fn dismiss_error(&mut self, cx: &mut ModelContext<Self>) {
+        self.status = AutoUpdateStatus::Idle;
+        cx.notify();
+    }
+
+    async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
+        let (client, server_url, current_version) = this.read_with(&cx, |this, _| {
+            (
+                this.http_client.clone(),
+                this.server_url.clone(),
+                this.current_version,
+            )
+        })?;
+
+        let mut url_string = format!(
+            "{server_url}/api/releases/latest?token={ZED_SECRET_CLIENT_TOKEN}&asset=Zed.dmg"
+        );
+        cx.update(|cx| {
+            if cx.has_global::<ReleaseChannel>() {
+                if let Some(param) = cx.global::<ReleaseChannel>().release_query_param() {
+                    url_string += "&";
+                    url_string += param;
+                }
+            }
+        })?;
+
+        let mut response = client.get(&url_string, Default::default(), true).await?;
+
+        let mut body = Vec::new();
+        response
+            .body_mut()
+            .read_to_end(&mut body)
+            .await
+            .context("error reading release")?;
+        let release: JsonRelease =
+            serde_json::from_slice(body.as_slice()).context("error deserializing release")?;
+
+        let should_download = match *RELEASE_CHANNEL {
+            ReleaseChannel::Nightly => cx
+                .try_read_global::<AppCommitSha, _>(|sha, _| release.version != sha.0)
+                .unwrap_or(true),
+            _ => release.version.parse::<SemanticVersion>()? <= current_version,
+        };
+
+        if !should_download {
+            this.update(&mut cx, |this, cx| {
+                this.status = AutoUpdateStatus::Idle;
+                cx.notify();
+            })?;
+            return Ok(());
+        }
+
+        this.update(&mut cx, |this, cx| {
+            this.status = AutoUpdateStatus::Downloading;
+            cx.notify();
+        })?;
+
+        let temp_dir = tempdir::TempDir::new("zed-auto-update")?;
+        let dmg_path = temp_dir.path().join("Zed.dmg");
+        let mount_path = temp_dir.path().join("Zed");
+        let running_app_path = ZED_APP_PATH
+            .clone()
+            .map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?;
+        let running_app_filename = running_app_path
+            .file_name()
+            .ok_or_else(|| anyhow!("invalid running app path"))?;
+        let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
+        mounted_app_path.push("/");
+
+        let mut dmg_file = File::create(&dmg_path).await?;
+
+        let (installation_id, release_channel, telemetry) = cx.update(|cx| {
+            let installation_id = cx.global::<Arc<Client>>().telemetry().installation_id();
+            let release_channel = cx
+                .has_global::<ReleaseChannel>()
+                .then(|| cx.global::<ReleaseChannel>().display_name());
+            let telemetry = TelemetrySettings::get_global(cx).metrics;
+
+            (installation_id, release_channel, telemetry)
+        })?;
+
+        let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
+            installation_id,
+            release_channel,
+            telemetry,
+        })?);
+
+        let mut response = client.get(&release.url, request_body, true).await?;
+        smol::io::copy(response.body_mut(), &mut dmg_file).await?;
+        log::info!("downloaded update. path:{:?}", dmg_path);
+
+        this.update(&mut cx, |this, cx| {
+            this.status = AutoUpdateStatus::Installing;
+            cx.notify();
+        })?;
+
+        let output = Command::new("hdiutil")
+            .args(&["attach", "-nobrowse"])
+            .arg(&dmg_path)
+            .arg("-mountroot")
+            .arg(&temp_dir.path())
+            .output()
+            .await?;
+        if !output.status.success() {
+            Err(anyhow!(
+                "failed to mount: {:?}",
+                String::from_utf8_lossy(&output.stderr)
+            ))?;
+        }
+
+        let output = Command::new("rsync")
+            .args(&["-av", "--delete"])
+            .arg(&mounted_app_path)
+            .arg(&running_app_path)
+            .output()
+            .await?;
+        if !output.status.success() {
+            Err(anyhow!(
+                "failed to copy app: {:?}",
+                String::from_utf8_lossy(&output.stderr)
+            ))?;
+        }
+
+        let output = Command::new("hdiutil")
+            .args(&["detach"])
+            .arg(&mount_path)
+            .output()
+            .await?;
+        if !output.status.success() {
+            Err(anyhow!(
+                "failed to unmount: {:?}",
+                String::from_utf8_lossy(&output.stderr)
+            ))?;
+        }
+
+        this.update(&mut cx, |this, cx| {
+            this.set_should_show_update_notification(true, cx)
+                .detach_and_log_err(cx);
+            this.status = AutoUpdateStatus::Updated;
+            cx.notify();
+        })?;
+        Ok(())
+    }
+
+    fn set_should_show_update_notification(
+        &self,
+        should_show: bool,
+        cx: &AppContext,
+    ) -> Task<Result<()>> {
+        cx.background_executor().spawn(async move {
+            if should_show {
+                KEY_VALUE_STORE
+                    .write_kvp(
+                        SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
+                        "".to_string(),
+                    )
+                    .await?;
+            } else {
+                KEY_VALUE_STORE
+                    .delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
+                    .await?;
+            }
+            Ok(())
+        })
+    }
+
+    fn should_show_update_notification(&self, cx: &AppContext) -> Task<Result<bool>> {
+        cx.background_executor().spawn(async move {
+            Ok(KEY_VALUE_STORE
+                .read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
+                .is_some())
+        })
+    }
+}
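
Note: the `release_query_param()` helper used in the hunks above (and in the client `get_rpc_url` changes below) is defined in crates/util/src/channel.rs, which is changed in this merge but not shown in this excerpt. A hypothetical sketch of its shape, inferred from the call sites and the old hard-coded "&preview=1" behavior — the variant list and parameter strings are assumptions, not the committed code:

// Hypothetical sketch; mirrors util::channel::ReleaseChannel as used above.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum ReleaseChannel {
    Dev,
    Nightly,
    Preview,
    Stable,
}

impl ReleaseChannel {
    /// Query parameter appended to release/RPC URLs; `None` means no parameter.
    pub fn release_query_param(&self) -> Option<&'static str> {
        match self {
            ReleaseChannel::Dev | ReleaseChannel::Stable => None,
            ReleaseChannel::Nightly => Some("nightly=1"),
            ReleaseChannel::Preview => Some("preview=1"),
        }
    }
}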

crates/auto_update2/src/update_notification.rs 🔗

@@ -0,0 +1,87 @@
+use gpui::{div, Div, EventEmitter, ParentComponent, Render, SemanticVersion, ViewContext};
+use menu::Cancel;
+use workspace::notifications::NotificationEvent;
+
+pub struct UpdateNotification {
+    _version: SemanticVersion,
+}
+
+impl EventEmitter<NotificationEvent> for UpdateNotification {}
+
+impl Render for UpdateNotification {
+    type Element = Div<Self>;
+
+    fn render(&mut self, _cx: &mut gpui::ViewContext<Self>) -> Self::Element {
+        div().child("Updated zed!")
+        // let theme = theme::current(cx).clone();
+        // let theme = &theme.update_notification;
+
+        // let app_name = cx.global::<ReleaseChannel>().display_name();
+
+        // MouseEventHandler::new::<ViewReleaseNotes, _>(0, cx, |state, cx| {
+        //     Flex::column()
+        //         .with_child(
+        //             Flex::row()
+        //                 .with_child(
+        //                     Text::new(
+        //                         format!("Updated to {app_name} {}", self.version),
+        //                         theme.message.text.clone(),
+        //                     )
+        //                     .contained()
+        //                     .with_style(theme.message.container)
+        //                     .aligned()
+        //                     .top()
+        //                     .left()
+        //                     .flex(1., true),
+        //                 )
+        //                 .with_child(
+        //                     MouseEventHandler::new::<Cancel, _>(0, cx, |state, _| {
+        //                         let style = theme.dismiss_button.style_for(state);
+        //                         Svg::new("icons/x.svg")
+        //                             .with_color(style.color)
+        //                             .constrained()
+        //                             .with_width(style.icon_width)
+        //                             .aligned()
+        //                             .contained()
+        //                             .with_style(style.container)
+        //                             .constrained()
+        //                             .with_width(style.button_width)
+        //                             .with_height(style.button_width)
+        //                     })
+        //                     .with_padding(Padding::uniform(5.))
+        //                     .on_click(MouseButton::Left, move |_, this, cx| {
+        //                         this.dismiss(&Default::default(), cx)
+        //                     })
+        //                     .aligned()
+        //                     .constrained()
+        //                     .with_height(cx.font_cache().line_height(theme.message.text.font_size))
+        //                     .aligned()
+        //                     .top()
+        //                     .flex_float(),
+        //                 ),
+        //         )
+        //         .with_child({
+        //             let style = theme.action_message.style_for(state);
+        //             Text::new("View the release notes", style.text.clone())
+        //                 .contained()
+        //                 .with_style(style.container)
+        //         })
+        //         .contained()
+        // })
+        // .with_cursor_style(CursorStyle::PointingHand)
+        // .on_click(MouseButton::Left, |_, _, cx| {
+        //     crate::view_release_notes(&Default::default(), cx)
+        // })
+        // .into_any_named("update notification")
+    }
+}
+
+impl UpdateNotification {
+    pub fn new(version: SemanticVersion) -> Self {
+        Self { _version: version }
+    }
+
+    pub fn _dismiss(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
+        cx.emit(NotificationEvent::Dismiss);
+    }
+}

crates/call/src/call.rs 🔗

@@ -5,10 +5,7 @@ pub mod room;
 use anyhow::{anyhow, Result};
 use audio::Audio;
 use call_settings::CallSettings;
-use client::{
-    proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
-    ZED_ALWAYS_ACTIVE,
-};
+use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
 use collections::HashSet;
 use futures::{channel::oneshot, future::Shared, Future, FutureExt};
 use gpui::{
@@ -485,12 +482,8 @@ pub fn report_call_event_for_room(
 ) {
     let telemetry = client.telemetry();
     let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
-    let event = ClickhouseEvent::Call {
-        operation,
-        room_id: Some(room_id),
-        channel_id,
-    };
-    telemetry.report_clickhouse_event(event, telemetry_settings);
+
+    telemetry.report_call_event(telemetry_settings, operation, Some(room_id), channel_id)
 }
 
 pub fn report_call_event_for_channel(
@@ -504,12 +497,12 @@ pub fn report_call_event_for_channel(
     let telemetry = client.telemetry();
     let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
 
-    let event = ClickhouseEvent::Call {
+    telemetry.report_call_event(
+        telemetry_settings,
         operation,
-        room_id: room.map(|r| r.read(cx).id()),
-        channel_id: Some(channel_id),
-    };
-    telemetry.report_clickhouse_event(event, telemetry_settings);
+        room.map(|r| r.read(cx).id()),
+        Some(channel_id),
+    )
 }
 
 #[cfg(test)]

crates/call2/src/call2.rs 🔗

@@ -5,10 +5,7 @@ pub mod room;
 use anyhow::{anyhow, Result};
 use audio::Audio;
 use call_settings::CallSettings;
-use client::{
-    proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
-    ZED_ALWAYS_ACTIVE,
-};
+use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
 use collections::HashSet;
 use futures::{channel::oneshot, future::Shared, Future, FutureExt};
 use gpui::{
@@ -484,12 +481,8 @@ pub fn report_call_event_for_room(
 ) {
     let telemetry = client.telemetry();
     let telemetry_settings = *TelemetrySettings::get_global(cx);
-    let event = ClickhouseEvent::Call {
-        operation,
-        room_id: Some(room_id),
-        channel_id,
-    };
-    telemetry.report_clickhouse_event(event, telemetry_settings);
+
+    telemetry.report_call_event(telemetry_settings, operation, Some(room_id), channel_id)
 }
 
 pub fn report_call_event_for_channel(
@@ -504,12 +497,12 @@ pub fn report_call_event_for_channel(
 
     let telemetry_settings = *TelemetrySettings::get_global(cx);
 
-    let event = ClickhouseEvent::Call {
+    telemetry.report_call_event(
+        telemetry_settings,
         operation,
-        room_id: room.map(|r| r.read(cx).id()),
-        channel_id: Some(channel_id),
-    };
-    telemetry.report_clickhouse_event(event, telemetry_settings);
+        room.map(|r| r.read(cx).id()),
+        Some(channel_id),
+    )
 }
 
 #[cfg(test)]

crates/client/Cargo.toml 🔗

@@ -12,6 +12,7 @@ doctest = false
 test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
 
 [dependencies]
+chrono = { version = "0.4", features = ["serde"] }
 collections = { path = "../collections" }
 db = { path = "../db" }
 gpui = { path = "../gpui" }

crates/client/src/client.rs 🔗

@@ -987,9 +987,17 @@ impl Client {
         self.establish_websocket_connection(credentials, cx)
     }
 
-    async fn get_rpc_url(http: Arc<dyn HttpClient>, is_preview: bool) -> Result<Url> {
-        let preview_param = if is_preview { "?preview=1" } else { "" };
-        let url = format!("{}/rpc{preview_param}", *ZED_SERVER_URL);
+    async fn get_rpc_url(
+        http: Arc<dyn HttpClient>,
+        release_channel: Option<ReleaseChannel>,
+    ) -> Result<Url> {
+        let mut url = format!("{}/rpc", *ZED_SERVER_URL);
+        if let Some(preview_param) =
+            release_channel.and_then(|channel| channel.release_query_param())
+        {
+            url += "?";
+            url += preview_param;
+        }
         let response = http.get(&url, Default::default(), false).await?;
 
         // Normally, ZED_SERVER_URL is set to the URL of zed.dev website.
@@ -1024,11 +1032,11 @@ impl Client {
         credentials: &Credentials,
         cx: &AsyncAppContext,
     ) -> Task<Result<Connection, EstablishConnectionError>> {
-        let use_preview_server = cx.read(|cx| {
+        let release_channel = cx.read(|cx| {
             if cx.has_global::<ReleaseChannel>() {
-                *cx.global::<ReleaseChannel>() != ReleaseChannel::Stable
+                Some(*cx.global::<ReleaseChannel>())
             } else {
-                false
+                None
             }
         });
 
@@ -1041,7 +1049,7 @@ impl Client {
 
         let http = self.http.clone();
         cx.background().spawn(async move {
-            let mut rpc_url = Self::get_rpc_url(http, use_preview_server).await?;
+            let mut rpc_url = Self::get_rpc_url(http, release_channel).await?;
             let rpc_host = rpc_url
                 .host_str()
                 .zip(rpc_url.port_or_known_default())
@@ -1191,7 +1199,7 @@ impl Client {
 
         // Use the collab server's admin API to retrieve the id
         // of the impersonated user.
-        let mut url = Self::get_rpc_url(http.clone(), false).await?;
+        let mut url = Self::get_rpc_url(http.clone(), None).await?;
         url.set_path("/user");
         url.set_query(Some(&format!("github_login={login}")));
         let request = Request::get(url.as_str())

crates/client/src/telemetry.rs 🔗

@@ -1,4 +1,5 @@
 use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
+use chrono::{DateTime, Utc};
 use gpui::{executor::Background, serde_json, AppContext, Task};
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
@@ -20,7 +21,7 @@ pub struct Telemetry {
 #[derive(Default)]
 struct TelemetryState {
     metrics_id: Option<Arc<str>>,      // Per logged-in user
-    installation_id: Option<Arc<str>>, // Per app installation (different for dev, preview, and stable)
+    installation_id: Option<Arc<str>>, // Per app installation (different for dev, nightly, preview, and stable)
     session_id: Option<Arc<str>>,      // Per app launch
     app_version: Option<Arc<str>>,
     release_channel: Option<&'static str>,
@@ -31,6 +32,7 @@ struct TelemetryState {
     flush_clickhouse_events_task: Option<Task<()>>,
     log_file: Option<NamedTempFile>,
     is_staff: Option<bool>,
+    first_event_datetime: Option<DateTime<Utc>>,
 }
 
 const CLICKHOUSE_EVENTS_URL_PATH: &'static str = "/api/events";
@@ -77,29 +79,35 @@ pub enum ClickhouseEvent {
         vim_mode: bool,
         copilot_enabled: bool,
         copilot_enabled_for_language: bool,
+        milliseconds_since_first_event: i64,
     },
     Copilot {
         suggestion_id: Option<String>,
         suggestion_accepted: bool,
         file_extension: Option<String>,
+        milliseconds_since_first_event: i64,
     },
     Call {
         operation: &'static str,
         room_id: Option<u64>,
         channel_id: Option<u64>,
+        milliseconds_since_first_event: i64,
     },
     Assistant {
         conversation_id: Option<String>,
         kind: AssistantKind,
         model: &'static str,
+        milliseconds_since_first_event: i64,
     },
     Cpu {
         usage_as_percentage: f32,
         core_count: u32,
+        milliseconds_since_first_event: i64,
     },
     Memory {
         memory_in_bytes: u64,
         virtual_memory_in_bytes: u64,
+        milliseconds_since_first_event: i64,
     },
 }
 
@@ -140,6 +148,7 @@ impl Telemetry {
                 flush_clickhouse_events_task: Default::default(),
                 log_file: None,
                 is_staff: None,
+                first_event_datetime: None,
             }),
         });
 
@@ -195,20 +204,18 @@ impl Telemetry {
                     return;
                 };
 
-                let memory_event = ClickhouseEvent::Memory {
-                    memory_in_bytes: process.memory(),
-                    virtual_memory_in_bytes: process.virtual_memory(),
-                };
-
-                let cpu_event = ClickhouseEvent::Cpu {
-                    usage_as_percentage: process.cpu_usage(),
-                    core_count: system.cpus().len() as u32,
-                };
-
                 let telemetry_settings = cx.update(|cx| *settings::get::<TelemetrySettings>(cx));
 
-                this.report_clickhouse_event(memory_event, telemetry_settings);
-                this.report_clickhouse_event(cpu_event, telemetry_settings);
+                this.report_memory_event(
+                    telemetry_settings,
+                    process.memory(),
+                    process.virtual_memory(),
+                );
+                this.report_cpu_event(
+                    telemetry_settings,
+                    process.cpu_usage(),
+                    system.cpus().len() as u32,
+                );
             }
         })
         .detach();
@@ -231,7 +238,123 @@ impl Telemetry {
         drop(state);
     }
 
-    pub fn report_clickhouse_event(
+    pub fn report_editor_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        file_extension: Option<String>,
+        vim_mode: bool,
+        operation: &'static str,
+        copilot_enabled: bool,
+        copilot_enabled_for_language: bool,
+    ) {
+        let event = ClickhouseEvent::Editor {
+            file_extension,
+            vim_mode,
+            operation,
+            copilot_enabled,
+            copilot_enabled_for_language,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_copilot_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        suggestion_id: Option<String>,
+        suggestion_accepted: bool,
+        file_extension: Option<String>,
+    ) {
+        let event = ClickhouseEvent::Copilot {
+            suggestion_id,
+            suggestion_accepted,
+            file_extension,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_assistant_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        conversation_id: Option<String>,
+        kind: AssistantKind,
+        model: &'static str,
+    ) {
+        let event = ClickhouseEvent::Assistant {
+            conversation_id,
+            kind,
+            model,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_call_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        operation: &'static str,
+        room_id: Option<u64>,
+        channel_id: Option<u64>,
+    ) {
+        let event = ClickhouseEvent::Call {
+            operation,
+            room_id,
+            channel_id,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_cpu_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        usage_as_percentage: f32,
+        core_count: u32,
+    ) {
+        let event = ClickhouseEvent::Cpu {
+            usage_as_percentage,
+            core_count,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_memory_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        memory_in_bytes: u64,
+        virtual_memory_in_bytes: u64,
+    ) {
+        let event = ClickhouseEvent::Memory {
+            memory_in_bytes,
+            virtual_memory_in_bytes,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    fn milliseconds_since_first_event(&self) -> i64 {
+        let mut state = self.state.lock();
+        match state.first_event_datetime {
+            Some(first_event_datetime) => {
+                let now: DateTime<Utc> = Utc::now();
+                now.timestamp_millis() - first_event_datetime.timestamp_millis()
+            }
+            None => {
+                state.first_event_datetime = Some(Utc::now());
+                0
+            }
+        }
+    }
+
+    fn report_clickhouse_event(
         self: &Arc<Self>,
         event: ClickhouseEvent,
         telemetry_settings: TelemetrySettings,
@@ -275,6 +398,7 @@ impl Telemetry {
 
     fn flush_clickhouse_events(self: &Arc<Self>) {
         let mut state = self.state.lock();
+        state.first_event_datetime = None;
         let mut events = mem::take(&mut state.clickhouse_events_queue);
         state.flush_clickhouse_events_task.take();
         drop(state);
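
For context on the new call-site shape: callers no longer construct a `ClickhouseEvent` by hand; they invoke the typed `report_*` helpers, and the telemetry layer stamps `milliseconds_since_first_event` itself (0 for the first event after a flush, the elapsed offset afterwards, reset by `flush_clickhouse_events`). A minimal usage sketch, not taken from the diff — the function, bindings, and metric values are placeholders; only the `report_*` signatures mirror the API added above:

// Hypothetical caller in the gpui1 `client` crate; imports from client/settings assumed.
fn report_example_events(telemetry: &Arc<Telemetry>, cx: &AppContext) {
    let telemetry_settings = *settings::get::<TelemetrySettings>(cx);

    // The first event after a flush is stamped with milliseconds_since_first_event == 0 ...
    telemetry.report_editor_event(
        telemetry_settings,
        Some("rs".to_string()), // file_extension
        false,                  // vim_mode
        "open",                 // operation
        false,                  // copilot_enabled
        false,                  // copilot_enabled_for_language
    );

    // ... and later events in the same batch carry the offset from that first event.
    telemetry.report_memory_event(telemetry_settings, 512 * 1024 * 1024, 1024 * 1024 * 1024);
}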

crates/client2/Cargo.toml 🔗

@@ -12,6 +12,7 @@ doctest = false
 test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
 
 [dependencies]
+chrono = { version = "0.4", features = ["serde"] }
 collections = { path = "../collections" }
 db = { package = "db2", path = "../db2" }
 gpui = { package = "gpui2", path = "../gpui2" }

crates/client2/src/client2.rs 🔗

@@ -923,9 +923,17 @@ impl Client {
         self.establish_websocket_connection(credentials, cx)
     }
 
-    async fn get_rpc_url(http: Arc<dyn HttpClient>, is_preview: bool) -> Result<Url> {
-        let preview_param = if is_preview { "?preview=1" } else { "" };
-        let url = format!("{}/rpc{preview_param}", *ZED_SERVER_URL);
+    async fn get_rpc_url(
+        http: Arc<dyn HttpClient>,
+        release_channel: Option<ReleaseChannel>,
+    ) -> Result<Url> {
+        let mut url = format!("{}/rpc", *ZED_SERVER_URL);
+        if let Some(preview_param) =
+            release_channel.and_then(|channel| channel.release_query_param())
+        {
+            url += "?";
+            url += preview_param;
+        }
         let response = http.get(&url, Default::default(), false).await?;
 
         // Normally, ZED_SERVER_URL is set to the URL of zed.dev website.
@@ -960,9 +968,7 @@ impl Client {
         credentials: &Credentials,
         cx: &AsyncAppContext,
     ) -> Task<Result<Connection, EstablishConnectionError>> {
-        let use_preview_server = cx
-            .try_read_global(|channel: &ReleaseChannel, _| *channel != ReleaseChannel::Stable)
-            .unwrap_or(false);
+        let release_channel = cx.try_read_global(|channel: &ReleaseChannel, _| *channel);
 
         let request = Request::builder()
             .header(
@@ -973,7 +979,7 @@ impl Client {
 
         let http = self.http.clone();
         cx.background_executor().spawn(async move {
-            let mut rpc_url = Self::get_rpc_url(http, use_preview_server).await?;
+            let mut rpc_url = Self::get_rpc_url(http, release_channel).await?;
             let rpc_host = rpc_url
                 .host_str()
                 .zip(rpc_url.port_or_known_default())
@@ -1120,7 +1126,7 @@ impl Client {
 
         // Use the collab server's admin API to retrieve the id
         // of the impersonated user.
-        let mut url = Self::get_rpc_url(http.clone(), false).await?;
+        let mut url = Self::get_rpc_url(http.clone(), None).await?;
         url.set_path("/user");
         url.set_query(Some(&format!("github_login={login}")));
         let request = Request::get(url.as_str())
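
A minimal sketch of the query-parameter handling in get_rpc_url above, using a stand-in ReleaseChannel enum; the variant names and parameter strings here are assumptions for illustration, not taken from the crate:

#[derive(Clone, Copy)]
enum ReleaseChannel {
    Dev,
    Nightly,
    Preview,
    Stable,
}

impl ReleaseChannel {
    // Assumption: only non-stable channels expose a query param.
    fn release_query_param(&self) -> Option<&'static str> {
        match self {
            ReleaseChannel::Preview => Some("preview=1"),
            ReleaseChannel::Nightly => Some("nightly=1"),
            ReleaseChannel::Dev | ReleaseChannel::Stable => None,
        }
    }
}

// Append "?<param>" only when the channel provides one; passing None
// (as the impersonation path does) yields the bare /rpc URL.
fn rpc_url(server_url: &str, release_channel: Option<ReleaseChannel>) -> String {
    let mut url = format!("{server_url}/rpc");
    if let Some(param) = release_channel.and_then(|channel| channel.release_query_param()) {
        url += "?";
        url += param;
    }
    url
}

fn main() {
    assert_eq!(
        rpc_url("https://zed.dev", Some(ReleaseChannel::Preview)),
        "https://zed.dev/rpc?preview=1"
    );
    assert_eq!(rpc_url("https://zed.dev", Some(ReleaseChannel::Stable)), "https://zed.dev/rpc");
    assert_eq!(rpc_url("https://zed.dev", None), "https://zed.dev/rpc");
}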

crates/client2/src/telemetry.rs 🔗

@@ -1,4 +1,5 @@
 use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
+use chrono::{DateTime, Utc};
 use gpui::{serde_json, AppContext, AppMetadata, BackgroundExecutor, Task};
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
@@ -20,7 +21,7 @@ pub struct Telemetry {
 
 struct TelemetryState {
     metrics_id: Option<Arc<str>>,      // Per logged-in user
-    installation_id: Option<Arc<str>>, // Per app installation (different for dev, preview, and stable)
+    installation_id: Option<Arc<str>>, // Per app installation (different for dev, nightly, preview, and stable)
     session_id: Option<Arc<str>>,      // Per app launch
     release_channel: Option<&'static str>,
     app_metadata: AppMetadata,
@@ -29,6 +30,7 @@ struct TelemetryState {
     flush_clickhouse_events_task: Option<Task<()>>,
     log_file: Option<NamedTempFile>,
     is_staff: Option<bool>,
+    first_event_datetime: Option<DateTime<Utc>>,
 }
 
 const CLICKHOUSE_EVENTS_URL_PATH: &'static str = "/api/events";
@@ -75,29 +77,35 @@ pub enum ClickhouseEvent {
         vim_mode: bool,
         copilot_enabled: bool,
         copilot_enabled_for_language: bool,
+        milliseconds_since_first_event: i64,
     },
     Copilot {
         suggestion_id: Option<String>,
         suggestion_accepted: bool,
         file_extension: Option<String>,
+        milliseconds_since_first_event: i64,
     },
     Call {
         operation: &'static str,
         room_id: Option<u64>,
         channel_id: Option<u64>,
+        milliseconds_since_first_event: i64,
     },
     Assistant {
         conversation_id: Option<String>,
         kind: AssistantKind,
         model: &'static str,
+        milliseconds_since_first_event: i64,
     },
     Cpu {
         usage_as_percentage: f32,
         core_count: u32,
+        milliseconds_since_first_event: i64,
     },
     Memory {
         memory_in_bytes: u64,
         virtual_memory_in_bytes: u64,
+        milliseconds_since_first_event: i64,
     },
 }
 
@@ -135,6 +143,7 @@ impl Telemetry {
                 flush_clickhouse_events_task: Default::default(),
                 log_file: None,
                 is_staff: None,
+                first_event_datetime: None,
             }),
         });
 
@@ -190,16 +199,6 @@ impl Telemetry {
                     return;
                 };
 
-                let memory_event = ClickhouseEvent::Memory {
-                    memory_in_bytes: process.memory(),
-                    virtual_memory_in_bytes: process.virtual_memory(),
-                };
-
-                let cpu_event = ClickhouseEvent::Cpu {
-                    usage_as_percentage: process.cpu_usage(),
-                    core_count: system.cpus().len() as u32,
-                };
-
                 let telemetry_settings = if let Ok(telemetry_settings) =
                     cx.update(|cx| *TelemetrySettings::get_global(cx))
                 {
@@ -208,8 +207,16 @@ impl Telemetry {
                     break;
                 };
 
-                this.report_clickhouse_event(memory_event, telemetry_settings);
-                this.report_clickhouse_event(cpu_event, telemetry_settings);
+                this.report_memory_event(
+                    telemetry_settings,
+                    process.memory(),
+                    process.virtual_memory(),
+                );
+                this.report_cpu_event(
+                    telemetry_settings,
+                    process.cpu_usage(),
+                    system.cpus().len() as u32,
+                );
             }
         })
         .detach();
@@ -232,7 +239,123 @@ impl Telemetry {
         drop(state);
     }
 
-    pub fn report_clickhouse_event(
+    pub fn report_editor_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        file_extension: Option<String>,
+        vim_mode: bool,
+        operation: &'static str,
+        copilot_enabled: bool,
+        copilot_enabled_for_language: bool,
+    ) {
+        let event = ClickhouseEvent::Editor {
+            file_extension,
+            vim_mode,
+            operation,
+            copilot_enabled,
+            copilot_enabled_for_language,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_copilot_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        suggestion_id: Option<String>,
+        suggestion_accepted: bool,
+        file_extension: Option<String>,
+    ) {
+        let event = ClickhouseEvent::Copilot {
+            suggestion_id,
+            suggestion_accepted,
+            file_extension,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_assistant_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        conversation_id: Option<String>,
+        kind: AssistantKind,
+        model: &'static str,
+    ) {
+        let event = ClickhouseEvent::Assistant {
+            conversation_id,
+            kind,
+            model,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_call_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        operation: &'static str,
+        room_id: Option<u64>,
+        channel_id: Option<u64>,
+    ) {
+        let event = ClickhouseEvent::Call {
+            operation,
+            room_id,
+            channel_id,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_cpu_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        usage_as_percentage: f32,
+        core_count: u32,
+    ) {
+        let event = ClickhouseEvent::Cpu {
+            usage_as_percentage,
+            core_count,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    pub fn report_memory_event(
+        self: &Arc<Self>,
+        telemetry_settings: TelemetrySettings,
+        memory_in_bytes: u64,
+        virtual_memory_in_bytes: u64,
+    ) {
+        let event = ClickhouseEvent::Memory {
+            memory_in_bytes,
+            virtual_memory_in_bytes,
+            milliseconds_since_first_event: self.milliseconds_since_first_event(),
+        };
+
+        self.report_clickhouse_event(event, telemetry_settings)
+    }
+
+    fn milliseconds_since_first_event(&self) -> i64 {
+        let mut state = self.state.lock();
+        match state.first_event_datetime {
+            Some(first_event_datetime) => {
+                let now: DateTime<Utc> = Utc::now();
+                now.timestamp_millis() - first_event_datetime.timestamp_millis()
+            }
+            None => {
+                state.first_event_datetime = Some(Utc::now());
+                0
+            }
+        }
+    }
+
+    fn report_clickhouse_event(
         self: &Arc<Self>,
         event: ClickhouseEvent,
         telemetry_settings: TelemetrySettings,
@@ -276,6 +399,7 @@ impl Telemetry {
 
     fn flush_clickhouse_events(self: &Arc<Self>) {
         let mut state = self.state.lock();
+        state.first_event_datetime = None;
         let mut events = mem::take(&mut state.clickhouse_events_queue);
         state.flush_clickhouse_events_task.take();
         drop(state);
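
A minimal std-only sketch of the first-event bookkeeping the report_* helpers above share: the first event after a flush stamps a baseline and reports 0, later events report elapsed milliseconds, and flushing clears the baseline (as flush_clickhouse_events now does). The type and method names are illustrative, not from the crate:

use std::sync::Mutex;
use std::time::Instant;

struct EventClock {
    first_event_at: Mutex<Option<Instant>>,
}

impl EventClock {
    fn new() -> Self {
        Self {
            first_event_at: Mutex::new(None),
        }
    }

    /// Returns 0 for the first event after a flush, otherwise milliseconds since that event.
    fn milliseconds_since_first_event(&self) -> i64 {
        let mut first = self.first_event_at.lock().unwrap();
        match *first {
            Some(start) => start.elapsed().as_millis() as i64,
            None => {
                *first = Some(Instant::now());
                0
            }
        }
    }

    /// Clears the baseline, mirroring flush_clickhouse_events resetting first_event_datetime.
    fn flush(&self) {
        *self.first_event_at.lock().unwrap() = None;
    }
}

fn main() {
    let clock = EventClock::new();
    assert_eq!(clock.milliseconds_since_first_event(), 0); // first event sets the baseline
    let _elapsed = clock.milliseconds_since_first_event(); // measured from the first event
    clock.flush();
    assert_eq!(clock.milliseconds_since_first_event(), 0); // baseline restarts after a flush
}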

crates/collab/src/tests/integration_tests.rs 🔗

@@ -5052,7 +5052,7 @@ async fn test_project_search(
     let mut results = HashMap::default();
     let mut search_rx = project_b.update(cx_b, |project, cx| {
         project.search(
-            SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
             cx,
         )
     });

crates/collab/src/tests/random_project_collaboration_tests.rs 🔗

@@ -869,7 +869,8 @@ impl RandomizedTest for ProjectCollaborationTest {
 
                 let mut search = project.update(cx, |project, cx| {
                     project.search(
-                        SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
+                        SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
+                            .unwrap(),
                         cx,
                     )
                 });

crates/collab2/src/tests/integration_tests.rs 🔗

@@ -4599,7 +4599,7 @@ async fn test_project_search(
     let mut results = HashMap::default();
     let mut search_rx = project_b.update(cx_b, |project, cx| {
         project.search(
-            SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
             cx,
         )
     });

crates/collab2/src/tests/random_project_collaboration_tests.rs 🔗

@@ -870,7 +870,8 @@ impl RandomizedTest for ProjectCollaborationTest {
 
                 let mut search = project.update(cx, |project, cx| {
                     project.search(
-                        SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
+                        SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
+                            .unwrap(),
                         cx,
                     )
                 });

crates/collab_ui/src/chat_panel/message_editor.rs 🔗

@@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
 const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
 
 lazy_static! {
-    static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
-        "@[-_\\w]+",
-        false,
-        false,
-        Default::default(),
-        Default::default()
-    )
-    .unwrap();
+    static ref MENTIONS_SEARCH: SearchQuery =
+        SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
 }
 
 pub struct MessageEditor {

crates/collab_ui2/src/chat_panel/message_editor.rs 🔗

@@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
 const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
 
 lazy_static! {
-    static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
-        "@[-_\\w]+",
-        false,
-        false,
-        Default::default(),
-        Default::default()
-    )
-    .unwrap();
+    static ref MENTIONS_SEARCH: SearchQuery =
+        SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
 }
 
 pub struct MessageEditor {

crates/command_palette2/src/command_palette.rs 🔗

@@ -1,8 +1,9 @@
 use collections::{CommandPaletteFilter, HashMap};
 use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::{
-    actions, div, prelude::*, Action, AppContext, Dismiss, Div, FocusHandle, Keystroke,
-    ManagedView, ParentElement, Render, Styled, View, ViewContext, VisualContext, WeakView,
+    actions, div, prelude::*, Action, AppContext, Component, Dismiss, Div, EventEmitter,
+    FocusHandle, FocusableView, Keystroke, ManagedView, Manager, ParentComponent, ParentElement,
+    Render, Styled, View, ViewContext, VisualContext, WeakView,
 };
 use picker::{Picker, PickerDelegate};
 use std::{
@@ -68,7 +69,9 @@ impl CommandPalette {
     }
 }
 
-impl ManagedView for CommandPalette {
+impl EventEmitter<Manager> for CommandPalette {}
+
+impl FocusableView for CommandPalette {
     fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
         self.picker.focus_handle(cx)
     }
@@ -114,6 +117,7 @@ impl Clone for Command {
         }
     }
 }
+
 /// Hit count for each command in the palette.
 /// We only account for commands triggered directly via the command palette and not by e.g. keystrokes because
 /// if a user already knows a keystroke for a command, they are unlikely to use the command palette to look for it.
@@ -265,7 +269,7 @@ impl PickerDelegate for CommandPaletteDelegate {
 
     fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
         self.command_palette
-            .update(cx, |_, cx| cx.emit(Dismiss))
+            .update(cx, |_, cx| cx.emit(Manager::Dismiss))
             .log_err();
     }
 

crates/diagnostics2/Cargo.toml 🔗

@@ -0,0 +1,43 @@
+[package]
+name = "diagnostics2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/diagnostics.rs"
+doctest = false
+
+[dependencies]
+collections = { path = "../collections" }
+editor = { package = "editor2", path = "../editor2" }
+gpui = { package = "gpui2", path = "../gpui2" }
+ui = { package = "ui2", path = "../ui2" }
+language = { package = "language2", path = "../language2" }
+lsp = { package = "lsp2", path = "../lsp2" }
+project = { package = "project2", path = "../project2" }
+settings = { package = "settings2", path = "../settings2" }
+theme = { package = "theme2", path = "../theme2" }
+util = { path = "../util" }
+workspace = { package = "workspace2", path = "../workspace2" }
+
+log.workspace = true
+anyhow.workspace = true
+futures.workspace = true
+schemars.workspace = true
+serde.workspace = true
+serde_derive.workspace = true
+smallvec.workspace = true
+postage.workspace = true
+
+[dev-dependencies]
+client = { package = "client2", path = "../client2", features = ["test-support"] }
+editor = { package = "editor2", path = "../editor2", features = ["test-support"] }
+language = { package = "language2", path = "../language2", features = ["test-support"] }
+lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
+workspace = { package = "workspace2", path = "../workspace2", features = ["test-support"] }
+theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
+
+serde_json.workspace = true
+unindent.workspace = true

crates/diagnostics2/src/diagnostics.rs 🔗

@@ -0,0 +1,1572 @@
+pub mod items;
+mod project_diagnostics_settings;
+mod toolbar_controls;
+
+use anyhow::{Context as _, Result};
+use collections::{HashMap, HashSet};
+use editor::{
+    diagnostic_block_renderer,
+    display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock},
+    highlight_diagnostic_message,
+    scroll::autoscroll::Autoscroll,
+    Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
+};
+use futures::future::try_join_all;
+use gpui::{
+    actions, div, AnyElement, AnyView, AppContext, Component, Context, Div, EventEmitter,
+    FocusEvent, FocusHandle, Focusable, FocusableComponent, FocusableView, InteractiveComponent,
+    Model, ParentComponent, Render, SharedString, Styled, Subscription, Task, View, ViewContext,
+    VisualContext, WeakView,
+};
+use language::{
+    Anchor, Bias, Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, Point, Selection,
+    SelectionGoal,
+};
+use lsp::LanguageServerId;
+use project::{DiagnosticSummary, Project, ProjectPath};
+use project_diagnostics_settings::ProjectDiagnosticsSettings;
+use settings::Settings;
+use std::{
+    any::{Any, TypeId},
+    cmp::Ordering,
+    mem,
+    ops::Range,
+    path::PathBuf,
+    sync::Arc,
+};
+use theme::ActiveTheme;
+pub use toolbar_controls::ToolbarControls;
+use ui::{h_stack, HighlightedLabel, Icon, IconElement, Label, TextColor};
+use util::TryFutureExt;
+use workspace::{
+    item::{BreadcrumbText, Item, ItemEvent, ItemHandle},
+    ItemNavHistory, Pane, ToolbarItemLocation, Workspace,
+};
+
+actions!(Deploy, ToggleWarnings);
+
+const CONTEXT_LINE_COUNT: u32 = 1;
+
+pub fn init(cx: &mut AppContext) {
+    ProjectDiagnosticsSettings::register(cx);
+    cx.observe_new_views(ProjectDiagnosticsEditor::register)
+        .detach();
+}
+
+struct ProjectDiagnosticsEditor {
+    project: Model<Project>,
+    workspace: WeakView<Workspace>,
+    focus_handle: FocusHandle,
+    editor: View<Editor>,
+    summary: DiagnosticSummary,
+    excerpts: Model<MultiBuffer>,
+    path_states: Vec<PathState>,
+    paths_to_update: HashMap<LanguageServerId, HashSet<ProjectPath>>,
+    current_diagnostics: HashMap<LanguageServerId, HashSet<ProjectPath>>,
+    include_warnings: bool,
+    _subscriptions: Vec<Subscription>,
+}
+
+struct PathState {
+    path: ProjectPath,
+    diagnostic_groups: Vec<DiagnosticGroupState>,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+struct Jump {
+    path: ProjectPath,
+    position: Point,
+    anchor: Anchor,
+}
+
+struct DiagnosticGroupState {
+    language_server_id: LanguageServerId,
+    primary_diagnostic: DiagnosticEntry<language::Anchor>,
+    primary_excerpt_ix: usize,
+    excerpts: Vec<ExcerptId>,
+    blocks: HashSet<BlockId>,
+    block_count: usize,
+}
+
+impl EventEmitter<ItemEvent> for ProjectDiagnosticsEditor {}
+
+impl Render for ProjectDiagnosticsEditor {
+    type Element = Focusable<Self, Div<Self>>;
+
+    fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
+        let child = if self.path_states.is_empty() {
+            div()
+                .bg(cx.theme().colors().editor_background)
+                .flex()
+                .items_center()
+                .justify_center()
+                .size_full()
+                .child(Label::new("No problems in workspace"))
+        } else {
+            div().size_full().child(self.editor.clone())
+        };
+
+        div()
+            .track_focus(&self.focus_handle)
+            .size_full()
+            .on_focus_in(Self::focus_in)
+            .on_action(Self::toggle_warnings)
+            .child(child)
+    }
+}
+
+impl ProjectDiagnosticsEditor {
+    fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
+        workspace.register_action(Self::deploy);
+    }
+
+    fn new(
+        project_handle: Model<Project>,
+        workspace: WeakView<Workspace>,
+        cx: &mut ViewContext<Self>,
+    ) -> Self {
+        let project_event_subscription =
+            cx.subscribe(&project_handle, |this, _, event, cx| match event {
+                project::Event::DiskBasedDiagnosticsFinished { language_server_id } => {
+                    log::debug!("Disk based diagnostics finished for server {language_server_id}");
+                    this.update_excerpts(Some(*language_server_id), cx);
+                }
+                project::Event::DiagnosticsUpdated {
+                    language_server_id,
+                    path,
+                } => {
+                    log::debug!("Adding path {path:?} to update for server {language_server_id}");
+                    this.paths_to_update
+                        .entry(*language_server_id)
+                        .or_default()
+                        .insert(path.clone());
+                    if this.editor.read(cx).selections.all::<usize>(cx).is_empty()
+                        && !this.is_dirty(cx)
+                    {
+                        this.update_excerpts(Some(*language_server_id), cx);
+                    }
+                }
+                _ => {}
+            });
+
+        let excerpts = cx.build_model(|cx| MultiBuffer::new(project_handle.read(cx).replica_id()));
+        let editor = cx.build_view(|cx| {
+            let mut editor =
+                Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), cx);
+            editor.set_vertical_scroll_margin(5, cx);
+            editor
+        });
+        let editor_event_subscription =
+            cx.subscribe(&editor, |this, _editor, event: &EditorEvent, cx| {
+                Self::emit_item_event_for_editor_event(event, cx);
+                if event == &EditorEvent::Focused && this.path_states.is_empty() {
+                    cx.focus(&this.focus_handle);
+                }
+            });
+
+        let project = project_handle.read(cx);
+        let summary = project.diagnostic_summary(cx);
+        let mut this = Self {
+            project: project_handle,
+            summary,
+            workspace,
+            excerpts,
+            focus_handle: cx.focus_handle(),
+            editor,
+            path_states: Default::default(),
+            paths_to_update: HashMap::default(),
+            include_warnings: ProjectDiagnosticsSettings::get_global(cx).include_warnings,
+            current_diagnostics: HashMap::default(),
+            _subscriptions: vec![project_event_subscription, editor_event_subscription],
+        };
+        this.update_excerpts(None, cx);
+        this
+    }
+
+    fn emit_item_event_for_editor_event(event: &EditorEvent, cx: &mut ViewContext<Self>) {
+        match event {
+            EditorEvent::Closed => cx.emit(ItemEvent::CloseItem),
+
+            EditorEvent::Saved | EditorEvent::TitleChanged => {
+                cx.emit(ItemEvent::UpdateTab);
+                cx.emit(ItemEvent::UpdateBreadcrumbs);
+            }
+
+            EditorEvent::Reparsed => {
+                cx.emit(ItemEvent::UpdateBreadcrumbs);
+            }
+
+            EditorEvent::SelectionsChanged { local } if *local => {
+                cx.emit(ItemEvent::UpdateBreadcrumbs);
+            }
+
+            EditorEvent::DirtyChanged => {
+                cx.emit(ItemEvent::UpdateTab);
+            }
+
+            EditorEvent::BufferEdited => {
+                cx.emit(ItemEvent::Edit);
+                cx.emit(ItemEvent::UpdateBreadcrumbs);
+            }
+
+            EditorEvent::ExcerptsAdded { .. } | EditorEvent::ExcerptsRemoved { .. } => {
+                cx.emit(ItemEvent::Edit);
+            }
+
+            _ => {}
+        }
+    }
+
+    fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
+        if let Some(existing) = workspace.item_of_type::<ProjectDiagnosticsEditor>(cx) {
+            workspace.activate_item(&existing, cx);
+        } else {
+            let workspace_handle = cx.view().downgrade();
+            let diagnostics = cx.build_view(|cx| {
+                ProjectDiagnosticsEditor::new(workspace.project().clone(), workspace_handle, cx)
+            });
+            workspace.add_item(Box::new(diagnostics), cx);
+        }
+    }
+
+    fn toggle_warnings(&mut self, _: &ToggleWarnings, cx: &mut ViewContext<Self>) {
+        self.include_warnings = !self.include_warnings;
+        self.paths_to_update = self.current_diagnostics.clone();
+        self.update_excerpts(None, cx);
+        cx.notify();
+    }
+
+    fn focus_in(&mut self, _: &FocusEvent, cx: &mut ViewContext<Self>) {
+        if self.focus_handle.is_focused(cx) && !self.path_states.is_empty() {
+            self.editor.focus_handle(cx).focus(cx)
+        }
+    }
+
+    fn update_excerpts(
+        &mut self,
+        language_server_id: Option<LanguageServerId>,
+        cx: &mut ViewContext<Self>,
+    ) {
+        log::debug!("Updating excerpts for server {language_server_id:?}");
+        let mut paths_to_recheck = HashSet::default();
+        let mut new_summaries: HashMap<LanguageServerId, HashSet<ProjectPath>> = self
+            .project
+            .read(cx)
+            .diagnostic_summaries(cx)
+            .fold(HashMap::default(), |mut summaries, (path, server_id, _)| {
+                summaries.entry(server_id).or_default().insert(path);
+                summaries
+            });
+        let mut old_diagnostics = if let Some(language_server_id) = language_server_id {
+            new_summaries.retain(|server_id, _| server_id == &language_server_id);
+            self.paths_to_update.retain(|server_id, paths| {
+                if server_id == &language_server_id {
+                    paths_to_recheck.extend(paths.drain());
+                    false
+                } else {
+                    true
+                }
+            });
+            let mut old_diagnostics = HashMap::default();
+            if let Some(new_paths) = new_summaries.get(&language_server_id) {
+                if let Some(old_paths) = self
+                    .current_diagnostics
+                    .insert(language_server_id, new_paths.clone())
+                {
+                    old_diagnostics.insert(language_server_id, old_paths);
+                }
+            } else {
+                if let Some(old_paths) = self.current_diagnostics.remove(&language_server_id) {
+                    old_diagnostics.insert(language_server_id, old_paths);
+                }
+            }
+            old_diagnostics
+        } else {
+            paths_to_recheck.extend(self.paths_to_update.drain().flat_map(|(_, paths)| paths));
+            mem::replace(&mut self.current_diagnostics, new_summaries.clone())
+        };
+        for (server_id, new_paths) in new_summaries {
+            match old_diagnostics.remove(&server_id) {
+                Some(mut old_paths) => {
+                    paths_to_recheck.extend(
+                        new_paths
+                            .into_iter()
+                            .filter(|new_path| !old_paths.remove(new_path)),
+                    );
+                    paths_to_recheck.extend(old_paths);
+                }
+                None => paths_to_recheck.extend(new_paths),
+            }
+        }
+        paths_to_recheck.extend(old_diagnostics.into_iter().flat_map(|(_, paths)| paths));
+
+        if paths_to_recheck.is_empty() {
+            log::debug!("No paths to recheck for language server {language_server_id:?}");
+            return;
+        }
+        log::debug!(
+            "Rechecking {} paths for language server {:?}",
+            paths_to_recheck.len(),
+            language_server_id
+        );
+        let project = self.project.clone();
+        cx.spawn(|this, mut cx| {
+            async move {
+                let _: Vec<()> = try_join_all(paths_to_recheck.into_iter().map(|path| {
+                    let mut cx = cx.clone();
+                    let project = project.clone();
+                    let this = this.clone();
+                    async move {
+                        let buffer = project
+                            .update(&mut cx, |project, cx| project.open_buffer(path.clone(), cx))?
+                            .await
+                            .with_context(|| format!("opening buffer for path {path:?}"))?;
+                        this.update(&mut cx, |this, cx| {
+                            this.populate_excerpts(path, language_server_id, buffer, cx);
+                        })
+                        .context("missing project")?;
+                        anyhow::Ok(())
+                    }
+                }))
+                .await
+                .context("rechecking diagnostics for paths")?;
+
+                this.update(&mut cx, |this, cx| {
+                    this.summary = this.project.read(cx).diagnostic_summary(cx);
+                    cx.emit(ItemEvent::UpdateTab);
+                    cx.emit(ItemEvent::UpdateBreadcrumbs);
+                })?;
+                anyhow::Ok(())
+            }
+            .log_err()
+        })
+        .detach();
+    }
+
+    fn populate_excerpts(
+        &mut self,
+        path: ProjectPath,
+        language_server_id: Option<LanguageServerId>,
+        buffer: Model<Buffer>,
+        cx: &mut ViewContext<Self>,
+    ) {
+        let was_empty = self.path_states.is_empty();
+        let snapshot = buffer.read(cx).snapshot();
+        let path_ix = match self.path_states.binary_search_by_key(&&path, |e| &e.path) {
+            Ok(ix) => ix,
+            Err(ix) => {
+                self.path_states.insert(
+                    ix,
+                    PathState {
+                        path: path.clone(),
+                        diagnostic_groups: Default::default(),
+                    },
+                );
+                ix
+            }
+        };
+
+        let mut prev_excerpt_id = if path_ix > 0 {
+            let prev_path_last_group = &self.path_states[path_ix - 1]
+                .diagnostic_groups
+                .last()
+                .unwrap();
+            prev_path_last_group.excerpts.last().unwrap().clone()
+        } else {
+            ExcerptId::min()
+        };
+
+        let path_state = &mut self.path_states[path_ix];
+        let mut groups_to_add = Vec::new();
+        let mut group_ixs_to_remove = Vec::new();
+        let mut blocks_to_add = Vec::new();
+        let mut blocks_to_remove = HashSet::default();
+        let mut first_excerpt_id = None;
+        let max_severity = if self.include_warnings {
+            DiagnosticSeverity::WARNING
+        } else {
+            DiagnosticSeverity::ERROR
+        };
+        let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| {
+            let mut old_groups = path_state.diagnostic_groups.iter().enumerate().peekable();
+            let mut new_groups = snapshot
+                .diagnostic_groups(language_server_id)
+                .into_iter()
+                .filter(|(_, group)| {
+                    group.entries[group.primary_ix].diagnostic.severity <= max_severity
+                })
+                .peekable();
+            loop {
+                let mut to_insert = None;
+                let mut to_remove = None;
+                let mut to_keep = None;
+                match (old_groups.peek(), new_groups.peek()) {
+                    (None, None) => break,
+                    (None, Some(_)) => to_insert = new_groups.next(),
+                    (Some((_, old_group)), None) => {
+                        if language_server_id.map_or(true, |id| id == old_group.language_server_id)
+                        {
+                            to_remove = old_groups.next();
+                        } else {
+                            to_keep = old_groups.next();
+                        }
+                    }
+                    (Some((_, old_group)), Some((_, new_group))) => {
+                        let old_primary = &old_group.primary_diagnostic;
+                        let new_primary = &new_group.entries[new_group.primary_ix];
+                        match compare_diagnostics(old_primary, new_primary, &snapshot) {
+                            Ordering::Less => {
+                                if language_server_id
+                                    .map_or(true, |id| id == old_group.language_server_id)
+                                {
+                                    to_remove = old_groups.next();
+                                } else {
+                                    to_keep = old_groups.next();
+                                }
+                            }
+                            Ordering::Equal => {
+                                to_keep = old_groups.next();
+                                new_groups.next();
+                            }
+                            Ordering::Greater => to_insert = new_groups.next(),
+                        }
+                    }
+                }
+
+                if let Some((language_server_id, group)) = to_insert {
+                    let mut group_state = DiagnosticGroupState {
+                        language_server_id,
+                        primary_diagnostic: group.entries[group.primary_ix].clone(),
+                        primary_excerpt_ix: 0,
+                        excerpts: Default::default(),
+                        blocks: Default::default(),
+                        block_count: 0,
+                    };
+                    let mut pending_range: Option<(Range<Point>, usize)> = None;
+                    let mut is_first_excerpt_for_group = true;
+                    for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() {
+                        let resolved_entry = entry.map(|e| e.resolve::<Point>(&snapshot));
+                        if let Some((range, start_ix)) = &mut pending_range {
+                            if let Some(entry) = resolved_entry.as_ref() {
+                                if entry.range.start.row
+                                    <= range.end.row + 1 + CONTEXT_LINE_COUNT * 2
+                                {
+                                    range.end = range.end.max(entry.range.end);
+                                    continue;
+                                }
+                            }
+
+                            let excerpt_start =
+                                Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0);
+                            let excerpt_end = snapshot.clip_point(
+                                Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX),
+                                Bias::Left,
+                            );
+                            let excerpt_id = excerpts
+                                .insert_excerpts_after(
+                                    prev_excerpt_id,
+                                    buffer.clone(),
+                                    [ExcerptRange {
+                                        context: excerpt_start..excerpt_end,
+                                        primary: Some(range.clone()),
+                                    }],
+                                    excerpts_cx,
+                                )
+                                .pop()
+                                .unwrap();
+
+                            prev_excerpt_id = excerpt_id.clone();
+                            first_excerpt_id.get_or_insert_with(|| prev_excerpt_id.clone());
+                            group_state.excerpts.push(excerpt_id.clone());
+                            let header_position = (excerpt_id.clone(), language::Anchor::MIN);
+
+                            if is_first_excerpt_for_group {
+                                is_first_excerpt_for_group = false;
+                                let mut primary =
+                                    group.entries[group.primary_ix].diagnostic.clone();
+                                primary.message =
+                                    primary.message.split('\n').next().unwrap().to_string();
+                                group_state.block_count += 1;
+                                blocks_to_add.push(BlockProperties {
+                                    position: header_position,
+                                    height: 2,
+                                    style: BlockStyle::Sticky,
+                                    render: diagnostic_header_renderer(primary),
+                                    disposition: BlockDisposition::Above,
+                                });
+                            }
+
+                            for entry in &group.entries[*start_ix..ix] {
+                                let mut diagnostic = entry.diagnostic.clone();
+                                if diagnostic.is_primary {
+                                    group_state.primary_excerpt_ix = group_state.excerpts.len() - 1;
+                                    diagnostic.message =
+                                        entry.diagnostic.message.split('\n').skip(1).collect();
+                                }
+
+                                if !diagnostic.message.is_empty() {
+                                    group_state.block_count += 1;
+                                    blocks_to_add.push(BlockProperties {
+                                        position: (excerpt_id.clone(), entry.range.start),
+                                        height: diagnostic.message.matches('\n').count() as u8 + 1,
+                                        style: BlockStyle::Fixed,
+                                        render: diagnostic_block_renderer(diagnostic, true),
+                                        disposition: BlockDisposition::Below,
+                                    });
+                                }
+                            }
+
+                            pending_range.take();
+                        }
+
+                        if let Some(entry) = resolved_entry {
+                            pending_range = Some((entry.range.clone(), ix));
+                        }
+                    }
+
+                    groups_to_add.push(group_state);
+                } else if let Some((group_ix, group_state)) = to_remove {
+                    excerpts.remove_excerpts(group_state.excerpts.iter().copied(), excerpts_cx);
+                    group_ixs_to_remove.push(group_ix);
+                    blocks_to_remove.extend(group_state.blocks.iter().copied());
+                } else if let Some((_, group)) = to_keep {
+                    prev_excerpt_id = group.excerpts.last().unwrap().clone();
+                    first_excerpt_id.get_or_insert_with(|| prev_excerpt_id.clone());
+                }
+            }
+
+            excerpts.snapshot(excerpts_cx)
+        });
+
+        self.editor.update(cx, |editor, cx| {
+            editor.remove_blocks(blocks_to_remove, None, cx);
+            let block_ids = editor.insert_blocks(
+                blocks_to_add.into_iter().map(|block| {
+                    let (excerpt_id, text_anchor) = block.position;
+                    BlockProperties {
+                        position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor),
+                        height: block.height,
+                        style: block.style,
+                        render: block.render,
+                        disposition: block.disposition,
+                    }
+                }),
+                Some(Autoscroll::fit()),
+                cx,
+            );
+
+            let mut block_ids = block_ids.into_iter();
+            for group_state in &mut groups_to_add {
+                group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect();
+            }
+        });
+
+        for ix in group_ixs_to_remove.into_iter().rev() {
+            path_state.diagnostic_groups.remove(ix);
+        }
+        path_state.diagnostic_groups.extend(groups_to_add);
+        path_state.diagnostic_groups.sort_unstable_by(|a, b| {
+            let range_a = &a.primary_diagnostic.range;
+            let range_b = &b.primary_diagnostic.range;
+            range_a
+                .start
+                .cmp(&range_b.start, &snapshot)
+                .then_with(|| range_a.end.cmp(&range_b.end, &snapshot))
+        });
+
+        if path_state.diagnostic_groups.is_empty() {
+            self.path_states.remove(path_ix);
+        }
+
+        self.editor.update(cx, |editor, cx| {
+            let groups;
+            let mut selections;
+            let new_excerpt_ids_by_selection_id;
+            if was_empty {
+                groups = self.path_states.first()?.diagnostic_groups.as_slice();
+                new_excerpt_ids_by_selection_id = [(0, ExcerptId::min())].into_iter().collect();
+                selections = vec![Selection {
+                    id: 0,
+                    start: 0,
+                    end: 0,
+                    reversed: false,
+                    goal: SelectionGoal::None,
+                }];
+            } else {
+                groups = self.path_states.get(path_ix)?.diagnostic_groups.as_slice();
+                new_excerpt_ids_by_selection_id =
+                    editor.change_selections(Some(Autoscroll::fit()), cx, |s| s.refresh());
+                selections = editor.selections.all::<usize>(cx);
+            }
+
+            // If any selection has lost its position, move it to start of the next primary diagnostic.
+            let snapshot = editor.snapshot(cx);
+            for selection in &mut selections {
+                if let Some(new_excerpt_id) = new_excerpt_ids_by_selection_id.get(&selection.id) {
+                    let group_ix = match groups.binary_search_by(|probe| {
+                        probe
+                            .excerpts
+                            .last()
+                            .unwrap()
+                            .cmp(new_excerpt_id, &snapshot.buffer_snapshot)
+                    }) {
+                        Ok(ix) | Err(ix) => ix,
+                    };
+                    if let Some(group) = groups.get(group_ix) {
+                        let offset = excerpts_snapshot
+                            .anchor_in_excerpt(
+                                group.excerpts[group.primary_excerpt_ix].clone(),
+                                group.primary_diagnostic.range.start,
+                            )
+                            .to_offset(&excerpts_snapshot);
+                        selection.start = offset;
+                        selection.end = offset;
+                    }
+                }
+            }
+            editor.change_selections(None, cx, |s| {
+                s.select(selections);
+            });
+            Some(())
+        });
+
+        if self.path_states.is_empty() {
+            if self.editor.focus_handle(cx).is_focused(cx) {
+                cx.focus(&self.focus_handle);
+            }
+        } else if self.focus_handle.is_focused(cx) {
+            let focus_handle = self.editor.focus_handle(cx);
+            cx.focus(&focus_handle);
+        }
+        cx.notify();
+    }
+}
+
+impl FocusableView for ProjectDiagnosticsEditor {
+    fn focus_handle(&self, _: &AppContext) -> FocusHandle {
+        self.focus_handle.clone()
+    }
+}
+
+impl Item for ProjectDiagnosticsEditor {
+    fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
+        self.editor.update(cx, |editor, cx| editor.deactivated(cx));
+    }
+
+    fn navigate(&mut self, data: Box<dyn Any>, cx: &mut ViewContext<Self>) -> bool {
+        self.editor
+            .update(cx, |editor, cx| editor.navigate(data, cx))
+    }
+
+    fn tab_tooltip_text(&self, _: &AppContext) -> Option<SharedString> {
+        Some("Project Diagnostics".into())
+    }
+
+    fn tab_content<T: 'static>(&self, _detail: Option<usize>, _: &AppContext) -> AnyElement<T> {
+        render_summary(&self.summary)
+    }
+
+    fn for_each_project_item(
+        &self,
+        cx: &AppContext,
+        f: &mut dyn FnMut(gpui::EntityId, &dyn project::Item),
+    ) {
+        self.editor.for_each_project_item(cx, f)
+    }
+
+    fn is_singleton(&self, _: &AppContext) -> bool {
+        false
+    }
+
+    fn set_nav_history(&mut self, nav_history: ItemNavHistory, cx: &mut ViewContext<Self>) {
+        self.editor.update(cx, |editor, _| {
+            editor.set_nav_history(Some(nav_history));
+        });
+    }
+
+    fn clone_on_split(
+        &self,
+        _workspace_id: workspace::WorkspaceId,
+        cx: &mut ViewContext<Self>,
+    ) -> Option<View<Self>>
+    where
+        Self: Sized,
+    {
+        Some(cx.build_view(|cx| {
+            ProjectDiagnosticsEditor::new(self.project.clone(), self.workspace.clone(), cx)
+        }))
+    }
+
+    fn is_dirty(&self, cx: &AppContext) -> bool {
+        self.excerpts.read(cx).is_dirty(cx)
+    }
+
+    fn has_conflict(&self, cx: &AppContext) -> bool {
+        self.excerpts.read(cx).has_conflict(cx)
+    }
+
+    fn can_save(&self, _: &AppContext) -> bool {
+        true
+    }
+
+    fn save(&mut self, project: Model<Project>, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
+        self.editor.save(project, cx)
+    }
+
+    fn save_as(
+        &mut self,
+        _: Model<Project>,
+        _: PathBuf,
+        _: &mut ViewContext<Self>,
+    ) -> Task<Result<()>> {
+        unreachable!()
+    }
+
+    fn reload(&mut self, project: Model<Project>, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
+        self.editor.reload(project, cx)
+    }
+
+    fn act_as_type<'a>(
+        &'a self,
+        type_id: TypeId,
+        self_handle: &'a View<Self>,
+        _: &'a AppContext,
+    ) -> Option<AnyView> {
+        if type_id == TypeId::of::<Self>() {
+            Some(self_handle.to_any())
+        } else if type_id == TypeId::of::<Editor>() {
+            Some(self.editor.to_any())
+        } else {
+            None
+        }
+    }
+
+    fn breadcrumb_location(&self) -> ToolbarItemLocation {
+        ToolbarItemLocation::PrimaryLeft { flex: None }
+    }
+
+    fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<BreadcrumbText>> {
+        self.editor.breadcrumbs(theme, cx)
+    }
+
+    fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
+        self.editor
+            .update(cx, |editor, cx| editor.added_to_workspace(workspace, cx));
+    }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        Some("diagnostics")
+    }
+
+    fn deserialize(
+        project: Model<Project>,
+        workspace: WeakView<Workspace>,
+        _workspace_id: workspace::WorkspaceId,
+        _item_id: workspace::ItemId,
+        cx: &mut ViewContext<Pane>,
+    ) -> Task<Result<View<Self>>> {
+        Task::ready(Ok(cx.build_view(|cx| Self::new(project, workspace, cx))))
+    }
+}
+
+fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
+    let (message, highlights) = highlight_diagnostic_message(Vec::new(), &diagnostic.message);
+    Arc::new(move |_| {
+        h_stack()
+            .id("diagnostic header")
+            .gap_3()
+            .bg(gpui::red())
+            .map(|stack| {
+                let icon = if diagnostic.severity == DiagnosticSeverity::ERROR {
+                    IconElement::new(Icon::XCircle).color(TextColor::Error)
+                } else {
+                    IconElement::new(Icon::ExclamationTriangle).color(TextColor::Warning)
+                };
+
+                stack.child(div().pl_8().child(icon))
+            })
+            .when_some(diagnostic.source.as_ref(), |stack, source| {
+                stack.child(Label::new(format!("{source}:")).color(TextColor::Accent))
+            })
+            .child(HighlightedLabel::new(message.clone(), highlights.clone()))
+            .when_some(diagnostic.code.as_ref(), |stack, code| {
+                stack.child(Label::new(code.clone()))
+            })
+            .render()
+    })
+}
+
+pub(crate) fn render_summary<T: 'static>(summary: &DiagnosticSummary) -> AnyElement<T> {
+    if summary.error_count == 0 && summary.warning_count == 0 {
+        Label::new("No problems").render()
+    } else {
+        h_stack()
+            .bg(gpui::red())
+            .child(IconElement::new(Icon::XCircle))
+            .child(Label::new(summary.error_count.to_string()))
+            .child(IconElement::new(Icon::ExclamationTriangle))
+            .child(Label::new(summary.warning_count.to_string()))
+            .render()
+    }
+}
+
+fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
+    lhs: &DiagnosticEntry<L>,
+    rhs: &DiagnosticEntry<R>,
+    snapshot: &language::BufferSnapshot,
+) -> Ordering {
+    lhs.range
+        .start
+        .to_offset(snapshot)
+        .cmp(&rhs.range.start.to_offset(snapshot))
+        .then_with(|| {
+            lhs.range
+                .end
+                .to_offset(snapshot)
+                .cmp(&rhs.range.end.to_offset(snapshot))
+        })
+        .then_with(|| lhs.diagnostic.message.cmp(&rhs.diagnostic.message))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use editor::{
+        display_map::{BlockContext, TransformBlock},
+        DisplayPoint,
+    };
+    use gpui::{px, TestAppContext, VisualTestContext, WindowContext};
+    use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16, Unclipped};
+    use project::FakeFs;
+    use serde_json::json;
+    use settings::SettingsStore;
+    use unindent::Unindent as _;
+
+    #[gpui::test]
+    async fn test_diagnostics(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(
+            "/test",
+            json!({
+                "consts.rs": "
+                    const a: i32 = 'a';
+                    const b: i32 = c;
+                "
+                .unindent(),
+
+                "main.rs": "
+                    fn main() {
+                        let x = vec![];
+                        let y = vec![];
+                        a(x);
+                        b(y);
+                        // comment 1
+                        // comment 2
+                        c(y);
+                        d(x);
+                    }
+                "
+                .unindent(),
+            }),
+        )
+        .await;
+
+        let language_server_id = LanguageServerId(0);
+        let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+        let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
+        let cx = &mut VisualTestContext::from_window(*window, cx);
+        let workspace = window.root(cx).unwrap();
+
+        // Create some diagnostics
+        project.update(cx, |project, cx| {
+            project
+                .update_diagnostic_entries(
+                    language_server_id,
+                    PathBuf::from("/test/main.rs"),
+                    None,
+                    vec![
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(1, 8))..Unclipped(PointUtf16::new(1, 9)),
+                            diagnostic: Diagnostic {
+                                message:
+                                    "move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait"
+                                        .to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(2, 8))..Unclipped(PointUtf16::new(2, 9)),
+                            diagnostic: Diagnostic {
+                                message:
+                                    "move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait"
+                                        .to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(3, 6))..Unclipped(PointUtf16::new(3, 7)),
+                            diagnostic: Diagnostic {
+                                message: "value moved here".to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(4, 6))..Unclipped(PointUtf16::new(4, 7)),
+                            diagnostic: Diagnostic {
+                                message: "value moved here".to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(7, 6))..Unclipped(PointUtf16::new(7, 7)),
+                            diagnostic: Diagnostic {
+                                message: "use of moved value\nvalue used here after move".to_string(),
+                                severity: DiagnosticSeverity::ERROR,
+                                is_primary: true,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(8, 6))..Unclipped(PointUtf16::new(8, 7)),
+                            diagnostic: Diagnostic {
+                                message: "use of moved value\nvalue used here after move".to_string(),
+                                severity: DiagnosticSeverity::ERROR,
+                                is_primary: true,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                    ],
+                    cx,
+                )
+                .unwrap();
+        });
+
+        // Open the project diagnostics view while there are already diagnostics.
+        let view = window.build_view(cx, |cx| {
+            ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx)
+        });
+
+        view.next_notification(cx).await;
+        view.update(cx, |view, cx| {
+            assert_eq!(
+                editor_blocks(&view.editor, cx),
+                [
+                    (0, "path header block".into()),
+                    (2, "diagnostic header".into()),
+                    (15, "collapsed context".into()),
+                    (16, "diagnostic header".into()),
+                    (25, "collapsed context".into()),
+                ]
+            );
+            assert_eq!(
+                view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+                concat!(
+                    //
+                    // main.rs
+                    //
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n", // primary message
+                    "\n", // padding
+                    "    let x = vec![];\n",
+                    "    let y = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    a(x);\n",
+                    "    b(y);\n",
+                    "\n", // supporting diagnostic
+                    "    // comment 1\n",
+                    "    // comment 2\n",
+                    "    c(y);\n",
+                    "\n", // supporting diagnostic
+                    "    d(x);\n",
+                    "\n", // context ellipsis
+                    // diagnostic group 2
+                    "\n", // primary message
+                    "\n", // padding
+                    "fn main() {\n",
+                    "    let x = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    let y = vec![];\n",
+                    "    a(x);\n",
+                    "\n", // supporting diagnostic
+                    "    b(y);\n",
+                    "\n", // context ellipsis
+                    "    c(y);\n",
+                    "    d(x);\n",
+                    "\n", // supporting diagnostic
+                    "}"
+                )
+            );
+
+            // Cursor is at the first diagnostic
+            view.editor.update(cx, |editor, cx| {
+                assert_eq!(
+                    editor.selections.display_ranges(cx),
+                    [DisplayPoint::new(12, 6)..DisplayPoint::new(12, 6)]
+                );
+            });
+        });
+
+        // Diagnostics are added for another earlier path.
+        project.update(cx, |project, cx| {
+            project.disk_based_diagnostics_started(language_server_id, cx);
+            project
+                .update_diagnostic_entries(
+                    language_server_id,
+                    PathBuf::from("/test/consts.rs"),
+                    None,
+                    vec![DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(0, 15))..Unclipped(PointUtf16::new(0, 15)),
+                        diagnostic: Diagnostic {
+                            message: "mismatched types\nexpected `usize`, found `char`".to_string(),
+                            severity: DiagnosticSeverity::ERROR,
+                            is_primary: true,
+                            is_disk_based: true,
+                            group_id: 0,
+                            ..Default::default()
+                        },
+                    }],
+                    cx,
+                )
+                .unwrap();
+            project.disk_based_diagnostics_finished(language_server_id, cx);
+        });
+
+        view.next_notification(cx).await;
+        view.update(cx, |view, cx| {
+            assert_eq!(
+                editor_blocks(&view.editor, cx),
+                [
+                    (0, "path header block".into()),
+                    (2, "diagnostic header".into()),
+                    (7, "path header block".into()),
+                    (9, "diagnostic header".into()),
+                    (22, "collapsed context".into()),
+                    (23, "diagnostic header".into()),
+                    (32, "collapsed context".into()),
+                ]
+            );
+            assert_eq!(
+                view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+                concat!(
+                    //
+                    // consts.rs
+                    //
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n", // primary message
+                    "\n", // padding
+                    "const a: i32 = 'a';\n",
+                    "\n", // supporting diagnostic
+                    "const b: i32 = c;\n",
+                    //
+                    // main.rs
+                    //
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n", // primary message
+                    "\n", // padding
+                    "    let x = vec![];\n",
+                    "    let y = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    a(x);\n",
+                    "    b(y);\n",
+                    "\n", // supporting diagnostic
+                    "    // comment 1\n",
+                    "    // comment 2\n",
+                    "    c(y);\n",
+                    "\n", // supporting diagnostic
+                    "    d(x);\n",
+                    "\n", // collapsed context
+                    // diagnostic group 2
+                    "\n", // primary message
+                    "\n", // padding
+                    "fn main() {\n",
+                    "    let x = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    let y = vec![];\n",
+                    "    a(x);\n",
+                    "\n", // supporting diagnostic
+                    "    b(y);\n",
+                    "\n", // context ellipsis
+                    "    c(y);\n",
+                    "    d(x);\n",
+                    "\n", // supporting diagnostic
+                    "}"
+                )
+            );
+
+            // Cursor keeps its position.
+            view.editor.update(cx, |editor, cx| {
+                assert_eq!(
+                    editor.selections.display_ranges(cx),
+                    [DisplayPoint::new(19, 6)..DisplayPoint::new(19, 6)]
+                );
+            });
+        });
+
+        // Diagnostics are added to the first path
+        project.update(cx, |project, cx| {
+            project.disk_based_diagnostics_started(language_server_id, cx);
+            project
+                .update_diagnostic_entries(
+                    language_server_id,
+                    PathBuf::from("/test/consts.rs"),
+                    None,
+                    vec![
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(0, 15))
+                                ..Unclipped(PointUtf16::new(0, 15)),
+                            diagnostic: Diagnostic {
+                                message: "mismatched types\nexpected `usize`, found `char`"
+                                    .to_string(),
+                                severity: DiagnosticSeverity::ERROR,
+                                is_primary: true,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: Unclipped(PointUtf16::new(1, 15))
+                                ..Unclipped(PointUtf16::new(1, 15)),
+                            diagnostic: Diagnostic {
+                                message: "unresolved name `c`".to_string(),
+                                severity: DiagnosticSeverity::ERROR,
+                                is_primary: true,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                    ],
+                    cx,
+                )
+                .unwrap();
+            project.disk_based_diagnostics_finished(language_server_id, cx);
+        });
+
+        view.next_notification(cx).await;
+        view.update(cx, |view, cx| {
+            assert_eq!(
+                editor_blocks(&view.editor, cx),
+                [
+                    (0, "path header block".into()),
+                    (2, "diagnostic header".into()),
+                    (7, "collapsed context".into()),
+                    (8, "diagnostic header".into()),
+                    (13, "path header block".into()),
+                    (15, "diagnostic header".into()),
+                    (28, "collapsed context".into()),
+                    (29, "diagnostic header".into()),
+                    (38, "collapsed context".into()),
+                ]
+            );
+            assert_eq!(
+                view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+                concat!(
+                    //
+                    // consts.rs
+                    //
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n", // primary message
+                    "\n", // padding
+                    "const a: i32 = 'a';\n",
+                    "\n", // supporting diagnostic
+                    "const b: i32 = c;\n",
+                    "\n", // context ellipsis
+                    // diagnostic group 2
+                    "\n", // primary message
+                    "\n", // padding
+                    "const a: i32 = 'a';\n",
+                    "const b: i32 = c;\n",
+                    "\n", // supporting diagnostic
+                    //
+                    // main.rs
+                    //
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n", // primary message
+                    "\n", // padding
+                    "    let x = vec![];\n",
+                    "    let y = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    a(x);\n",
+                    "    b(y);\n",
+                    "\n", // supporting diagnostic
+                    "    // comment 1\n",
+                    "    // comment 2\n",
+                    "    c(y);\n",
+                    "\n", // supporting diagnostic
+                    "    d(x);\n",
+                    "\n", // context ellipsis
+                    // diagnostic group 2
+                    "\n", // primary message
+                    "\n", // padding
+                    "fn main() {\n",
+                    "    let x = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    let y = vec![];\n",
+                    "    a(x);\n",
+                    "\n", // supporting diagnostic
+                    "    b(y);\n",
+                    "\n", // context ellipsis
+                    "    c(y);\n",
+                    "    d(x);\n",
+                    "\n", // supporting diagnostic
+                    "}"
+                )
+            );
+        });
+    }
+
+    #[gpui::test]
+    async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        fs.insert_tree(
+            "/test",
+            json!({
+                "main.js": "
+                    a();
+                    b();
+                    c();
+                    d();
+                    e();
+                ".unindent()
+            }),
+        )
+        .await;
+
+        let server_id_1 = LanguageServerId(100);
+        let server_id_2 = LanguageServerId(101);
+        let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+        let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
+        let cx = &mut VisualTestContext::from_window(*window, cx);
+        let workspace = window.root(cx).unwrap();
+
+        let view = window.build_view(cx, |cx| {
+            ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx)
+        });
+
+        // Two language servers start updating diagnostics
+        project.update(cx, |project, cx| {
+            project.disk_based_diagnostics_started(server_id_1, cx);
+            project.disk_based_diagnostics_started(server_id_2, cx);
+            project
+                .update_diagnostic_entries(
+                    server_id_1,
+                    PathBuf::from("/test/main.js"),
+                    None,
+                    vec![DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)),
+                        diagnostic: Diagnostic {
+                            message: "error 1".to_string(),
+                            severity: DiagnosticSeverity::WARNING,
+                            is_primary: true,
+                            is_disk_based: true,
+                            group_id: 1,
+                            ..Default::default()
+                        },
+                    }],
+                    cx,
+                )
+                .unwrap();
+        });
+
+        // The first language server finishes
+        project.update(cx, |project, cx| {
+            project.disk_based_diagnostics_finished(server_id_1, cx);
+        });
+
+        // Only the first language server's diagnostics are shown.
+        cx.executor().run_until_parked();
+        view.update(cx, |view, cx| {
+            assert_eq!(
+                editor_blocks(&view.editor, cx),
+                [
+                    (0, "path header block".into()),
+                    (2, "diagnostic header".into()),
+                ]
+            );
+            assert_eq!(
+                view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+                concat!(
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n",     // primary message
+                    "\n",     // padding
+                    "a();\n", //
+                    "b();",
+                )
+            );
+        });
+
+        // The second language server finishes
+        project.update(cx, |project, cx| {
+            project
+                .update_diagnostic_entries(
+                    server_id_2,
+                    PathBuf::from("/test/main.js"),
+                    None,
+                    vec![DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)),
+                        diagnostic: Diagnostic {
+                            message: "warning 1".to_string(),
+                            severity: DiagnosticSeverity::ERROR,
+                            is_primary: true,
+                            is_disk_based: true,
+                            group_id: 2,
+                            ..Default::default()
+                        },
+                    }],
+                    cx,
+                )
+                .unwrap();
+            project.disk_based_diagnostics_finished(server_id_2, cx);
+        });
+
+        // Both language servers' diagnostics are shown.
+        cx.executor().run_until_parked();
+        view.update(cx, |view, cx| {
+            assert_eq!(
+                editor_blocks(&view.editor, cx),
+                [
+                    (0, "path header block".into()),
+                    (2, "diagnostic header".into()),
+                    (6, "collapsed context".into()),
+                    (7, "diagnostic header".into()),
+                ]
+            );
+            assert_eq!(
+                view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+                concat!(
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n",     // primary message
+                    "\n",     // padding
+                    "a();\n", // location
+                    "b();\n", //
+                    "\n",     // collapsed context
+                    // diagnostic group 2
+                    "\n",     // primary message
+                    "\n",     // padding
+                    "a();\n", // context
+                    "b();\n", //
+                    "c();",   // context
+                )
+            );
+        });
+
+        // Both language servers start updating diagnostics, and the first server finishes.
+        project.update(cx, |project, cx| {
+            project.disk_based_diagnostics_started(server_id_1, cx);
+            project.disk_based_diagnostics_started(server_id_2, cx);
+            project
+                .update_diagnostic_entries(
+                    server_id_1,
+                    PathBuf::from("/test/main.js"),
+                    None,
+                    vec![DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)),
+                        diagnostic: Diagnostic {
+                            message: "warning 2".to_string(),
+                            severity: DiagnosticSeverity::WARNING,
+                            is_primary: true,
+                            is_disk_based: true,
+                            group_id: 1,
+                            ..Default::default()
+                        },
+                    }],
+                    cx,
+                )
+                .unwrap();
+            project
+                .update_diagnostic_entries(
+                    server_id_2,
+                    PathBuf::from("/test/main.rs"),
+                    None,
+                    vec![],
+                    cx,
+                )
+                .unwrap();
+            project.disk_based_diagnostics_finished(server_id_1, cx);
+        });
+
+        // Only the first language server's diagnostics are updated.
+        cx.executor().run_until_parked();
+        view.update(cx, |view, cx| {
+            assert_eq!(
+                editor_blocks(&view.editor, cx),
+                [
+                    (0, "path header block".into()),
+                    (2, "diagnostic header".into()),
+                    (7, "collapsed context".into()),
+                    (8, "diagnostic header".into()),
+                ]
+            );
+            assert_eq!(
+                view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+                concat!(
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n",     // primary message
+                    "\n",     // padding
+                    "a();\n", // location
+                    "b();\n", //
+                    "c();\n", // context
+                    "\n",     // collapsed context
+                    // diagnostic group 2
+                    "\n",     // primary message
+                    "\n",     // padding
+                    "b();\n", // context
+                    "c();\n", //
+                    "d();",   // context
+                )
+            );
+        });
+
+        // The second language server finishes.
+        project.update(cx, |project, cx| {
+            project
+                .update_diagnostic_entries(
+                    server_id_2,
+                    PathBuf::from("/test/main.js"),
+                    None,
+                    vec![DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)),
+                        diagnostic: Diagnostic {
+                            message: "warning 2".to_string(),
+                            severity: DiagnosticSeverity::WARNING,
+                            is_primary: true,
+                            is_disk_based: true,
+                            group_id: 1,
+                            ..Default::default()
+                        },
+                    }],
+                    cx,
+                )
+                .unwrap();
+            project.disk_based_diagnostics_finished(server_id_2, cx);
+        });
+
+        // Both language servers' diagnostics are updated.
+        cx.executor().run_until_parked();
+        view.update(cx, |view, cx| {
+            assert_eq!(
+                editor_blocks(&view.editor, cx),
+                [
+                    (0, "path header block".into()),
+                    (2, "diagnostic header".into()),
+                    (7, "collapsed context".into()),
+                    (8, "diagnostic header".into()),
+                ]
+            );
+            assert_eq!(
+                view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+                concat!(
+                    "\n", // filename
+                    "\n", // padding
+                    // diagnostic group 1
+                    "\n",     // primary message
+                    "\n",     // padding
+                    "b();\n", // location
+                    "c();\n", //
+                    "d();\n", // context
+                    "\n",     // collapsed context
+                    // diagnostic group 2
+                    "\n",     // primary message
+                    "\n",     // padding
+                    "c();\n", // context
+                    "d();\n", //
+                    "e();",   // context
+                )
+            );
+        });
+    }
+
+    fn init_test(cx: &mut TestAppContext) {
+        cx.update(|cx| {
+            let settings = SettingsStore::test(cx);
+            cx.set_global(settings);
+            theme::init(theme::LoadThemes::JustBase, cx);
+            language::init(cx);
+            client::init_settings(cx);
+            workspace::init_settings(cx);
+            Project::init_settings(cx);
+            crate::init(cx);
+        });
+    }
+
+    fn editor_blocks(editor: &View<Editor>, cx: &mut WindowContext) -> Vec<(u32, SharedString)> {
+        editor.update(cx, |editor, cx| {
+            let snapshot = editor.snapshot(cx);
+            snapshot
+                .blocks_in_range(0..snapshot.max_point().row())
+                .enumerate()
+                .filter_map(|(ix, (row, block))| {
+                    let name = match block {
+                        TransformBlock::Custom(block) => block
+                            .render(&mut BlockContext {
+                                view_context: cx,
+                                anchor_x: px(0.),
+                                gutter_padding: px(0.),
+                                gutter_width: px(0.),
+                                line_height: px(0.),
+                                em_width: px(0.),
+                                block_id: ix,
+                                editor_style: &editor::EditorStyle::default(),
+                            })
+                            .element_id()?
+                            .try_into()
+                            .ok()?,
+
+                        TransformBlock::ExcerptHeader {
+                            starts_new_buffer, ..
+                        } => {
+                            if *starts_new_buffer {
+                                "path header block".into()
+                            } else {
+                                "collapsed context".into()
+                            }
+                        }
+                    };
+
+                    Some((row, name))
+                })
+                .collect()
+        })
+    }
+}

crates/diagnostics2/src/items.rs 🔗

@@ -0,0 +1,151 @@
+use collections::HashSet;
+use editor::{Editor, GoToDiagnostic};
+use gpui::{
+    rems, Div, EventEmitter, InteractiveComponent, ParentComponent, Render, Stateful,
+    StatefulInteractiveComponent, Styled, Subscription, View, ViewContext, WeakView,
+};
+use language::Diagnostic;
+use lsp::LanguageServerId;
+use theme::ActiveTheme;
+use ui::{h_stack, Icon, IconElement, Label, TextColor, Tooltip};
+use workspace::{item::ItemHandle, StatusItemView, ToolbarItemEvent, Workspace};
+
+use crate::ProjectDiagnosticsEditor;
+
+pub struct DiagnosticIndicator {
+    summary: project::DiagnosticSummary,
+    active_editor: Option<WeakView<Editor>>,
+    workspace: WeakView<Workspace>,
+    current_diagnostic: Option<Diagnostic>,
+    in_progress_checks: HashSet<LanguageServerId>,
+    _observe_active_editor: Option<Subscription>,
+}
+
+impl Render for DiagnosticIndicator {
+    type Element = Stateful<Self, Div<Self>>;
+
+    fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
+        let diagnostic_indicator = match (self.summary.error_count, self.summary.warning_count) {
+            (0, 0) => h_stack().child(IconElement::new(Icon::Check).color(TextColor::Success)),
+            (0, warning_count) => h_stack()
+                .gap_1()
+                .child(IconElement::new(Icon::ExclamationTriangle).color(TextColor::Warning))
+                .child(Label::new(warning_count.to_string())),
+            (error_count, 0) => h_stack()
+                .gap_1()
+                .child(IconElement::new(Icon::XCircle).color(TextColor::Error))
+                .child(Label::new(error_count.to_string())),
+            (error_count, warning_count) => h_stack()
+                .gap_1()
+                .child(IconElement::new(Icon::XCircle).color(TextColor::Error))
+                .child(Label::new(error_count.to_string()))
+                .child(IconElement::new(Icon::ExclamationTriangle).color(TextColor::Warning))
+                .child(Label::new(warning_count.to_string())),
+        };
+
+        h_stack()
+            .id(cx.entity_id())
+            .on_action(Self::go_to_next_diagnostic)
+            .rounded_md()
+            .flex_none()
+            .h(rems(1.375))
+            .px_1()
+            .cursor_pointer()
+            .bg(cx.theme().colors().ghost_element_background)
+            .hover(|style| style.bg(cx.theme().colors().ghost_element_hover))
+            .active(|style| style.bg(cx.theme().colors().ghost_element_active))
+            .tooltip(|_, cx| Tooltip::text("Project Diagnostics", cx))
+            .on_click(|this, _, cx| {
+                if let Some(workspace) = this.workspace.upgrade() {
+                    workspace.update(cx, |workspace, cx| {
+                        ProjectDiagnosticsEditor::deploy(workspace, &Default::default(), cx)
+                    })
+                }
+            })
+            .child(diagnostic_indicator)
+    }
+}
+
+impl DiagnosticIndicator {
+    pub fn new(workspace: &Workspace, cx: &mut ViewContext<Self>) -> Self {
+        let project = workspace.project();
+        cx.subscribe(project, |this, project, event, cx| match event {
+            project::Event::DiskBasedDiagnosticsStarted { language_server_id } => {
+                this.in_progress_checks.insert(*language_server_id);
+                cx.notify();
+            }
+
+            project::Event::DiskBasedDiagnosticsFinished { language_server_id }
+            | project::Event::LanguageServerRemoved(language_server_id) => {
+                this.summary = project.read(cx).diagnostic_summary(cx);
+                this.in_progress_checks.remove(language_server_id);
+                cx.notify();
+            }
+
+            project::Event::DiagnosticsUpdated { .. } => {
+                this.summary = project.read(cx).diagnostic_summary(cx);
+                cx.notify();
+            }
+
+            _ => {}
+        })
+        .detach();
+
+        Self {
+            summary: project.read(cx).diagnostic_summary(cx),
+            in_progress_checks: project
+                .read(cx)
+                .language_servers_running_disk_based_diagnostics()
+                .collect(),
+            active_editor: None,
+            workspace: workspace.weak_handle(),
+            current_diagnostic: None,
+            _observe_active_editor: None,
+        }
+    }
+
+    fn go_to_next_diagnostic(&mut self, _: &GoToDiagnostic, cx: &mut ViewContext<Self>) {
+        if let Some(editor) = self.active_editor.as_ref().and_then(|e| e.upgrade()) {
+            editor.update(cx, |editor, cx| {
+                editor.go_to_diagnostic_impl(editor::Direction::Next, cx);
+            })
+        }
+    }
+
+    fn update(&mut self, editor: View<Editor>, cx: &mut ViewContext<Self>) {
+        let editor = editor.read(cx);
+        let buffer = editor.buffer().read(cx);
+        let cursor_position = editor.selections.newest::<usize>(cx).head();
+        let new_diagnostic = buffer
+            .snapshot(cx)
+            .diagnostics_in_range::<_, usize>(cursor_position..cursor_position, false)
+            .filter(|entry| !entry.range.is_empty())
+            .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
+            .map(|entry| entry.diagnostic);
+        if new_diagnostic != self.current_diagnostic {
+            self.current_diagnostic = new_diagnostic;
+            cx.notify();
+        }
+    }
+}
+
+impl EventEmitter<ToolbarItemEvent> for DiagnosticIndicator {}
+
+impl StatusItemView for DiagnosticIndicator {
+    fn set_active_pane_item(
+        &mut self,
+        active_pane_item: Option<&dyn ItemHandle>,
+        cx: &mut ViewContext<Self>,
+    ) {
+        if let Some(editor) = active_pane_item.and_then(|item| item.downcast::<Editor>()) {
+            self.active_editor = Some(editor.downgrade());
+            self._observe_active_editor = Some(cx.observe(&editor, Self::update));
+            self.update(editor, cx);
+        } else {
+            self.active_editor = None;
+            self.current_diagnostic = None;
+            self._observe_active_editor = None;
+        }
+        cx.notify();
+    }
+}

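Note: this file only defines the indicator; wiring it into the workspace status bar happens in the app crate and is not part of this diff. A minimal, hedged sketch of that registration, assuming workspace2 keeps the `StatusBar::add_left_item` helper from the gpui1 `workspace` crate and that `cx.build_view` is available in the setup closure:

    // Hedged sketch (not in this diff): register DiagnosticIndicator during workspace setup.
    let indicator = cx.build_view(|cx| DiagnosticIndicator::new(workspace, cx));
    workspace.status_bar().update(cx, |status_bar, cx| {
        // add_left_item is assumed to match the existing workspace crate API.
        status_bar.add_left_item(indicator, cx);
    });
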
crates/diagnostics2/src/project_diagnostics_settings.rs 🔗

@@ -0,0 +1,28 @@
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+
+#[derive(Deserialize, Debug)]
+pub struct ProjectDiagnosticsSettings {
+    pub include_warnings: bool,
+}
+
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+pub struct ProjectDiagnosticsSettingsContent {
+    include_warnings: Option<bool>,
+}
+
+impl settings::Settings for ProjectDiagnosticsSettings {
+    const KEY: Option<&'static str> = Some("diagnostics");
+    type FileContent = ProjectDiagnosticsSettingsContent;
+
+    fn load(
+        default_value: &Self::FileContent,
+        user_values: &[&Self::FileContent],
+        _cx: &mut gpui::AppContext,
+    ) -> anyhow::Result<Self>
+    where
+        Self: Sized,
+    {
+        Self::load_via_json_merge(default_value, user_values)
+    }
+}

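Since `KEY` is `"diagnostics"`, user settings merge under a `"diagnostics": { "include_warnings": ... }` object, with a missing value falling back to the default during `load_via_json_merge`. A hedged sketch of the read path, assuming the same `Settings::get_global` accessor used for other settings2 types in this diff (see `TelemetrySettings::get_global` in editor2 below):

    // Hedged sketch: read the merged value once the setting has been registered.
    // `get_global` is assumed here because editor2 uses it for TelemetrySettings.
    let include_warnings = ProjectDiagnosticsSettings::get_global(cx).include_warnings;
    if include_warnings {
        // include warning-severity diagnostics alongside errors
    }
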
crates/diagnostics2/src/toolbar_controls.rs 🔗

@@ -0,0 +1,66 @@
+use crate::ProjectDiagnosticsEditor;
+use gpui::{div, Div, EventEmitter, ParentComponent, Render, ViewContext, WeakView};
+use ui::{Icon, IconButton, Tooltip};
+use workspace::{item::ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView};
+
+pub struct ToolbarControls {
+    editor: Option<WeakView<ProjectDiagnosticsEditor>>,
+}
+
+impl Render for ToolbarControls {
+    type Element = Div<Self>;
+
+    fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
+        let include_warnings = self
+            .editor
+            .as_ref()
+            .and_then(|editor| editor.upgrade())
+            .map(|editor| editor.read(cx).include_warnings)
+            .unwrap_or(false);
+
+        let tooltip = if include_warnings {
+            "Exclude Warnings"
+        } else {
+            "Include Warnings"
+        };
+
+        div().child(
+            IconButton::new("toggle-warnings", Icon::ExclamationTriangle)
+                .tooltip(move |_, cx| Tooltip::text(tooltip, cx))
+                .on_click(|this: &mut Self, cx| {
+                    if let Some(editor) = this.editor.as_ref().and_then(|editor| editor.upgrade()) {
+                        editor.update(cx, |editor, cx| {
+                            editor.toggle_warnings(&Default::default(), cx);
+                        });
+                    }
+                }),
+        )
+    }
+}
+
+impl EventEmitter<ToolbarItemEvent> for ToolbarControls {}
+
+impl ToolbarItemView for ToolbarControls {
+    fn set_active_pane_item(
+        &mut self,
+        active_pane_item: Option<&dyn ItemHandle>,
+        _: &mut ViewContext<Self>,
+    ) -> ToolbarItemLocation {
+        if let Some(pane_item) = active_pane_item.as_ref() {
+            if let Some(editor) = pane_item.downcast::<ProjectDiagnosticsEditor>() {
+                self.editor = Some(editor.downgrade());
+                ToolbarItemLocation::PrimaryRight { flex: None }
+            } else {
+                ToolbarItemLocation::Hidden
+            }
+        } else {
+            ToolbarItemLocation::Hidden
+        }
+    }
+}
+
+impl ToolbarControls {
+    pub fn new() -> Self {
+        ToolbarControls { editor: None }
+    }
+}

crates/editor/src/editor.rs 🔗

@@ -24,7 +24,7 @@ use ::git::diff::DiffHunk;
 use aho_corasick::AhoCorasick;
 use anyhow::{anyhow, Context, Result};
 use blink_manager::BlinkManager;
-use client::{ClickhouseEvent, Client, Collaborator, ParticipantIndex, TelemetrySettings};
+use client::{Client, Collaborator, ParticipantIndex, TelemetrySettings};
 use clock::{Global, ReplicaId};
 use collections::{BTreeMap, Bound, HashMap, HashSet, VecDeque};
 use convert_case::{Case, Casing};
@@ -8946,12 +8946,12 @@ impl Editor {
         let telemetry = project.read(cx).client().telemetry().clone();
         let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
 
-        let event = ClickhouseEvent::Copilot {
+        telemetry.report_copilot_event(
+            telemetry_settings,
             suggestion_id,
             suggestion_accepted,
             file_extension,
-        };
-        telemetry.report_clickhouse_event(event, telemetry_settings);
+        )
     }
 
     #[cfg(any(test, feature = "test-support"))]
@@ -8998,14 +8998,14 @@ impl Editor {
             .show_copilot_suggestions;
 
         let telemetry = project.read(cx).client().telemetry().clone();
-        let event = ClickhouseEvent::Editor {
+        telemetry.report_editor_event(
+            telemetry_settings,
             file_extension,
             vim_mode,
             operation,
             copilot_enabled,
             copilot_enabled_for_language,
-        };
-        telemetry.report_clickhouse_event(event, telemetry_settings)
+        )
     }
 
     /// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines,

crates/editor2/src/editor.rs 🔗

@@ -24,7 +24,7 @@ use ::git::diff::DiffHunk;
 use aho_corasick::AhoCorasick;
 use anyhow::{anyhow, Context as _, Result};
 use blink_manager::BlinkManager;
-use client::{ClickhouseEvent, Client, Collaborator, ParticipantIndex, TelemetrySettings};
+use client::{Client, Collaborator, ParticipantIndex, TelemetrySettings};
 use clock::ReplicaId;
 use collections::{BTreeMap, Bound, HashMap, HashSet, VecDeque};
 use convert_case::{Case, Casing};
@@ -585,7 +585,7 @@ pub enum SoftWrap {
     Column(u32),
 }
 
-#[derive(Clone)]
+#[derive(Clone, Default)]
 pub struct EditorStyle {
     pub background: Hsla,
     pub local_player: PlayerColor,
@@ -2319,7 +2319,7 @@ impl Editor {
         }
 
         self.blink_manager.update(cx, BlinkManager::pause_blinking);
-        cx.emit(Event::SelectionsChanged { local });
+        cx.emit(EditorEvent::SelectionsChanged { local });
 
         if self.selections.disjoint_anchors().len() == 1 {
             cx.emit(SearchEvent::ActiveMatchChanged)
@@ -4243,7 +4243,7 @@ impl Editor {
 
                 self.report_copilot_event(Some(completion.uuid.clone()), true, cx)
             }
-            cx.emit(Event::InputHandled {
+            cx.emit(EditorEvent::InputHandled {
                 utf16_range_to_replace: None,
                 text: suggestion.text.to_string().into(),
             });
@@ -4393,16 +4393,17 @@ impl Editor {
                                 FoldStatus::Folded => ui::Icon::ChevronRight,
                                 FoldStatus::Foldable => ui::Icon::ChevronDown,
                             };
-                            IconButton::new(ix as usize, icon).on_click(
-                                move |editor: &mut Editor, cx| match fold_status {
+                            IconButton::new(ix as usize, icon)
+                                .on_click(move |editor: &mut Editor, cx| match fold_status {
                                     FoldStatus::Folded => {
                                         editor.unfold_at(&UnfoldAt { buffer_row }, cx);
                                     }
                                     FoldStatus::Foldable => {
                                         editor.fold_at(&FoldAt { buffer_row }, cx);
                                     }
-                                },
-                            )
+                                })
+                                .color(ui::TextColor::Muted)
+                                .render()
                         })
                     })
                     .flatten()
@@ -5640,7 +5641,7 @@ impl Editor {
             self.request_autoscroll(Autoscroll::fit(), cx);
             self.unmark_text(cx);
             self.refresh_copilot_suggestions(true, cx);
-            cx.emit(Event::Edited);
+            cx.emit(EditorEvent::Edited);
         }
     }
 
@@ -5655,7 +5656,7 @@ impl Editor {
             self.request_autoscroll(Autoscroll::fit(), cx);
             self.unmark_text(cx);
             self.refresh_copilot_suggestions(true, cx);
-            cx.emit(Event::Edited);
+            cx.emit(EditorEvent::Edited);
         }
     }
 
@@ -8124,7 +8125,7 @@ impl Editor {
                 log::error!("unexpectedly ended a transaction that wasn't started by this editor");
             }
 
-            cx.emit(Event::Edited);
+            cx.emit(EditorEvent::Edited);
             Some(tx_id)
         } else {
             None
@@ -8712,7 +8713,7 @@ impl Editor {
                 if self.has_active_copilot_suggestion(cx) {
                     self.update_visible_copilot_suggestion(cx);
                 }
-                cx.emit(Event::BufferEdited);
+                cx.emit(EditorEvent::BufferEdited);
                 cx.emit(ItemEvent::Edit);
                 cx.emit(ItemEvent::UpdateBreadcrumbs);
                 cx.emit(SearchEvent::MatchesInvalidated);
@@ -8751,7 +8752,7 @@ impl Editor {
                 predecessor,
                 excerpts,
             } => {
-                cx.emit(Event::ExcerptsAdded {
+                cx.emit(EditorEvent::ExcerptsAdded {
                     buffer: buffer.clone(),
                     predecessor: *predecessor,
                     excerpts: excerpts.clone(),
@@ -8760,7 +8761,7 @@ impl Editor {
             }
             multi_buffer::Event::ExcerptsRemoved { ids } => {
                 self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx);
-                cx.emit(Event::ExcerptsRemoved { ids: ids.clone() })
+                cx.emit(EditorEvent::ExcerptsRemoved { ids: ids.clone() })
             }
             multi_buffer::Event::Reparsed => {
                 cx.emit(ItemEvent::UpdateBreadcrumbs);
@@ -8774,7 +8775,7 @@ impl Editor {
                 cx.emit(ItemEvent::UpdateTab);
                 cx.emit(ItemEvent::UpdateBreadcrumbs);
             }
-            multi_buffer::Event::DiffBaseChanged => cx.emit(Event::DiffBaseChanged),
+            multi_buffer::Event::DiffBaseChanged => cx.emit(EditorEvent::DiffBaseChanged),
             multi_buffer::Event::Closed => cx.emit(ItemEvent::CloseItem),
             multi_buffer::Event::DiagnosticsUpdated => {
                 self.refresh_active_diagnostics(cx);
@@ -8968,12 +8969,12 @@ impl Editor {
         let telemetry = project.read(cx).client().telemetry().clone();
         let telemetry_settings = *TelemetrySettings::get_global(cx);
 
-        let event = ClickhouseEvent::Copilot {
+        telemetry.report_copilot_event(
+            telemetry_settings,
             suggestion_id,
             suggestion_accepted,
             file_extension,
-        };
-        telemetry.report_clickhouse_event(event, telemetry_settings);
+        )
     }
 
     #[cfg(any(test, feature = "test-support"))]
@@ -9020,14 +9021,14 @@ impl Editor {
             .show_copilot_suggestions;
 
         let telemetry = project.read(cx).client().telemetry().clone();
-        let event = ClickhouseEvent::Editor {
+        telemetry.report_editor_event(
+            telemetry_settings,
             file_extension,
             vim_mode,
             operation,
             copilot_enabled,
             copilot_enabled_for_language,
-        };
-        telemetry.report_clickhouse_event(event, telemetry_settings)
+        )
     }
 
     /// Copy the highlighted chunks to the clipboard as JSON. The format is an array of lines,
@@ -9114,7 +9115,7 @@ impl Editor {
         cx: &mut ViewContext<Self>,
     ) {
         if !self.input_enabled {
-            cx.emit(Event::InputIgnored { text: text.into() });
+            cx.emit(EditorEvent::InputIgnored { text: text.into() });
             return;
         }
         if let Some(relative_utf16_range) = relative_utf16_range {
@@ -9174,7 +9175,7 @@ impl Editor {
     }
 
     fn handle_focus(&mut self, cx: &mut ViewContext<Self>) {
-        cx.emit(Event::Focused);
+        cx.emit(EditorEvent::Focused);
 
         if let Some(rename) = self.pending_rename.as_ref() {
             let rename_editor_focus_handle = rename.editor.read(cx).focus_handle.clone();
@@ -9204,7 +9205,7 @@ impl Editor {
             .update(cx, |buffer, cx| buffer.remove_active_selections(cx));
         self.hide_context_menu(cx);
         hide_hover(self, cx);
-        cx.emit(Event::Blurred);
+        cx.emit(EditorEvent::Blurred);
         cx.notify();
     }
 }
@@ -9327,7 +9328,7 @@ impl Deref for EditorSnapshot {
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub enum Event {
+pub enum EditorEvent {
     InputIgnored {
         text: Arc<str>,
     },
@@ -9345,8 +9346,12 @@ pub enum Event {
     },
     BufferEdited,
     Edited,
+    Reparsed,
     Focused,
     Blurred,
+    DirtyChanged,
+    Saved,
+    TitleChanged,
     DiffBaseChanged,
     SelectionsChanged {
         local: bool,
@@ -9355,6 +9360,7 @@ pub enum Event {
         local: bool,
         autoscroll: bool,
     },
+    Closed,
 }
 
 pub struct EditorFocused(pub View<Editor>);
@@ -9369,7 +9375,7 @@ pub struct EditorReleased(pub WeakView<Editor>);
 //     }
 // }
 //
-impl EventEmitter<Event> for Editor {}
+impl EventEmitter<EditorEvent> for Editor {}
 
 impl FocusableView for Editor {
     fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
@@ -9572,7 +9578,7 @@ impl InputHandler for Editor {
         cx: &mut ViewContext<Self>,
     ) {
         if !self.input_enabled {
-            cx.emit(Event::InputIgnored { text: text.into() });
+            cx.emit(EditorEvent::InputIgnored { text: text.into() });
             return;
         }
 
@@ -9602,7 +9608,7 @@ impl InputHandler for Editor {
                     })
             });
 
-            cx.emit(Event::InputHandled {
+            cx.emit(EditorEvent::InputHandled {
                 utf16_range_to_replace: range_to_replace,
                 text: text.into(),
             });
@@ -9633,7 +9639,7 @@ impl InputHandler for Editor {
         cx: &mut ViewContext<Self>,
     ) {
         if !self.input_enabled {
-            cx.emit(Event::InputIgnored { text: text.into() });
+            cx.emit(EditorEvent::InputIgnored { text: text.into() });
             return;
         }
 
@@ -9676,7 +9682,7 @@ impl InputHandler for Editor {
                     })
             });
 
-            cx.emit(Event::InputHandled {
+            cx.emit(EditorEvent::InputHandled {
                 utf16_range_to_replace: range_to_replace,
                 text: text.into(),
             });

crates/editor2/src/editor_tests.rs 🔗

@@ -3853,7 +3853,7 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
     let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
     let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
 
-    view.condition::<crate::Event>(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+    view.condition::<crate::EditorEvent>(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
         .await;
 
     view.update(cx, |view, cx| {
@@ -4019,7 +4019,7 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
     let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
     let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
     editor
-        .condition::<crate::Event>(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
+        .condition::<crate::EditorEvent>(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
         .await;
 
     editor.update(cx, |editor, cx| {
@@ -4583,7 +4583,7 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
     });
     let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
     let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
-    view.condition::<crate::Event>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+    view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
         .await;
 
     view.update(cx, |view, cx| {
@@ -4734,7 +4734,7 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
     let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
     let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
     editor
-        .condition::<crate::Event>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+        .condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
         .await;
 
     editor.update(cx, |editor, cx| {
@@ -6295,7 +6295,7 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
     });
     let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
     let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
-    view.condition::<crate::Event>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+    view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
         .await;
 
     view.update(cx, |view, cx| {

crates/editor2/src/element.rs 🔗

@@ -1972,6 +1972,7 @@ impl EditorElement {
                 TransformBlock::ExcerptHeader { .. } => false,
                 TransformBlock::Custom(block) => block.style() == BlockStyle::Fixed,
             });
+
         let mut render_block = |block: &TransformBlock,
                                 available_space: Size<AvailableSpace>,
                                 block_id: usize,
@@ -2005,6 +2006,7 @@ impl EditorElement {
                         editor_style: &self.style,
                     })
                 }
+
                 TransformBlock::ExcerptHeader {
                     buffer,
                     range,
@@ -2049,6 +2051,7 @@ impl EditorElement {
                         }
 
                         h_stack()
+                            .id("path header block")
                             .size_full()
                             .bg(gpui::red())
                             .child(
@@ -2061,6 +2064,7 @@ impl EditorElement {
                     } else {
                         let text_style = style.text.clone();
                         h_stack()
+                            .id("collapsed context")
                             .size_full()
                             .bg(gpui::red())
                             .child("⋯")

crates/editor2/src/items.rs 🔗

@@ -1,7 +1,7 @@
 use crate::{
     editor_settings::SeedQuerySetting, link_go_to_definition::hide_link_definition,
     movement::surrounding_word, persistence::DB, scroll::ScrollAnchor, Anchor, Autoscroll, Editor,
-    EditorSettings, Event, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot,
+    EditorEvent, EditorSettings, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot,
     NavigationData, ToPoint as _,
 };
 use anyhow::{anyhow, Context, Result};
@@ -41,11 +41,12 @@ use workspace::{
 
 pub const MAX_TAB_TITLE_LEN: usize = 24;
 
-impl FollowableEvents for Event {
+impl FollowableEvents for EditorEvent {
     fn to_follow_event(&self) -> Option<workspace::item::FollowEvent> {
         match self {
-            Event::Edited => Some(FollowEvent::Unfollow),
-            Event::SelectionsChanged { local } | Event::ScrollPositionChanged { local, .. } => {
+            EditorEvent::Edited => Some(FollowEvent::Unfollow),
+            EditorEvent::SelectionsChanged { local }
+            | EditorEvent::ScrollPositionChanged { local, .. } => {
                 if *local {
                     Some(FollowEvent::Unfollow)
                 } else {
@@ -60,7 +61,7 @@ impl FollowableEvents for Event {
 impl EventEmitter<ItemEvent> for Editor {}
 
 impl FollowableItem for Editor {
-    type FollowableEvent = Event;
+    type FollowableEvent = EditorEvent;
     fn remote_id(&self) -> Option<ViewId> {
         self.remote_id
     }
@@ -248,7 +249,7 @@ impl FollowableItem for Editor {
 
         match update {
             proto::update_view::Variant::Editor(update) => match event {
-                Event::ExcerptsAdded {
+                EditorEvent::ExcerptsAdded {
                     buffer,
                     predecessor,
                     excerpts,
@@ -269,20 +270,20 @@ impl FollowableItem for Editor {
                     }
                     true
                 }
-                Event::ExcerptsRemoved { ids } => {
+                EditorEvent::ExcerptsRemoved { ids } => {
                     update
                         .deleted_excerpts
                         .extend(ids.iter().map(ExcerptId::to_proto));
                     true
                 }
-                Event::ScrollPositionChanged { .. } => {
+                EditorEvent::ScrollPositionChanged { .. } => {
                     let scroll_anchor = self.scroll_manager.anchor();
                     update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.anchor));
                     update.scroll_x = scroll_anchor.offset.x;
                     update.scroll_y = scroll_anchor.offset.y;
                     true
                 }
-                Event::SelectionsChanged { .. } => {
+                EditorEvent::SelectionsChanged { .. } => {
                     update.selections = self
                         .selections
                         .disjoint_anchors()

crates/editor2/src/scroll.rs 🔗

@@ -6,8 +6,8 @@ use crate::{
     display_map::{DisplaySnapshot, ToDisplayPoint},
     hover_popover::hide_hover,
     persistence::DB,
-    Anchor, DisplayPoint, Editor, EditorMode, Event, InlayHintRefreshReason, MultiBufferSnapshot,
-    ToPoint,
+    Anchor, DisplayPoint, Editor, EditorEvent, EditorMode, InlayHintRefreshReason,
+    MultiBufferSnapshot, ToPoint,
 };
 use gpui::{point, px, AppContext, Entity, Pixels, Styled, Task, ViewContext};
 use language::{Bias, Point};
@@ -224,7 +224,7 @@ impl ScrollManager {
         cx: &mut ViewContext<Editor>,
     ) {
         self.anchor = anchor;
-        cx.emit(Event::ScrollPositionChanged { local, autoscroll });
+        cx.emit(EditorEvent::ScrollPositionChanged { local, autoscroll });
         self.show_scrollbar(cx);
         self.autoscroll_request.take();
         if let Some(workspace_id) = workspace_id {

crates/editor2/src/test/editor_test_context.rs 🔗

@@ -71,7 +71,8 @@ impl<'a> EditorTestContext<'a> {
         &self,
         predicate: impl FnMut(&Editor, &AppContext) -> bool,
     ) -> impl Future<Output = ()> {
-        self.editor.condition::<crate::Event>(&self.cx, predicate)
+        self.editor
+            .condition::<crate::EditorEvent>(&self.cx, predicate)
     }
 
     #[track_caller]

crates/file_finder2/src/file_finder.rs 🔗

@@ -2,8 +2,10 @@ use collections::HashMap;
 use editor::{scroll::autoscroll::Autoscroll, Bias, Editor};
 use fuzzy::{CharBag, PathMatch, PathMatchCandidate};
 use gpui::{
-    actions, div, AppContext, Dismiss, Div, FocusHandle, InteractiveElement, ManagedView, Model,
-    ParentElement, Render, RenderOnce, Styled, Task, View, ViewContext, VisualContext, WeakView,
+    actions, div, AppContext, Component, Dismiss, Div, EventEmitter, FocusHandle, FocusableView,
+    InteractiveComponent, InteractiveElement, ManagedView, Manager, Model, ParentComponent,
+    ParentElement, Render, RenderOnce, Styled, Task, View, ViewContext, VisualContext,
+    WeakView,
 };
 use picker::{Picker, PickerDelegate};
 use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
@@ -110,7 +112,8 @@ impl FileFinder {
     }
 }
 
-impl ManagedView for FileFinder {
+impl EventEmitter<Manager> for FileFinder {}
+impl FocusableView for FileFinder {
     fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
         self.picker.focus_handle(cx)
     }
@@ -687,7 +690,9 @@ impl PickerDelegate for FileFinderDelegate {
                                 .log_err();
                         }
                     }
-                    finder.update(&mut cx, |_, cx| cx.emit(Dismiss)).ok()?;
+                    finder
+                        .update(&mut cx, |_, cx| cx.emit(Manager::Dismiss))
+                        .ok()?;
 
                     Some(())
                 })
@@ -698,7 +703,7 @@ impl PickerDelegate for FileFinderDelegate {
 
     fn dismissed(&mut self, cx: &mut ViewContext<Picker<FileFinderDelegate>>) {
         self.file_finder
-            .update(cx, |_, cx| cx.emit(Dismiss))
+            .update(cx, |_, cx| cx.emit(Manager::Dismiss))
             .log_err();
     }
 

crates/go_to_line2/src/go_to_line.rs 🔗

@@ -1,7 +1,8 @@
 use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Editor};
 use gpui::{
-    actions, div, prelude::*, AppContext, Dismiss, Div, FocusHandle, ManagedView, ParentElement,
-    Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext, WindowContext,
+    actions, div, prelude::*, AppContext, Div, EventEmitter, FocusHandle, FocusableView, Manager,
+    ParentComponent, Render, SharedString, Styled, Subscription, View, ViewContext, VisualContext,
+    WindowContext,
 };
 use text::{Bias, Point};
 use theme::ActiveTheme;
@@ -23,11 +24,12 @@ pub struct GoToLine {
     _subscriptions: Vec<Subscription>,
 }
 
-impl ManagedView for GoToLine {
+impl FocusableView for GoToLine {
     fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
-        self.line_editor.focus_handle(cx)
+        self.active_editor.focus_handle(cx)
     }
 }
+impl EventEmitter<Manager> for GoToLine {}
 
 impl GoToLine {
     fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
@@ -82,13 +84,13 @@ impl GoToLine {
     fn on_line_editor_event(
         &mut self,
         _: View<Editor>,
-        event: &editor::Event,
+        event: &editor::EditorEvent,
         cx: &mut ViewContext<Self>,
     ) {
         match event {
             // todo!() this isn't working...
-            editor::Event::Blurred => cx.emit(Dismiss),
-            editor::Event::BufferEdited { .. } => self.highlight_current_line(cx),
+            editor::EditorEvent::Blurred => cx.emit(Manager::Dismiss),
+            editor::EditorEvent::BufferEdited { .. } => self.highlight_current_line(cx),
             _ => {}
         }
     }
@@ -122,7 +124,7 @@ impl GoToLine {
     }
 
     fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
-        cx.emit(Dismiss);
+        cx.emit(Manager::Dismiss);
     }
 
     fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
@@ -139,7 +141,7 @@ impl GoToLine {
             self.prev_scroll_position.take();
         }
 
-        cx.emit(Dismiss);
+        cx.emit(Manager::Dismiss);
     }
 }
 

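Both the file_finder2 and go_to_line2 changes above follow the same migration: `ManagedView` is split into `FocusableView` plus `EventEmitter<Manager>`, and dismissal is now signaled with `cx.emit(Manager::Dismiss)` rather than a dedicated `Dismiss` event. A minimal sketch of the resulting shape for some other modal (the `MyModal` type and its `focus_handle` field are hypothetical):

    // Hypothetical modal showing the ManagedView -> FocusableView + EventEmitter<Manager> split.
    struct MyModal {
        focus_handle: FocusHandle,
    }

    impl FocusableView for MyModal {
        fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
            self.focus_handle.clone()
        }
    }

    impl EventEmitter<Manager> for MyModal {}

    impl MyModal {
        fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
            // Dismissal is an event on the Manager channel, handled by whatever shows the modal.
            cx.emit(Manager::Dismiss);
        }
    }
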
crates/gpui2/Cargo.toml 🔗

@@ -47,7 +47,7 @@ serde_derive.workspace = true
 serde_json.workspace = true
 smallvec.workspace = true
 smol.workspace = true
-taffy = { git = "https://github.com/DioxusLabs/taffy", rev = "4fb530bdd71609bb1d3f76c6a8bde1ba82805d5e" }
+taffy = { git = "https://github.com/DioxusLabs/taffy", rev = "1876f72bee5e376023eaa518aa7b8a34c769bd1b" }
 thiserror.workspace = true
 time.workspace = true
 tiny-skia = "0.5"

crates/gpui2/src/app.rs 🔗

@@ -492,6 +492,10 @@ impl AppContext {
         self.platform.open_url(url);
     }
 
+    pub fn app_path(&self) -> Result<PathBuf> {
+        self.platform.app_path()
+    }
+
     pub fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf> {
         self.platform.path_for_auxiliary_executable(name)
     }

crates/gpui2/src/app/async_context.rs 🔗

@@ -1,6 +1,6 @@
 use crate::{
     AnyView, AnyWindowHandle, AppCell, AppContext, BackgroundExecutor, Context, FocusableView,
-    ForegroundExecutor, Model, ModelContext, Render, Result, Task, View, ViewContext,
+    ForegroundExecutor, Manager, Model, ModelContext, Render, Result, Task, View, ViewContext,
     VisualContext, WindowContext, WindowHandle,
 };
 use anyhow::{anyhow, Context as _};
@@ -320,4 +320,13 @@ impl VisualContext for AsyncWindowContext {
             view.read(cx).focus_handle(cx).clone().focus(cx);
         })
     }
+
+    fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+    where
+        V: crate::ManagedView,
+    {
+        self.window.update(self, |_, cx| {
+            view.update(cx, |_, cx| cx.emit(Manager::Dismiss))
+        })
+    }
 }

crates/gpui2/src/app/entity_map.rs 🔗

@@ -71,11 +71,12 @@ impl EntityMap {
     #[track_caller]
     pub fn lease<'a, T>(&mut self, model: &'a Model<T>) -> Lease<'a, T> {
         self.assert_valid_context(model);
-        let entity = Some(
-            self.entities
-                .remove(model.entity_id)
-                .expect("Circular entity lease. Is the entity already being updated?"),
-        );
+        let entity = Some(self.entities.remove(model.entity_id).unwrap_or_else(|| {
+            panic!(
+                "Circular entity lease of {}. Is it already being updated?",
+                std::any::type_name::<T>()
+            )
+        }));
         Lease {
             model,
             entity,

crates/gpui2/src/app/test_context.rs 🔗

@@ -386,6 +386,32 @@ impl<T: Send> Model<T> {
     }
 }
 
+impl<V: 'static> View<V> {
+    pub fn next_notification(&self, cx: &TestAppContext) -> impl Future<Output = ()> {
+        use postage::prelude::{Sink as _, Stream as _};
+
+        let (mut tx, mut rx) = postage::mpsc::channel(1);
+        let mut cx = cx.app.app.borrow_mut();
+        let subscription = cx.observe(self, move |_, _| {
+            tx.try_send(()).ok();
+        });
+
+        let duration = if std::env::var("CI").is_ok() {
+            Duration::from_secs(5)
+        } else {
+            Duration::from_secs(1)
+        };
+
+        async move {
+            let notification = crate::util::timeout(duration, rx.recv())
+                .await
+                .expect("next notification timed out");
+            drop(subscription);
+            notification.expect("model dropped while test was waiting for its next notification")
+        }
+    }
+}
+
 impl<V> View<V> {
     pub fn condition<Evt>(
         &self,
@@ -579,6 +605,17 @@ impl<'a> VisualContext for VisualTestContext<'a> {
             })
             .unwrap()
     }
+
+    fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+    where
+        V: crate::ManagedView,
+    {
+        self.window
+            .update(self.cx, |_, cx| {
+                view.update(cx, |_, cx| cx.emit(crate::Manager::Dismiss))
+            })
+            .unwrap()
+    }
 }
 
 impl AnyWindowHandle {
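
A hedged sketch of how the new test helper might be awaited; `MyView` and the wrapper function are hypothetical, and only `View::next_notification` comes from this change:

    use gpui::{TestAppContext, View};

    struct MyView; // hypothetical view under test, constructed elsewhere in the test

    // Resolves once the view calls cx.notify(), or panics after the helper's
    // built-in timeout (5s on CI, 1s locally).
    async fn wait_for_next_notify(view: View<MyView>, cx: &TestAppContext) {
        view.next_notification(cx).await;
    }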

crates/gpui2/src/elements/div.rs 🔗

@@ -1124,9 +1124,14 @@ where
                     }
                 }
             }
+            // if self.hover_style.is_some() {
             if bounds.contains_point(&mouse_position) {
+                // eprintln!("div hovered {bounds:?} {mouse_position:?}");
                 style.refine(&self.hover_style);
+            } else {
+                // eprintln!("div NOT hovered {bounds:?} {mouse_position:?}");
             }
+            // }
 
             if let Some(drag) = cx.active_drag.take() {
                 for (state_type, group_drag_style) in &self.group_drag_over_styles {

crates/gpui2/src/elements/img.rs 🔗

@@ -70,7 +70,7 @@ impl<V> Element<V> for Img<V> {
                     if let Some(data) = image_future
                         .clone()
                         .now_or_never()
-                        .and_then(ResultExt::log_err)
+                        .and_then(|result| result.ok())
                     {
                         let corner_radii = corner_radii.to_pixels(bounds.size, cx.rem_size());
                         cx.with_z_index(1, |cx| {
@@ -79,7 +79,7 @@ impl<V> Element<V> for Img<V> {
                         });
                     } else {
                         cx.spawn(|_, mut cx| async move {
-                            if image_future.await.log_err().is_some() {
+                            if image_future.await.ok().is_some() {
                                 cx.on_next_frame(|cx| cx.notify());
                             }
                         })

crates/gpui2/src/elements/text.rs 🔗

@@ -130,19 +130,34 @@ impl<V: 'static> Element<V> for StyledText {
 
         let layout_id = cx.request_measured_layout(Default::default(), rem_size, {
             let element_state = element_state.clone();
-            move |known_dimensions, _| {
+            move |known_dimensions, available_space| {
+                let wrap_width = known_dimensions.width.or(match available_space.width {
+                    crate::AvailableSpace::Definite(x) => Some(x),
+                    _ => None,
+                });
+
+                if let Some(text_state) = element_state.0.lock().as_ref() {
+                    if text_state.size.is_some()
+                        && (wrap_width.is_none() || wrap_width == text_state.wrap_width)
+                    {
+                        return text_state.size.unwrap();
+                    }
+                }
+
                 let Some(lines) = text_system
                     .shape_text(
                         &text,
                         font_size,
                         &runs[..],
-                        known_dimensions.width, // Wrap if we know the width.
+                        wrap_width, // Wrap if we know the width.
                     )
                     .log_err()
                 else {
                     element_state.lock().replace(TextStateInner {
                         lines: Default::default(),
                         line_height,
+                        wrap_width,
+                        size: Some(Size::default()),
                     });
                     return Size::default();
                 };
@@ -154,9 +169,12 @@ impl<V: 'static> Element<V> for StyledText {
                     size.width = size.width.max(line_size.width);
                 }
 
-                element_state
-                    .lock()
-                    .replace(TextStateInner { lines, line_height });
+                element_state.lock().replace(TextStateInner {
+                    lines,
+                    line_height,
+                    wrap_width,
+                    size: Some(size),
+                });
 
                 size
             }
@@ -205,6 +223,8 @@ pub struct TextState(Arc<Mutex<Option<TextStateInner>>>);
 struct TextStateInner {
     lines: SmallVec<[WrappedLine; 1]>,
     line_height: Pixels,
+    wrap_width: Option<Pixels>,
+    size: Option<Size<Pixels>>,
 }
 
 impl TextState {

crates/gpui2/src/gpui2.rs 🔗

@@ -139,6 +139,10 @@ pub trait VisualContext: Context {
     fn focus_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
     where
         V: FocusableView;
+
+    fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+    where
+        V: ManagedView;
 }
 
 pub trait Entity<T>: Sealed {
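
With `dismiss_view` on the trait, dismissal can be written once against any context. A sketch, assuming `ManagedView` and `VisualContext` are re-exported from the crate root:

    use gpui::{ManagedView, View, VisualContext};

    // Works with WindowContext, ViewContext, AsyncWindowContext, or the test
    // context: each implementation emits Manager::Dismiss on the view.
    fn dismiss_any<V, C>(view: &View<V>, cx: &mut C) -> C::Result<()>
    where
        V: ManagedView,
        C: VisualContext,
    {
        cx.dismiss_view(view)
    }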

crates/gpui2/src/image_cache.rs 🔗

@@ -2,7 +2,7 @@ use crate::{ImageData, ImageId, SharedString};
 use collections::HashMap;
 use futures::{
     future::{BoxFuture, Shared},
-    AsyncReadExt, FutureExt,
+    AsyncReadExt, FutureExt, TryFutureExt,
 };
 use image::ImageError;
 use parking_lot::Mutex;
@@ -88,6 +88,14 @@ impl ImageCache {
                         Ok(Arc::new(ImageData::new(image)))
                     }
                 }
+                .map_err({
+                    let uri = uri.clone();
+
+                    move |error| {
+                        log::log!(log::Level::Error, "{:?} {:?}", &uri, &error);
+                        error
+                    }
+                })
                 .boxed()
                 .shared();
 

crates/gpui2/src/platform/mac/window.rs 🔗

@@ -1205,10 +1205,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
 
             InputEvent::MouseMove(_) if !(is_active || lock.kind == WindowKind::PopUp) => return,
 
-            InputEvent::MouseUp(MouseUpEvent {
-                button: MouseButton::Left,
-                ..
-            }) => {
+            InputEvent::MouseUp(MouseUpEvent { .. }) => {
                 lock.synthetic_drag_counter += 1;
             }
 

crates/gpui2/src/taffy.rs 🔗

@@ -5,12 +5,14 @@ use std::fmt::Debug;
 use taffy::{
     geometry::{Point as TaffyPoint, Rect as TaffyRect, Size as TaffySize},
     style::AvailableSpace as TaffyAvailableSpace,
-    tree::{Measurable, MeasureFunc, NodeId},
+    tree::NodeId,
     Taffy,
 };
 
+type Measureable = dyn Fn(Size<Option<Pixels>>, Size<AvailableSpace>) -> Size<Pixels> + Send + Sync;
+
 pub struct TaffyLayoutEngine {
-    taffy: Taffy,
+    taffy: Taffy<Box<Measureable>>,
     children_to_parents: HashMap<LayoutId, LayoutId>,
     absolute_layout_bounds: HashMap<LayoutId, Bounds<Pixels>>,
     computed_layouts: HashSet<LayoutId>,
@@ -70,9 +72,9 @@ impl TaffyLayoutEngine {
     ) -> LayoutId {
         let style = style.to_taffy(rem_size);
 
-        let measurable = Box::new(Measureable(measure)) as Box<dyn Measurable>;
+        let measurable = Box::new(measure);
         self.taffy
-            .new_leaf_with_measure(style, MeasureFunc::Boxed(measurable))
+            .new_leaf_with_context(style, measurable)
             .expect(EXPECT_MESSAGE)
             .into()
     }
@@ -154,7 +156,22 @@ impl TaffyLayoutEngine {
 
         // let started_at = std::time::Instant::now();
         self.taffy
-            .compute_layout(id.into(), available_space.into())
+            .compute_layout_with_measure(
+                id.into(),
+                available_space.into(),
+                |known_dimensions, available_space, _node_id, context| {
+                    let Some(measure) = context else {
+                        return taffy::geometry::Size::default();
+                    };
+
+                    let known_dimensions = Size {
+                        width: known_dimensions.width.map(Pixels),
+                        height: known_dimensions.height.map(Pixels),
+                    };
+
+                    measure(known_dimensions, available_space.into()).into()
+                },
+            )
             .expect(EXPECT_MESSAGE);
         // println!("compute_layout took {:?}", started_at.elapsed());
     }
@@ -202,25 +219,6 @@ impl From<LayoutId> for NodeId {
     }
 }
 
-struct Measureable<F>(F);
-
-impl<F> taffy::tree::Measurable for Measureable<F>
-where
-    F: Fn(Size<Option<Pixels>>, Size<AvailableSpace>) -> Size<Pixels> + Send + Sync,
-{
-    fn measure(
-        &self,
-        known_dimensions: TaffySize<Option<f32>>,
-        available_space: TaffySize<TaffyAvailableSpace>,
-    ) -> TaffySize<f32> {
-        let known_dimensions: Size<Option<f32>> = known_dimensions.into();
-        let known_dimensions: Size<Option<Pixels>> = known_dimensions.map(|d| d.map(Into::into));
-        let available_space = available_space.into();
-        let size = (self.0)(known_dimensions, available_space);
-        size.into()
-    }
-}
-
 trait ToTaffy<Output> {
     fn to_taffy(&self, rem_size: Pixels) -> Output;
 }
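
After the taffy bump, measure functions are plain closures stored as node context (the `Measureable` alias above) rather than boxed `Measurable` trait objects. A sketch of a function with that shape; the fallback sizes are arbitrary:

    use gpui::{AvailableSpace, Pixels, Size};

    // Matches the `Measureable` signature: use the dimensions taffy has
    // already fixed, otherwise fall back to a fixed size.
    fn fixed_size_measure(
        known: Size<Option<Pixels>>,
        _available: Size<AvailableSpace>,
    ) -> Size<Pixels> {
        Size {
            width: known.width.unwrap_or(Pixels(100.)),
            height: known.height.unwrap_or(Pixels(20.)),
        }
    }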

crates/gpui2/src/view.rs 🔗

@@ -179,6 +179,10 @@ impl AnyView {
         self.model.entity_type
     }
 
+    pub fn entity_id(&self) -> EntityId {
+        self.model.entity_id()
+    }
+
     pub(crate) fn draw(
         &self,
         origin: Point<Pixels>,
@@ -309,18 +313,6 @@ impl<V: 'static + Render<V>> From<WeakView<V>> for AnyWeakView {
     }
 }
 
-impl<F, E> Render<F> for F
-where
-    F: 'static + FnMut(&mut WindowContext) -> E,
-    E: 'static + Send + Element<F>,
-{
-    type Element = E;
-
-    fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
-        (self)(cx)
-    }
-}
-
 pub struct RenderViewWith<E, V> {
     view: View<V>,
     element: Option<E>,

crates/gpui2/src/window.rs 🔗

@@ -6,8 +6,8 @@ use crate::{
     InputEvent, IsZero, KeyBinding, KeyContext, KeyDownEvent, LayoutId, Model, ModelContext,
     Modifiers, MonochromeSprite, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Path,
     Pixels, PlatformAtlas, PlatformDisplay, PlatformInputHandler, PlatformWindow, Point,
-    PolychromeSprite, PromptLevel, Quad, RenderGlyphParams, RenderImageParams, RenderSvgParams,
-    Render, ScaledPixels, SceneBuilder, Shadow, SharedString, Size, Style, SubscriberSet,
+    PolychromeSprite, PromptLevel, Quad, Render, RenderGlyphParams, RenderImageParams,
+    RenderSvgParams, ScaledPixels, SceneBuilder, Shadow, SharedString, Size, Style, SubscriberSet,
     Subscription, TaffyLayoutEngine, Task, Underline, UnderlineStyle, View, VisualContext,
     WeakView, WindowBounds, WindowOptions, SUBPIXEL_VARIANTS,
 };
@@ -193,17 +193,12 @@ pub trait FocusableView: 'static + Render<Self> {
 
 /// ManagedView is a view (like a Modal, Popover, Menu, etc.)
 /// where the lifecycle of the view is handled by another view.
-pub trait ManagedView: 'static + Render<Self> {
-    fn focus_handle(&self, cx: &AppContext) -> FocusHandle;
-}
+pub trait ManagedView: FocusableView + EventEmitter<Manager> {}
 
-pub struct Dismiss;
-impl<T: ManagedView> EventEmitter<Dismiss> for T {}
+impl<M: FocusableView + EventEmitter<Manager>> ManagedView for M {}
 
-impl<T: ManagedView> FocusableView for T {
-    fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
-        self.focus_handle(cx)
-    }
+pub enum Manager {
+    Dismiss,
 }
 
 // Holds the state for a specific window.
@@ -1582,6 +1577,13 @@ impl VisualContext for WindowContext<'_> {
             view.focus_handle(cx).clone().focus(cx);
         })
     }
+
+    fn dismiss_view<V>(&mut self, view: &View<V>) -> Self::Result<()>
+    where
+        V: ManagedView,
+    {
+        self.update_view(view, |_, cx| cx.emit(Manager::Dismiss))
+    }
 }
 
 impl<'a> std::ops::Deref for WindowContext<'a> {
@@ -2275,6 +2277,13 @@ impl<'a, V: 'static> ViewContext<'a, V> {
     {
         self.defer(|view, cx| view.focus_handle(cx).focus(cx))
     }
+
+    pub fn dismiss_self(&mut self)
+    where
+        V: ManagedView,
+    {
+        self.defer(|_, cx| cx.emit(Manager::Dismiss))
+    }
 }
 
 impl<V> Context for ViewContext<'_, V> {
@@ -2354,6 +2363,10 @@ impl<V: 'static> VisualContext for ViewContext<'_, V> {
     fn focus_view<W: FocusableView>(&mut self, view: &View<W>) -> Self::Result<()> {
         self.window_cx.focus_view(view)
     }
+
+    fn dismiss_view<W: ManagedView>(&mut self, view: &View<W>) -> Self::Result<()> {
+        self.window_cx.dismiss_view(view)
+    }
 }
 
 impl<'a, V> std::ops::Deref for ViewContext<'a, V> {
@@ -2398,6 +2411,17 @@ impl<V: 'static + Render<V>> WindowHandle<V> {
         }
     }
 
+    pub fn root<C>(&self, cx: &mut C) -> Result<View<V>>
+    where
+        C: Context,
+    {
+        Flatten::flatten(cx.update_window(self.any_handle, |root_view, _| {
+            root_view
+                .downcast::<V>()
+                .map_err(|_| anyhow!("the type of the window's root view has changed"))
+        }))
+    }
+
     pub fn update<C, R>(
         &self,
         cx: &mut C,
@@ -2543,6 +2567,18 @@ pub enum ElementId {
     FocusHandle(FocusId),
 }
 
+impl TryInto<SharedString> for ElementId {
+    type Error = anyhow::Error;
+
+    fn try_into(self) -> anyhow::Result<SharedString> {
+        if let ElementId::Name(name) = self {
+            Ok(name)
+        } else {
+            Err(anyhow!("element id is not a string"))
+        }
+    }
+}
+
 impl From<EntityId> for ElementId {
     fn from(id: EntityId) -> Self {
         ElementId::View(id)

crates/journal2/Cargo.toml 🔗

@@ -9,7 +9,7 @@ path = "src/journal2.rs"
 doctest = false
 
 [dependencies]
-editor = { path = "../editor" }
+editor = { package = "editor2", path = "../editor2" }
 gpui = { package = "gpui2", path = "../gpui2" }
 util = { path = "../util" }
 workspace2 = { path = "../workspace2" }
@@ -24,4 +24,4 @@ log.workspace = true
 shellexpand = "2.1.0"
 
 [dev-dependencies]
-editor = { path = "../editor", features = ["test-support"] }
+editor = { package="editor2", path = "../editor2", features = ["test-support"] }

crates/picker2/src/picker2.rs 🔗

@@ -143,10 +143,10 @@ impl<D: PickerDelegate> Picker<D> {
     fn on_input_editor_event(
         &mut self,
         _: View<Editor>,
-        event: &editor::Event,
+        event: &editor::EditorEvent,
         cx: &mut ViewContext<Self>,
     ) {
-        if let editor::Event::BufferEdited = event {
+        if let editor::EditorEvent::BufferEdited = event {
             let query = self.editor.read(cx).text(cx);
             self.update_matches(query, cx);
         }

crates/project/src/ignore.rs 🔗

@@ -20,10 +20,6 @@ impl IgnoreStack {
         Arc::new(Self::All)
     }
 
-    pub fn is_all(&self) -> bool {
-        matches!(self, IgnoreStack::All)
-    }
-
     pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
         match self.as_ref() {
             IgnoreStack::All => self,

crates/project/src/project.rs 🔗

@@ -5548,7 +5548,16 @@ impl Project {
             .collect::<Vec<_>>();
 
         let background = cx.background().clone();
-        let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+        let path_count: usize = snapshots
+            .iter()
+            .map(|s| {
+                if query.include_ignored() {
+                    s.file_count()
+                } else {
+                    s.visible_file_count()
+                }
+            })
+            .sum();
         if path_count == 0 {
             let (_, rx) = smol::channel::bounded(1024);
             return rx;
@@ -5561,8 +5570,16 @@ impl Project {
             .iter()
             .filter_map(|(_, b)| {
                 let buffer = b.upgrade(cx)?;
-                let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-                if let Some(path) = snapshot.file().map(|file| file.path()) {
+                let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
+                    let is_ignored = buffer
+                        .project_path(cx)
+                        .and_then(|path| self.entry_for_path(&path, cx))
+                        .map_or(false, |entry| entry.is_ignored);
+                    (is_ignored, buffer.snapshot())
+                });
+                if is_ignored && !query.include_ignored() {
+                    return None;
+                } else if let Some(path) = snapshot.file().map(|file| file.path()) {
                     Some((path.clone(), (buffer, snapshot)))
                 } else {
                     unnamed_files.push(buffer);
@@ -5735,7 +5752,12 @@ impl Project {
                         let mut snapshot_start_ix = 0;
                         let mut abs_path = PathBuf::new();
                         for snapshot in snapshots {
-                            let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+                            let snapshot_end_ix = snapshot_start_ix
+                                + if query.include_ignored() {
+                                    snapshot.file_count()
+                                } else {
+                                    snapshot.visible_file_count()
+                                };
                             if worker_end_ix <= snapshot_start_ix {
                                 break;
                             } else if worker_start_ix > snapshot_end_ix {
@@ -5748,7 +5770,7 @@ impl Project {
                                     cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
 
                                 for entry in snapshot
-                                    .files(false, start_in_snapshot)
+                                    .files(query.include_ignored(), start_in_snapshot)
                                     .take(end_in_snapshot - start_in_snapshot)
                                 {
                                     if matching_paths_tx.is_closed() {

crates/project/src/project_settings.rs 🔗

@@ -10,6 +10,8 @@ pub struct ProjectSettings {
     pub lsp: HashMap<Arc<str>, LspSettings>,
     #[serde(default)]
     pub git: GitSettings,
+    #[serde(default)]
+    pub file_scan_exclusions: Option<Vec<String>>,
 }
 
 #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
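
The new field is toggled through the settings store; the worktree tests further down exercise it exactly like the sketch below (the wrapper function is hypothetical, the store calls are the ones used in those tests):

    use project::project_settings::ProjectSettings; // module path assumed
    use settings::SettingsStore;

    fn exclude_node_modules(cx: &mut gpui::AppContext) {
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(vec![
                    "**/node_modules/**".to_string(),
                    "**/.DS_Store".to_string(),
                ]);
            });
        });
    }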

crates/project/src/project_tests.rs 🔗

@@ -3598,7 +3598,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3623,7 +3623,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3662,6 +3662,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 Vec::new()
             )
@@ -3681,6 +3682,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.rs").unwrap()],
                 Vec::new()
             )
@@ -3703,6 +3705,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap(),
@@ -3727,6 +3730,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
                     PathMatcher::new("*.ts").unwrap(),
@@ -3774,6 +3778,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -3798,6 +3803,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.rs").unwrap()],
             )
@@ -3820,6 +3826,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
@@ -3844,6 +3851,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
@@ -3885,6 +3893,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -3904,6 +3913,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.ts").unwrap()],
                 vec![PathMatcher::new("*.ts").unwrap()],
             ).unwrap(),
@@ -3922,6 +3932,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()
@@ -3947,6 +3958,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()

crates/project/src/search.rs 🔗

@@ -39,6 +39,7 @@ pub enum SearchQuery {
         replacement: Option<String>,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 
@@ -48,6 +49,7 @@ pub enum SearchQuery {
         multiline: bool,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 }
@@ -57,6 +59,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -74,6 +77,7 @@ impl SearchQuery {
             replacement: None,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -82,6 +86,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -111,6 +116,7 @@ impl SearchQuery {
             multiline,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -121,6 +127,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -129,6 +136,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -156,6 +164,7 @@ impl SearchQuery {
             regex: self.is_regex(),
             whole_word: self.whole_word(),
             case_sensitive: self.case_sensitive(),
+            include_ignored: self.include_ignored(),
             files_to_include: self
                 .files_to_include()
                 .iter()
@@ -336,6 +345,17 @@ impl SearchQuery {
         }
     }
 
+    pub fn include_ignored(&self) -> bool {
+        match self {
+            Self::Text {
+                include_ignored, ..
+            } => *include_ignored,
+            Self::Regex {
+                include_ignored, ..
+            } => *include_ignored,
+        }
+    }
+
     pub fn is_regex(&self) -> bool {
         matches!(self, Self::Regex { .. })
     }
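
Call sites now pass the extra `include_ignored` flag in the fourth position, as the updated tests show. A sketch (the query string is illustrative and the module path is assumed):

    use project::search::SearchQuery;

    // text(query, whole_word, case_sensitive, include_ignored, include_paths, exclude_paths)
    fn ignored_aware_query() -> SearchQuery {
        SearchQuery::text("TODO", false, true, true, Vec::new(), Vec::new())
            .expect("valid query")
    }

Passing `true` for `include_ignored` routes project search through the `file_count` / `files(true, ..)` paths above, so gitignored entries are counted and searched as well.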

crates/project/src/worktree.rs 🔗

@@ -1,5 +1,6 @@
 use crate::{
-    copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
+    copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
+    ProjectEntryId, RemoveOptions,
 };
 use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Context, Result};
@@ -21,7 +22,10 @@ use futures::{
 };
 use fuzzy::CharBag;
 use git::{DOT_GIT, GITIGNORE};
-use gpui::{executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{
+    executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
+};
+use itertools::Itertools;
 use language::{
     proto::{
         deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@@ -36,6 +40,7 @@ use postage::{
     prelude::{Sink as _, Stream as _},
     watch,
 };
+use settings::SettingsStore;
 use smol::channel::{self, Sender};
 use std::{
     any::Any,
@@ -55,7 +60,10 @@ use std::{
     time::{Duration, SystemTime},
 };
 use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt};
+use util::{
+    paths::{PathMatcher, HOME},
+    ResultExt,
+};
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
 pub struct WorktreeId(usize);
@@ -70,7 +78,8 @@ pub struct LocalWorktree {
     scan_requests_tx: channel::Sender<ScanRequest>,
     path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
     is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
-    _background_scanner_task: Task<()>,
+    _settings_subscription: Subscription,
+    _background_scanner_tasks: Vec<Task<()>>,
     share: Option<ShareState>,
     diagnostics: HashMap<
         Arc<Path>,
@@ -216,6 +225,7 @@ pub struct LocalSnapshot {
     /// All of the git repositories in the worktree, indexed by the project entry
     /// id of their parent directory.
     git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+    file_scan_exclusions: Vec<PathMatcher>,
 }
 
 struct BackgroundScannerState {
@@ -299,17 +309,54 @@ impl Worktree {
             .await
             .context("failed to stat worktree path")?;
 
+        let closure_fs = Arc::clone(&fs);
+        let closure_next_entry_id = Arc::clone(&next_entry_id);
+        let closure_abs_path = abs_path.to_path_buf();
         Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
+            let settings_subscription = cx.observe_global::<SettingsStore, _>(move |this, cx| {
+                if let Self::Local(this) = this {
+                    let new_file_scan_exclusions =
+                        file_scan_exclusions(settings::get::<ProjectSettings>(cx));
+                    if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
+                        this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
+                        log::info!(
+                            "Re-scanning directories, new scan exclusions: {:?}",
+                            this.snapshot
+                                .file_scan_exclusions
+                                .iter()
+                                .map(ToString::to_string)
+                                .collect::<Vec<_>>()
+                        );
+
+                        let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+                        let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
+                            channel::unbounded();
+                        this.scan_requests_tx = scan_requests_tx;
+                        this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
+                        this._background_scanner_tasks = start_background_scan_tasks(
+                            &closure_abs_path,
+                            this.snapshot(),
+                            scan_requests_rx,
+                            path_prefixes_to_scan_rx,
+                            Arc::clone(&closure_next_entry_id),
+                            Arc::clone(&closure_fs),
+                            cx,
+                        );
+                        this.is_scanning = watch::channel_with(true);
+                    }
+                }
+            });
+
             let root_name = abs_path
                 .file_name()
                 .map_or(String::new(), |f| f.to_string_lossy().to_string());
-
             let mut snapshot = LocalSnapshot {
+                file_scan_exclusions: file_scan_exclusions(settings::get::<ProjectSettings>(cx)),
                 ignores_by_parent_abs_path: Default::default(),
                 git_repositories: Default::default(),
                 snapshot: Snapshot {
                     id: WorktreeId::from_usize(cx.model_id()),
-                    abs_path: abs_path.clone(),
+                    abs_path: abs_path.to_path_buf().into(),
                     root_name: root_name.clone(),
                     root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
                     entries_by_path: Default::default(),
@@ -334,60 +381,23 @@ impl Worktree {
 
             let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
             let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
-            let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
-
-            cx.spawn_weak(|this, mut cx| async move {
-                while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
-                    this.update(&mut cx, |this, cx| {
-                        let this = this.as_local_mut().unwrap();
-                        match state {
-                            ScanState::Started => {
-                                *this.is_scanning.0.borrow_mut() = true;
-                            }
-                            ScanState::Updated {
-                                snapshot,
-                                changes,
-                                barrier,
-                                scanning,
-                            } => {
-                                *this.is_scanning.0.borrow_mut() = scanning;
-                                this.set_snapshot(snapshot, changes, cx);
-                                drop(barrier);
-                            }
-                        }
-                        cx.notify();
-                    });
-                }
-            })
-            .detach();
-
-            let background_scanner_task = cx.background().spawn({
-                let fs = fs.clone();
-                let snapshot = snapshot.clone();
-                let background = cx.background().clone();
-                async move {
-                    let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
-                    BackgroundScanner::new(
-                        snapshot,
-                        next_entry_id,
-                        fs,
-                        scan_states_tx,
-                        background,
-                        scan_requests_rx,
-                        path_prefixes_to_scan_rx,
-                    )
-                    .run(events)
-                    .await;
-                }
-            });
-
+            let task_snapshot = snapshot.clone();
             Worktree::Local(LocalWorktree {
                 snapshot,
                 is_scanning: watch::channel_with(true),
                 share: None,
                 scan_requests_tx,
                 path_prefixes_to_scan_tx,
-                _background_scanner_task: background_scanner_task,
+                _settings_subscription: settings_subscription,
+                _background_scanner_tasks: start_background_scan_tasks(
+                    &abs_path,
+                    task_snapshot,
+                    scan_requests_rx,
+                    path_prefixes_to_scan_rx,
+                    Arc::clone(&next_entry_id),
+                    Arc::clone(&fs),
+                    cx,
+                ),
                 diagnostics: Default::default(),
                 diagnostic_summaries: Default::default(),
                 client,
@@ -584,6 +594,76 @@ impl Worktree {
     }
 }
 
+fn start_background_scan_tasks(
+    abs_path: &Path,
+    snapshot: LocalSnapshot,
+    scan_requests_rx: channel::Receiver<ScanRequest>,
+    path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+    next_entry_id: Arc<AtomicUsize>,
+    fs: Arc<dyn Fs>,
+    cx: &mut ModelContext<'_, Worktree>,
+) -> Vec<Task<()>> {
+    let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
+    let background_scanner = cx.background().spawn({
+        let abs_path = abs_path.to_path_buf();
+        let background = cx.background().clone();
+        async move {
+            let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
+            BackgroundScanner::new(
+                snapshot,
+                next_entry_id,
+                fs,
+                scan_states_tx,
+                background,
+                scan_requests_rx,
+                path_prefixes_to_scan_rx,
+            )
+            .run(events)
+            .await;
+        }
+    });
+    let scan_state_updater = cx.spawn_weak(|this, mut cx| async move {
+        while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
+            this.update(&mut cx, |this, cx| {
+                let this = this.as_local_mut().unwrap();
+                match state {
+                    ScanState::Started => {
+                        *this.is_scanning.0.borrow_mut() = true;
+                    }
+                    ScanState::Updated {
+                        snapshot,
+                        changes,
+                        barrier,
+                        scanning,
+                    } => {
+                        *this.is_scanning.0.borrow_mut() = scanning;
+                        this.set_snapshot(snapshot, changes, cx);
+                        drop(barrier);
+                    }
+                }
+                cx.notify();
+            });
+        }
+    });
+    vec![background_scanner, scan_state_updater]
+}
+
+fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
+    project_settings
+        .file_scan_exclusions
+        .as_deref()
+        .unwrap_or(&[])
+        .iter()
+        .sorted()
+        .filter_map(|pattern| {
+            PathMatcher::new(pattern)
+                .map(Some)
+                .unwrap_or_else(|e| {
+                    log::error!(
+                        "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
+                    );
+                    None
+                })
+        })
+        .collect()
+}
+
 impl LocalWorktree {
     pub fn contains_abs_path(&self, path: &Path) -> bool {
         path.starts_with(&self.abs_path)
@@ -1481,7 +1561,7 @@ impl Snapshot {
         self.entries_by_id.get(&entry_id, &()).is_some()
     }
 
-    pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
+    fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
         let entry = Entry::try_from((&self.root_char_bag, entry))?;
         let old_entry = self.entries_by_id.insert_or_replace(
             PathEntry {
@@ -2145,6 +2225,12 @@ impl LocalSnapshot {
         paths.sort_by(|a, b| a.0.cmp(b.0));
         paths
     }
+
+    fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
+        self.file_scan_exclusions
+            .iter()
+            .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+    }
 }
 
 impl BackgroundScannerState {
@@ -2167,7 +2253,7 @@ impl BackgroundScannerState {
         let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
         let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
         let mut containing_repository = None;
-        if !ignore_stack.is_all() {
+        if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
             if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
                 if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
                     containing_repository = Some((
@@ -2378,18 +2464,30 @@ impl BackgroundScannerState {
 
         // Remove any git repositories whose .git entry no longer exists.
         let snapshot = &mut self.snapshot;
-        let mut repositories = mem::take(&mut snapshot.git_repositories);
-        let mut repository_entries = mem::take(&mut snapshot.repository_entries);
-        repositories.retain(|work_directory_id, _| {
-            snapshot
-                .entry_for_id(*work_directory_id)
+        let mut ids_to_preserve = HashSet::default();
+        for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
+            let exists_in_snapshot = snapshot
+                .entry_for_id(work_directory_id)
                 .map_or(false, |entry| {
                     snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
-                })
-        });
-        repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
-        snapshot.git_repositories = repositories;
-        snapshot.repository_entries = repository_entries;
+                });
+            if exists_in_snapshot {
+                ids_to_preserve.insert(work_directory_id);
+            } else {
+                let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
+                if snapshot.is_abs_path_excluded(&git_dir_abs_path)
+                    && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
+                {
+                    ids_to_preserve.insert(work_directory_id);
+                }
+            }
+        }
+        snapshot
+            .git_repositories
+            .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
+        snapshot
+            .repository_entries
+            .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
     }
 
     fn build_git_repository(
@@ -3094,7 +3192,7 @@ impl BackgroundScanner {
                 let ignore_stack = state
                     .snapshot
                     .ignore_stack_for_abs_path(&root_abs_path, true);
-                if ignore_stack.is_all() {
+                if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
                     root_entry.is_ignored = true;
                     state.insert_entry(root_entry.clone(), self.fs.as_ref());
                 }
@@ -3231,14 +3329,22 @@ impl BackgroundScanner {
                         return false;
                     };
 
-                let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
-                    snapshot
-                        .entry_for_path(parent)
-                        .map_or(false, |entry| entry.kind == EntryKind::Dir)
-                });
-                if !parent_dir_is_loaded {
-                    log::debug!("ignoring event {relative_path:?} within unloaded directory");
-                    return false;
+                if !is_git_related(&abs_path) {
+                    let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
+                        snapshot
+                            .entry_for_path(parent)
+                            .map_or(false, |entry| entry.kind == EntryKind::Dir)
+                    });
+                    if !parent_dir_is_loaded {
+                        log::debug!("ignoring event {relative_path:?} within unloaded directory");
+                        return false;
+                    }
+                    if snapshot.is_abs_path_excluded(abs_path) {
+                        log::debug!(
+                            "ignoring FS event for path {relative_path:?} within excluded directory"
+                        );
+                        return false;
+                    }
                 }
 
                 relative_paths.push(relative_path);
@@ -3401,18 +3507,26 @@ impl BackgroundScanner {
     }
 
     async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
-        log::debug!("scan directory {:?}", job.path);
-
-        let mut ignore_stack = job.ignore_stack.clone();
-        let mut new_ignore = None;
-        let (root_abs_path, root_char_bag, next_entry_id) = {
-            let snapshot = &self.state.lock().snapshot;
-            (
-                snapshot.abs_path().clone(),
-                snapshot.root_char_bag,
-                self.next_entry_id.clone(),
-            )
-        };
+        let root_abs_path;
+        let mut ignore_stack;
+        let mut new_ignore;
+        let root_char_bag;
+        let next_entry_id;
+        {
+            let state = self.state.lock();
+            let snapshot = &state.snapshot;
+            root_abs_path = snapshot.abs_path().clone();
+            if snapshot.is_abs_path_excluded(&job.abs_path) {
+                log::error!("skipping excluded directory {:?}", job.path);
+                return Ok(());
+            }
+            log::debug!("scanning directory {:?}", job.path);
+            ignore_stack = job.ignore_stack.clone();
+            new_ignore = None;
+            root_char_bag = snapshot.root_char_bag;
+            next_entry_id = self.next_entry_id.clone();
+            drop(state);
+        }
 
         let mut dotgit_path = None;
         let mut root_canonical_path = None;
@@ -3427,18 +3541,8 @@ impl BackgroundScanner {
                     continue;
                 }
             };
-
             let child_name = child_abs_path.file_name().unwrap();
             let child_path: Arc<Path> = job.path.join(child_name).into();
-            let child_metadata = match self.fs.metadata(&child_abs_path).await {
-                Ok(Some(metadata)) => metadata,
-                Ok(None) => continue,
-                Err(err) => {
-                    log::error!("error processing {:?}: {:?}", child_abs_path, err);
-                    continue;
-                }
-            };
-
             // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
             if child_name == *GITIGNORE {
                 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@@ -3482,6 +3586,26 @@ impl BackgroundScanner {
                 dotgit_path = Some(child_path.clone());
             }
 
+            {
+                let mut state = self.state.lock();
+                if state.snapshot.is_abs_path_excluded(&child_abs_path) {
+                    let relative_path = job.path.join(child_name);
+                    log::debug!("skipping excluded child entry {relative_path:?}");
+                    state.remove_path(&relative_path);
+                    continue;
+                }
+                drop(state);
+            }
+
+            let child_metadata = match self.fs.metadata(&child_abs_path).await {
+                Ok(Some(metadata)) => metadata,
+                Ok(None) => continue,
+                Err(err) => {
+                    log::error!("error processing {child_abs_path:?}: {err:?}");
+                    continue;
+                }
+            };
+
             let mut child_entry = Entry::new(
                 child_path.clone(),
                 &child_metadata,
@@ -3662,19 +3786,16 @@ impl BackgroundScanner {
                         self.next_entry_id.as_ref(),
                         state.snapshot.root_char_bag,
                     );
-                    fs_entry.is_ignored = ignore_stack.is_all();
+                    let is_dir = fs_entry.is_dir();
+                    fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
                     fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
 
-                    if !fs_entry.is_ignored {
-                        if !fs_entry.is_dir() {
-                            if let Some((work_dir, repo)) =
-                                state.snapshot.local_repo_for_path(&path)
-                            {
-                                if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
-                                    let repo_path = RepoPath(repo_path.into());
-                                    let repo = repo.repo_ptr.lock();
-                                    fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
-                                }
+                    if !is_dir && !fs_entry.is_ignored {
+                        if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
+                            if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
+                                let repo_path = RepoPath(repo_path.into());
+                                let repo = repo.repo_ptr.lock();
+                                fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
                             }
                         }
                     }
@@ -3833,8 +3954,7 @@ impl BackgroundScanner {
                     ignore_stack.clone()
                 };
 
-                // Scan any directories that were previously ignored and weren't
-                // previously scanned.
+                // Scan any directories that were previously ignored and weren't previously scanned.
                 if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
                     let state = self.state.lock();
                     if state.should_scan_directory(&entry) {
@@ -4010,6 +4130,12 @@ impl BackgroundScanner {
     }
 }
 
+fn is_git_related(abs_path: &Path) -> bool {
+    abs_path
+        .components()
+        .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
+}
+
 fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
     let mut result = root_char_bag;
     result.extend(
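
The exclusion check itself is a straightforward glob test. A sketch that mirrors `LocalSnapshot::is_abs_path_excluded`, using the same `PathMatcher` from `util::paths` (the patterns and example path are illustrative):

    use std::path::Path;
    use util::paths::PathMatcher;

    // A path is excluded when any configured matcher matches its absolute path.
    fn is_excluded(exclusions: &[PathMatcher], abs_path: &Path) -> bool {
        exclusions.iter().any(|matcher| matcher.is_match(abs_path))
    }

    fn example() -> bool {
        let exclusions = vec![
            PathMatcher::new("**/node_modules/**").unwrap(),
            PathMatcher::new("**/.DS_Store").unwrap(),
        ];
        is_excluded(&exclusions, Path::new("/root/node_modules/prettier/package.json"))
    }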

crates/project/src/worktree_tests.rs 🔗

@@ -1,6 +1,7 @@
 use crate::{
+    project_settings::ProjectSettings,
     worktree::{Event, Snapshot, WorktreeModelHandle},
-    Entry, EntryKind, PathChange, Worktree,
+    Entry, EntryKind, PathChange, Project, Worktree,
 };
 use anyhow::Result;
 use client::Client;
@@ -12,6 +13,7 @@ use postage::stream::Stream;
 use pretty_assertions::assert_eq;
 use rand::prelude::*;
 use serde_json::json;
+use settings::SettingsStore;
 use std::{
     env,
     fmt::Write,
@@ -23,6 +25,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
 
 #[gpui::test]
 async fn test_traversal(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -78,6 +81,7 @@ async fn test_traversal(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_descendent_entries(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -185,6 +189,7 @@ async fn test_descendent_entries(cx: &mut TestAppContext) {
 
 #[gpui::test(iterations = 10)]
 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -264,6 +269,7 @@ async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppCo
 
 #[gpui::test]
 async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -439,6 +445,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_open_gitignored_files(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -599,6 +606,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -722,6 +730,14 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
 
 #[gpui::test(iterations = 10)]
 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions = Some(Vec::new());
+            });
+        });
+    });
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -827,6 +843,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_write_file(cx: &mut TestAppContext) {
+    init_test(cx);
     let dir = temp_tree(json!({
         ".git": {},
         ".gitignore": "ignored-dir\n",
@@ -877,8 +894,105 @@ async fn test_write_file(cx: &mut TestAppContext) {
     });
 }
 
+#[gpui::test]
+async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
+    init_test(cx);
+    let dir = temp_tree(json!({
+        ".gitignore": "**/target\n/node_modules\n",
+        "target": {
+            "index": "blah2"
+        },
+        "node_modules": {
+            ".DS_Store": "",
+            "prettier": {
+                "package.json": "{}",
+            },
+        },
+        "src": {
+            ".DS_Store": "",
+            "foo": {
+                "foo.rs": "mod another;\n",
+                "another.rs": "// another",
+            },
+            "bar": {
+                "bar.rs": "// bar",
+            },
+            "lib.rs": "mod foo;\nmod bar;\n",
+        },
+        ".DS_Store": "",
+    }));
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
+            });
+        });
+    });
+
+    let tree = Worktree::local(
+        build_client(cx),
+        dir.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "node_modules/.DS_Store",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+            &["target", "node_modules"],
+            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
+        )
+    });
+
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/node_modules/**".to_string()]);
+            });
+        });
+    });
+    tree.flush_fs_events(cx).await;
+    cx.foreground().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "node_modules/prettier/package.json",
+                "node_modules/.DS_Store",
+                "node_modules",
+            ],
+            &["target"],
+            &[
+                ".gitignore",
+                "src/lib.rs",
+                "src/bar/bar.rs",
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+        )
+    });
+}
+
 #[gpui::test(iterations = 30)]
 async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -938,6 +1052,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
+    init_test(cx);
     let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
 
     let fs_fake = FakeFs::new(cx.background());
@@ -1054,6 +1169,7 @@ async fn test_random_worktree_operations_during_initial_scan(
     cx: &mut TestAppContext,
     mut rng: StdRng,
 ) {
+    init_test(cx);
     let operations = env::var("OPERATIONS")
         .map(|o| o.parse().unwrap())
         .unwrap_or(5);
@@ -1143,6 +1259,7 @@ async fn test_random_worktree_operations_during_initial_scan(
 
 #[gpui::test(iterations = 100)]
 async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+    init_test(cx);
     let operations = env::var("OPERATIONS")
         .map(|o| o.parse().unwrap())
         .unwrap_or(40);
@@ -1557,6 +1674,7 @@ fn random_filename(rng: &mut impl Rng) -> String {
 
 #[gpui::test]
 async fn test_rename_work_directory(cx: &mut TestAppContext) {
+    init_test(cx);
     let root = temp_tree(json!({
         "projects": {
             "project1": {
@@ -1627,6 +1745,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+    init_test(cx);
     let root = temp_tree(json!({
         "c.txt": "",
         "dir1": {
@@ -1747,6 +1866,15 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]);
+            });
+        });
+    });
     const IGNORE_RULE: &'static str = "**/target";
 
     let root = temp_tree(json!({
@@ -1935,6 +2063,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
 
 #[gpui::test]
 async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -2139,3 +2268,44 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Sta
         .map(|status| (status.path().unwrap().to_string(), status.status()))
         .collect()
 }
+
+#[track_caller]
+fn check_worktree_entries(
+    tree: &Worktree,
+    expected_excluded_paths: &[&str],
+    expected_ignored_paths: &[&str],
+    expected_tracked_paths: &[&str],
+) {
+    for path in expected_excluded_paths {
+        let entry = tree.entry_for_path(path);
+        assert!(
+            entry.is_none(),
+            "expected path '{path}' to be excluded, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_ignored_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
+        assert!(
+            entry.is_ignored,
+            "expected path '{path}' to be ignored, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_tracked_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
+        assert!(
+            !entry.is_ignored,
+            "expected path '{path}' to be tracked, but got entry: {entry:?}",
+        );
+    }
+}
+
+fn init_test(cx: &mut gpui::TestAppContext) {
+    cx.update(|cx| {
+        cx.set_global(SettingsStore::test(cx));
+        Project::init_settings(cx);
+    });
+}
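
Note on the `init_test` helper above: worktree scanning now consults `ProjectSettings` (for `file_scan_exclusions`) from the global `SettingsStore`, which is presumably why every test in this file now begins with `init_test(cx)`. A minimal sketch of what a new test in this module is expected to look like; the test name and tree contents are illustrative only:

    #[gpui::test]
    async fn test_some_new_exclusion_behavior(cx: &mut TestAppContext) {
        init_test(cx); // register SettingsStore and Project settings before any scanning starts
        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/root", json!({ "src": { "lib.rs": "" } })).await;
        // ...build a Worktree::local(...) as in the tests above, wait for the scan,
        // and assert on the resulting entries with check_worktree_entries(...).
    }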

crates/project2/src/ignore.rs 🔗

@@ -20,10 +20,6 @@ impl IgnoreStack {
         Arc::new(Self::All)
     }
 
-    pub fn is_all(&self) -> bool {
-        matches!(self, IgnoreStack::All)
-    }
-
     pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
         match self.as_ref() {
             IgnoreStack::All => self,

crates/project2/src/project2.rs 🔗

@@ -5618,7 +5618,16 @@ impl Project {
             .collect::<Vec<_>>();
 
         let background = cx.background_executor().clone();
-        let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+        let path_count: usize = snapshots
+            .iter()
+            .map(|s| {
+                if query.include_ignored() {
+                    s.file_count()
+                } else {
+                    s.visible_file_count()
+                }
+            })
+            .sum();
         if path_count == 0 {
             let (_, rx) = smol::channel::bounded(1024);
             return rx;
@@ -5631,8 +5640,16 @@ impl Project {
             .iter()
             .filter_map(|(_, b)| {
                 let buffer = b.upgrade()?;
-                let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
-                if let Some(path) = snapshot.file().map(|file| file.path()) {
+                let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
+                    let is_ignored = buffer
+                        .project_path(cx)
+                        .and_then(|path| self.entry_for_path(&path, cx))
+                        .map_or(false, |entry| entry.is_ignored);
+                    (is_ignored, buffer.snapshot())
+                });
+                if is_ignored && !query.include_ignored() {
+                    return None;
+                } else if let Some(path) = snapshot.file().map(|file| file.path()) {
                     Some((path.clone(), (buffer, snapshot)))
                 } else {
                     unnamed_files.push(buffer);
@@ -5806,7 +5823,12 @@ impl Project {
                         let mut snapshot_start_ix = 0;
                         let mut abs_path = PathBuf::new();
                         for snapshot in snapshots {
-                            let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+                            let snapshot_end_ix = snapshot_start_ix
+                                + if query.include_ignored() {
+                                    snapshot.file_count()
+                                } else {
+                                    snapshot.visible_file_count()
+                                };
                             if worker_end_ix <= snapshot_start_ix {
                                 break;
                             } else if worker_start_ix > snapshot_end_ix {
@@ -5819,7 +5841,7 @@ impl Project {
                                     cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
 
                                 for entry in snapshot
-                                    .files(false, start_in_snapshot)
+                                    .files(query.include_ignored(), start_in_snapshot)
                                     .take(end_in_snapshot - start_in_snapshot)
                                 {
                                     if matching_paths_tx.is_closed() {

crates/project2/src/project_settings.rs 🔗

@@ -11,6 +11,8 @@ pub struct ProjectSettings {
     pub lsp: HashMap<Arc<str>, LspSettings>,
     #[serde(default)]
     pub git: GitSettings,
+    #[serde(default)]
+    pub file_scan_exclusions: Option<Vec<String>>,
 }
 
 #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
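
The new `file_scan_exclusions` field is an optional list of glob strings. A small round-trip sketch, assuming (as the `#[serde(default)]` attributes suggest) that `ProjectSettings` derives `Deserialize` and that every field may be omitted; the JSON literal is illustrative, not taken from the shipped defaults:

    let settings: ProjectSettings = serde_json::from_str(
        r#"{ "file_scan_exclusions": ["**/node_modules/**", "**/target"] }"#,
    )
    .unwrap();
    assert_eq!(
        settings.file_scan_exclusions.as_deref(),
        Some(&["**/node_modules/**".to_string(), "**/target".to_string()][..])
    );
    // Omitting the key yields None, which the worktree later treats as "no extra
    // exclusions" before compiling the patterns into PathMatchers.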

crates/project2/src/project_tests.rs 🔗

@@ -3730,7 +3730,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3755,7 +3755,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3794,6 +3794,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 Vec::new()
             )
@@ -3813,6 +3814,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.rs").unwrap()],
                 Vec::new()
             )
@@ -3835,6 +3837,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap(),
@@ -3859,6 +3862,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
                     PathMatcher::new("*.ts").unwrap(),
@@ -3906,6 +3910,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -3930,6 +3935,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.rs").unwrap()],
             )
@@ -3952,6 +3958,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
@@ -3976,6 +3983,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
@@ -4017,6 +4025,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -4036,6 +4045,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.ts").unwrap()],
                 vec![PathMatcher::new("*.ts").unwrap()],
             ).unwrap(),
@@ -4054,6 +4064,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()
@@ -4079,6 +4090,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()

crates/project2/src/search.rs 🔗

@@ -39,6 +39,7 @@ pub enum SearchQuery {
         replacement: Option<String>,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 
@@ -48,6 +49,7 @@ pub enum SearchQuery {
         multiline: bool,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 }
@@ -57,6 +59,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -74,6 +77,7 @@ impl SearchQuery {
             replacement: None,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -82,6 +86,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -111,6 +116,7 @@ impl SearchQuery {
             multiline,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -121,6 +127,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -129,6 +136,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -156,6 +164,7 @@ impl SearchQuery {
             regex: self.is_regex(),
             whole_word: self.whole_word(),
             case_sensitive: self.case_sensitive(),
+            include_ignored: self.include_ignored(),
             files_to_include: self
                 .files_to_include()
                 .iter()
@@ -336,6 +345,17 @@ impl SearchQuery {
         }
     }
 
+    pub fn include_ignored(&self) -> bool {
+        match self {
+            Self::Text {
+                include_ignored, ..
+            } => *include_ignored,
+            Self::Regex {
+                include_ignored, ..
+            } => *include_ignored,
+        }
+    }
+
     pub fn is_regex(&self) -> bool {
         matches!(self, Self::Regex { .. })
     }
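
From a caller's point of view, both constructors gain one positional flag. A minimal sketch; the argument order matches the updated calls in project_tests.rs above, and the query string is illustrative:

    let query = SearchQuery::text(
        "TODO",     // search text (illustrative)
        false,      // whole_word
        true,       // case_sensitive
        true,       // include_ignored: also search entries hidden by .gitignore
        Vec::new(), // files_to_include
        Vec::new(), // files_to_exclude
    )
    .unwrap();
    assert!(query.include_ignored());
    // SearchQuery::regex takes the same flag in the same position; the proto
    // (de)serialization hunks above carry it as well.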

crates/project2/src/worktree.rs 🔗

@@ -1,5 +1,6 @@
 use crate::{
-    copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
+    copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
+    ProjectEntryId, RemoveOptions,
 };
 use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Context as _, Result};
@@ -25,6 +26,7 @@ use gpui::{
     AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext,
     Task,
 };
+use itertools::Itertools;
 use language::{
     proto::{
         deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@@ -39,6 +41,7 @@ use postage::{
     prelude::{Sink as _, Stream as _},
     watch,
 };
+use settings::{Settings, SettingsStore};
 use smol::channel::{self, Sender};
 use std::{
     any::Any,
@@ -58,7 +61,10 @@ use std::{
     time::{Duration, SystemTime},
 };
 use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt};
+use util::{
+    paths::{PathMatcher, HOME},
+    ResultExt,
+};
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
 pub struct WorktreeId(usize);
@@ -73,7 +79,7 @@ pub struct LocalWorktree {
     scan_requests_tx: channel::Sender<ScanRequest>,
     path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
     is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
-    _background_scanner_task: Task<()>,
+    _background_scanner_tasks: Vec<Task<()>>,
     share: Option<ShareState>,
     diagnostics: HashMap<
         Arc<Path>,
@@ -219,6 +225,7 @@ pub struct LocalSnapshot {
     /// All of the git repositories in the worktree, indexed by the project entry
     /// id of their parent directory.
     git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+    file_scan_exclusions: Vec<PathMatcher>,
 }
 
 struct BackgroundScannerState {
@@ -302,17 +309,56 @@ impl Worktree {
             .await
             .context("failed to stat worktree path")?;
 
+        let closure_fs = Arc::clone(&fs);
+        let closure_next_entry_id = Arc::clone(&next_entry_id);
+        let closure_abs_path = abs_path.to_path_buf();
         cx.build_model(move |cx: &mut ModelContext<Worktree>| {
+            cx.observe_global::<SettingsStore>(move |this, cx| {
+                if let Self::Local(this) = this {
+                    let new_file_scan_exclusions =
+                        file_scan_exclusions(ProjectSettings::get_global(cx));
+                    if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
+                        this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
+                        log::info!(
+                            "Re-scanning directories, new scan exclude files: {:?}",
+                            this.snapshot
+                                .file_scan_exclusions
+                                .iter()
+                                .map(ToString::to_string)
+                                .collect::<Vec<_>>()
+                        );
+
+                        let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+                        let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
+                            channel::unbounded();
+                        this.scan_requests_tx = scan_requests_tx;
+                        this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
+                        this._background_scanner_tasks = start_background_scan_tasks(
+                            &closure_abs_path,
+                            this.snapshot(),
+                            scan_requests_rx,
+                            path_prefixes_to_scan_rx,
+                            Arc::clone(&closure_next_entry_id),
+                            Arc::clone(&closure_fs),
+                            cx,
+                        );
+                        this.is_scanning = watch::channel_with(true);
+                    }
+                }
+            })
+            .detach();
+
             let root_name = abs_path
                 .file_name()
                 .map_or(String::new(), |f| f.to_string_lossy().to_string());
 
             let mut snapshot = LocalSnapshot {
+                file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)),
                 ignores_by_parent_abs_path: Default::default(),
                 git_repositories: Default::default(),
                 snapshot: Snapshot {
                     id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize),
-                    abs_path: abs_path.clone(),
+                    abs_path: abs_path.to_path_buf().into(),
                     root_name: root_name.clone(),
                     root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
                     entries_by_path: Default::default(),
@@ -337,61 +383,22 @@ impl Worktree {
 
             let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
             let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
-            let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
-
-            cx.spawn(|this, mut cx| async move {
-                while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
-                    this.update(&mut cx, |this, cx| {
-                        let this = this.as_local_mut().unwrap();
-                        match state {
-                            ScanState::Started => {
-                                *this.is_scanning.0.borrow_mut() = true;
-                            }
-                            ScanState::Updated {
-                                snapshot,
-                                changes,
-                                barrier,
-                                scanning,
-                            } => {
-                                *this.is_scanning.0.borrow_mut() = scanning;
-                                this.set_snapshot(snapshot, changes, cx);
-                                drop(barrier);
-                            }
-                        }
-                        cx.notify();
-                    })
-                    .ok();
-                }
-            })
-            .detach();
-
-            let background_scanner_task = cx.background_executor().spawn({
-                let fs = fs.clone();
-                let snapshot = snapshot.clone();
-                let background = cx.background_executor().clone();
-                async move {
-                    let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
-                    BackgroundScanner::new(
-                        snapshot,
-                        next_entry_id,
-                        fs,
-                        scan_states_tx,
-                        background,
-                        scan_requests_rx,
-                        path_prefixes_to_scan_rx,
-                    )
-                    .run(events)
-                    .await;
-                }
-            });
-
+            let task_snapshot = snapshot.clone();
             Worktree::Local(LocalWorktree {
                 snapshot,
                 is_scanning: watch::channel_with(true),
                 share: None,
                 scan_requests_tx,
                 path_prefixes_to_scan_tx,
-                _background_scanner_task: background_scanner_task,
+                _background_scanner_tasks: start_background_scan_tasks(
+                    &abs_path,
+                    task_snapshot,
+                    scan_requests_rx,
+                    path_prefixes_to_scan_rx,
+                    Arc::clone(&next_entry_id),
+                    Arc::clone(&fs),
+                    cx,
+                ),
                 diagnostics: Default::default(),
                 diagnostic_summaries: Default::default(),
                 client,
@@ -584,6 +591,77 @@ impl Worktree {
     }
 }
 
+fn start_background_scan_tasks(
+    abs_path: &Path,
+    snapshot: LocalSnapshot,
+    scan_requests_rx: channel::Receiver<ScanRequest>,
+    path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+    next_entry_id: Arc<AtomicUsize>,
+    fs: Arc<dyn Fs>,
+    cx: &mut ModelContext<'_, Worktree>,
+) -> Vec<Task<()>> {
+    let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
+    let background_scanner = cx.background_executor().spawn({
+        let abs_path = abs_path.to_path_buf();
+        let background = cx.background_executor().clone();
+        async move {
+            let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
+            BackgroundScanner::new(
+                snapshot,
+                next_entry_id,
+                fs,
+                scan_states_tx,
+                background,
+                scan_requests_rx,
+                path_prefixes_to_scan_rx,
+            )
+            .run(events)
+            .await;
+        }
+    });
+    let scan_state_updater = cx.spawn(|this, mut cx| async move {
+        while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
+            this.update(&mut cx, |this, cx| {
+                let this = this.as_local_mut().unwrap();
+                match state {
+                    ScanState::Started => {
+                        *this.is_scanning.0.borrow_mut() = true;
+                    }
+                    ScanState::Updated {
+                        snapshot,
+                        changes,
+                        barrier,
+                        scanning,
+                    } => {
+                        *this.is_scanning.0.borrow_mut() = scanning;
+                        this.set_snapshot(snapshot, changes, cx);
+                        drop(barrier);
+                    }
+                }
+                cx.notify();
+            })
+            .ok();
+        }
+    });
+    vec![background_scanner, scan_state_updater]
+}
+
+fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
+    project_settings
+        .file_scan_exclusions
+        .as_deref()
+        .unwrap_or(&[])
+        .iter()
+        .sorted()
+        .filter_map(|pattern| {
+            PathMatcher::new(pattern).map(Some).unwrap_or_else(|e| {
+                log::error!(
+                    "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
+                );
+                None
+            })
+        })
+        .collect()
+}
+
 impl LocalWorktree {
     pub fn contains_abs_path(&self, path: &Path) -> bool {
         path.starts_with(&self.abs_path)
@@ -1482,7 +1560,7 @@ impl Snapshot {
         self.entries_by_id.get(&entry_id, &()).is_some()
     }
 
-    pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
+    fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
         let entry = Entry::try_from((&self.root_char_bag, entry))?;
         let old_entry = self.entries_by_id.insert_or_replace(
             PathEntry {
@@ -2143,6 +2221,12 @@ impl LocalSnapshot {
         paths.sort_by(|a, b| a.0.cmp(b.0));
         paths
     }
+
+    fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
+        self.file_scan_exclusions
+            .iter()
+            .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+    }
 }
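
`is_abs_path_excluded` is the single predicate the scanner consults (in `scan_dir` and in the FS-event filter below) before doing any further work on a path. A tiny standalone illustration of the same check, assuming the usual glob semantics exercised by the tests at the top of this change:

    #[test]
    fn excluded_paths_sketch() {
        use std::path::Path;
        use util::paths::PathMatcher;

        // One compiled matcher, as produced by file_scan_exclusions() above.
        let exclusions = vec![PathMatcher::new("**/node_modules/**").unwrap()];
        let is_excluded =
            |abs_path: &Path| exclusions.iter().any(|matcher| matcher.is_match(abs_path));

        assert!(is_excluded(Path::new("/root/node_modules/prettier/package.json")));
        assert!(!is_excluded(Path::new("/root/src/lib.rs")));
    }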
 
 impl BackgroundScannerState {
@@ -2165,7 +2249,7 @@ impl BackgroundScannerState {
         let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
         let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
         let mut containing_repository = None;
-        if !ignore_stack.is_all() {
+        if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
             if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
                 if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
                     containing_repository = Some((
@@ -2376,18 +2460,30 @@ impl BackgroundScannerState {
 
         // Remove any git repositories whose .git entry no longer exists.
         let snapshot = &mut self.snapshot;
-        let mut repositories = mem::take(&mut snapshot.git_repositories);
-        let mut repository_entries = mem::take(&mut snapshot.repository_entries);
-        repositories.retain(|work_directory_id, _| {
-            snapshot
-                .entry_for_id(*work_directory_id)
+        let mut ids_to_preserve = HashSet::default();
+        for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
+            let exists_in_snapshot = snapshot
+                .entry_for_id(work_directory_id)
                 .map_or(false, |entry| {
                     snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
-                })
-        });
-        repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
-        snapshot.git_repositories = repositories;
-        snapshot.repository_entries = repository_entries;
+                });
+            if exists_in_snapshot {
+                ids_to_preserve.insert(work_directory_id);
+            } else {
+                let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
+                if snapshot.is_abs_path_excluded(&git_dir_abs_path)
+                    && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
+                {
+                    ids_to_preserve.insert(work_directory_id);
+                }
+            }
+        }
+        snapshot
+            .git_repositories
+            .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
+        snapshot
+            .repository_entries
+            .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
     }
 
     fn build_git_repository(
@@ -3085,7 +3181,7 @@ impl BackgroundScanner {
                 let ignore_stack = state
                     .snapshot
                     .ignore_stack_for_abs_path(&root_abs_path, true);
-                if ignore_stack.is_all() {
+                if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
                     root_entry.is_ignored = true;
                     state.insert_entry(root_entry.clone(), self.fs.as_ref());
                 }
@@ -3222,14 +3318,22 @@ impl BackgroundScanner {
                         return false;
                     };
 
-                let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
-                    snapshot
-                        .entry_for_path(parent)
-                        .map_or(false, |entry| entry.kind == EntryKind::Dir)
-                });
-                if !parent_dir_is_loaded {
-                    log::debug!("ignoring event {relative_path:?} within unloaded directory");
-                    return false;
+                if !is_git_related(&abs_path) {
+                    let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
+                        snapshot
+                            .entry_for_path(parent)
+                            .map_or(false, |entry| entry.kind == EntryKind::Dir)
+                    });
+                    if !parent_dir_is_loaded {
+                        log::debug!("ignoring event {relative_path:?} within unloaded directory");
+                        return false;
+                    }
+                    if snapshot.is_abs_path_excluded(abs_path) {
+                        log::debug!(
+                            "ignoring FS event for path {relative_path:?} within excluded directory"
+                        );
+                        return false;
+                    }
                 }
 
                 relative_paths.push(relative_path);
@@ -3392,18 +3496,26 @@ impl BackgroundScanner {
     }
 
     async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
-        log::debug!("scan directory {:?}", job.path);
-
-        let mut ignore_stack = job.ignore_stack.clone();
-        let mut new_ignore = None;
-        let (root_abs_path, root_char_bag, next_entry_id) = {
-            let snapshot = &self.state.lock().snapshot;
-            (
-                snapshot.abs_path().clone(),
-                snapshot.root_char_bag,
-                self.next_entry_id.clone(),
-            )
-        };
+        let root_abs_path;
+        let mut ignore_stack;
+        let mut new_ignore;
+        let root_char_bag;
+        let next_entry_id;
+        {
+            let state = self.state.lock();
+            let snapshot = &state.snapshot;
+            root_abs_path = snapshot.abs_path().clone();
+            if snapshot.is_abs_path_excluded(&job.abs_path) {
+                log::error!("skipping excluded directory {:?}", job.path);
+                return Ok(());
+            }
+            log::debug!("scanning directory {:?}", job.path);
+            ignore_stack = job.ignore_stack.clone();
+            new_ignore = None;
+            root_char_bag = snapshot.root_char_bag;
+            next_entry_id = self.next_entry_id.clone();
+            drop(state);
+        }
 
         let mut dotgit_path = None;
         let mut root_canonical_path = None;
@@ -3418,18 +3530,8 @@ impl BackgroundScanner {
                     continue;
                 }
             };
-
             let child_name = child_abs_path.file_name().unwrap();
             let child_path: Arc<Path> = job.path.join(child_name).into();
-            let child_metadata = match self.fs.metadata(&child_abs_path).await {
-                Ok(Some(metadata)) => metadata,
-                Ok(None) => continue,
-                Err(err) => {
-                    log::error!("error processing {:?}: {:?}", child_abs_path, err);
-                    continue;
-                }
-            };
-
             // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
             if child_name == *GITIGNORE {
                 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@@ -3473,6 +3575,26 @@ impl BackgroundScanner {
                 dotgit_path = Some(child_path.clone());
             }
 
+            {
+                let mut state = self.state.lock();
+                if state.snapshot.is_abs_path_excluded(&child_abs_path) {
+                    let relative_path = job.path.join(child_name);
+                    log::debug!("skipping excluded child entry {relative_path:?}");
+                    state.remove_path(&relative_path);
+                    continue;
+                }
+                drop(state);
+            }
+
+            let child_metadata = match self.fs.metadata(&child_abs_path).await {
+                Ok(Some(metadata)) => metadata,
+                Ok(None) => continue,
+                Err(err) => {
+                    log::error!("error processing {child_abs_path:?}: {err:?}");
+                    continue;
+                }
+            };
+
             let mut child_entry = Entry::new(
                 child_path.clone(),
                 &child_metadata,
@@ -3653,19 +3775,16 @@ impl BackgroundScanner {
                         self.next_entry_id.as_ref(),
                         state.snapshot.root_char_bag,
                     );
-                    fs_entry.is_ignored = ignore_stack.is_all();
+                    let is_dir = fs_entry.is_dir();
+                    fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
                     fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
 
-                    if !fs_entry.is_ignored {
-                        if !fs_entry.is_dir() {
-                            if let Some((work_dir, repo)) =
-                                state.snapshot.local_repo_for_path(&path)
-                            {
-                                if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
-                                    let repo_path = RepoPath(repo_path.into());
-                                    let repo = repo.repo_ptr.lock();
-                                    fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
-                                }
+                    if !is_dir && !fs_entry.is_ignored {
+                        if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
+                            if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
+                                let repo_path = RepoPath(repo_path.into());
+                                let repo = repo.repo_ptr.lock();
+                                fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
                             }
                         }
                     }
@@ -3824,8 +3943,7 @@ impl BackgroundScanner {
                     ignore_stack.clone()
                 };
 
-                // Scan any directories that were previously ignored and weren't
-                // previously scanned.
+                // Scan any directories that were previously ignored and weren't previously scanned.
                 if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
                     let state = self.state.lock();
                     if state.should_scan_directory(&entry) {
@@ -4001,6 +4119,12 @@ impl BackgroundScanner {
     }
 }
 
+fn is_git_related(abs_path: &Path) -> bool {
+    abs_path
+        .components()
+        .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
+}
+
 fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
     let mut result = root_char_bag;
     result.extend(

crates/project2/src/worktree_tests.rs 🔗

@@ -1,2141 +1,2310 @@
-// use crate::{
-//     worktree::{Event, Snapshot, WorktreeModelHandle},
-//     Entry, EntryKind, PathChange, Worktree,
-// };
-// use anyhow::Result;
-// use client2::Client;
-// use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
-// use git::GITIGNORE;
-// use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
-// use parking_lot::Mutex;
-// use postage::stream::Stream;
-// use pretty_assertions::assert_eq;
-// use rand::prelude::*;
-// use serde_json::json;
-// use std::{
-//     env,
-//     fmt::Write,
-//     mem,
-//     path::{Path, PathBuf},
-//     sync::Arc,
-// };
-// use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
-
-// #[gpui::test]
-// async fn test_traversal(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//            ".gitignore": "a/b\n",
-//            "a": {
-//                "b": "",
-//                "c": "",
-//            }
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(false)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new(".gitignore"),
-//                 Path::new("a"),
-//                 Path::new("a/c"),
-//             ]
-//         );
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new(".gitignore"),
-//                 Path::new("a"),
-//                 Path::new("a/b"),
-//                 Path::new("a/c"),
-//             ]
-//         );
-//     })
-// }
-
-// #[gpui::test]
-// async fn test_descendent_entries(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "a": "",
-//             "b": {
-//                "c": {
-//                    "d": ""
-//                },
-//                "e": {}
-//             },
-//             "f": "",
-//             "g": {
-//                 "h": {}
-//             },
-//             "i": {
-//                 "j": {
-//                     "k": ""
-//                 },
-//                 "l": {
-
-//                 }
-//             },
-//             ".gitignore": "i/j\n",
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.descendent_entries(false, false, Path::new("b"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("b/c/d"),]
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(true, false, Path::new("b"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new("b"),
-//                 Path::new("b/c"),
-//                 Path::new("b/c/d"),
-//                 Path::new("b/e"),
-//             ]
-//         );
-
-//         assert_eq!(
-//             tree.descendent_entries(false, false, Path::new("g"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             Vec::<PathBuf>::new()
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(true, false, Path::new("g"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("g"), Path::new("g/h"),]
-//         );
-//     });
-
-//     // Expand gitignored directory.
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("i/j").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.descendent_entries(false, false, Path::new("i"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             Vec::<PathBuf>::new()
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(false, true, Path::new("i"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("i/j/k")]
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(true, false, Path::new("i"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("i"), Path::new("i/l"),]
-//         );
-//     })
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "lib": {
-//                 "a": {
-//                     "a.txt": ""
-//                 },
-//                 "b": {
-//                     "b.txt": ""
-//                 }
-//             }
-//         }),
-//     )
-//     .await;
-//     fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
-//     fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(false)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new("lib"),
-//                 Path::new("lib/a"),
-//                 Path::new("lib/a/a.txt"),
-//                 Path::new("lib/a/lib"),
-//                 Path::new("lib/b"),
-//                 Path::new("lib/b/b.txt"),
-//                 Path::new("lib/b/lib"),
-//             ]
-//         );
-//     });
-
-//     fs.rename(
-//         Path::new("/root/lib/a/lib"),
-//         Path::new("/root/lib/a/lib-2"),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     executor.run_until_parked();
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(false)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new("lib"),
-//                 Path::new("lib/a"),
-//                 Path::new("lib/a/a.txt"),
-//                 Path::new("lib/a/lib-2"),
-//                 Path::new("lib/b"),
-//                 Path::new("lib/b/b.txt"),
-//                 Path::new("lib/b/lib"),
-//             ]
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "dir1": {
-//                 "deps": {
-//                     // symlinks here
-//                 },
-//                 "src": {
-//                     "a.rs": "",
-//                     "b.rs": "",
-//                 },
-//             },
-//             "dir2": {
-//                 "src": {
-//                     "c.rs": "",
-//                     "d.rs": "",
-//                 }
-//             },
-//             "dir3": {
-//                 "deps": {},
-//                 "src": {
-//                     "e.rs": "",
-//                     "f.rs": "",
-//                 },
-//             }
-//         }),
-//     )
-//     .await;
-
-//     // These symlinks point to directories outside of the worktree's root, dir1.
-//     fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
-//         .await;
-//     fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
-//         .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root/dir1"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     let tree_updates = Arc::new(Mutex::new(Vec::new()));
-//     tree.update(cx, |_, cx| {
-//         let tree_updates = tree_updates.clone();
-//         cx.subscribe(&tree, move |_, _, event, _| {
-//             if let Event::UpdatedEntries(update) = event {
-//                 tree_updates.lock().extend(
-//                     update
-//                         .iter()
-//                         .map(|(path, _, change)| (path.clone(), *change)),
-//                 );
-//             }
-//         })
-//         .detach();
-//     });
-
-//     // The symlinked directories are not scanned by default.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_external))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new("deps"), false),
-//                 (Path::new("deps/dep-dir2"), true),
-//                 (Path::new("deps/dep-dir3"), true),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//             ]
-//         );
-
-//         assert_eq!(
-//             tree.entry_for_path("deps/dep-dir2").unwrap().kind,
-//             EntryKind::UnloadedDir
-//         );
-//     });
-
-//     // Expand one of the symlinked directories.
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     // The expanded directory's contents are loaded. Subdirectories are
-//     // not scanned yet.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_external))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new("deps"), false),
-//                 (Path::new("deps/dep-dir2"), true),
-//                 (Path::new("deps/dep-dir3"), true),
-//                 (Path::new("deps/dep-dir3/deps"), true),
-//                 (Path::new("deps/dep-dir3/src"), true),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//             ]
-//         );
-//     });
-//     assert_eq!(
-//         mem::take(&mut *tree_updates.lock()),
-//         &[
-//             (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
-//             (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
-//             (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
-//         ]
-//     );
-
-//     // Expand a subdirectory of one of the symlinked directories.
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     // The expanded subdirectory's contents are loaded.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_external))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new("deps"), false),
-//                 (Path::new("deps/dep-dir2"), true),
-//                 (Path::new("deps/dep-dir3"), true),
-//                 (Path::new("deps/dep-dir3/deps"), true),
-//                 (Path::new("deps/dep-dir3/src"), true),
-//                 (Path::new("deps/dep-dir3/src/e.rs"), true),
-//                 (Path::new("deps/dep-dir3/src/f.rs"), true),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//             ]
-//         );
-//     });
-
-//     assert_eq!(
-//         mem::take(&mut *tree_updates.lock()),
-//         &[
-//             (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
-//             (
-//                 Path::new("deps/dep-dir3/src/e.rs").into(),
-//                 PathChange::Loaded
-//             ),
-//             (
-//                 Path::new("deps/dep-dir3/src/f.rs").into(),
-//                 PathChange::Loaded
-//             )
-//         ]
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_open_gitignored_files(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".gitignore": "node_modules\n",
-//             "one": {
-//                 "node_modules": {
-//                     "a": {
-//                         "a1.js": "a1",
-//                         "a2.js": "a2",
-//                     },
-//                     "b": {
-//                         "b1.js": "b1",
-//                         "b2.js": "b2",
-//                     },
-//                     "c": {
-//                         "c1.js": "c1",
-//                         "c2.js": "c2",
-//                     }
-//                 },
-//             },
-//             "two": {
-//                 "x.js": "",
-//                 "y.js": "",
-//             },
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("one"), false),
-//                 (Path::new("one/node_modules"), true),
-//                 (Path::new("two"), false),
-//                 (Path::new("two/x.js"), false),
-//                 (Path::new("two/y.js"), false),
-//             ]
-//         );
-//     });
-
-//     // Open a file that is nested inside of a gitignored directory that
-//     // has not yet been expanded.
-//     let prev_read_dir_count = fs.read_dir_call_count();
-//     let buffer = tree
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     tree.read_with(cx, |tree, cx| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("one"), false),
-//                 (Path::new("one/node_modules"), true),
-//                 (Path::new("one/node_modules/a"), true),
-//                 (Path::new("one/node_modules/b"), true),
-//                 (Path::new("one/node_modules/b/b1.js"), true),
-//                 (Path::new("one/node_modules/b/b2.js"), true),
-//                 (Path::new("one/node_modules/c"), true),
-//                 (Path::new("two"), false),
-//                 (Path::new("two/x.js"), false),
-//                 (Path::new("two/y.js"), false),
-//             ]
-//         );
-
-//         assert_eq!(
-//             buffer.read(cx).file().unwrap().path().as_ref(),
-//             Path::new("one/node_modules/b/b1.js")
-//         );
-
-//         // Only the newly-expanded directories are scanned.
-//         assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
-//     });
-
-//     // Open another file in a different subdirectory of the same
-//     // gitignored directory.
-//     let prev_read_dir_count = fs.read_dir_call_count();
-//     let buffer = tree
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     tree.read_with(cx, |tree, cx| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("one"), false),
-//                 (Path::new("one/node_modules"), true),
-//                 (Path::new("one/node_modules/a"), true),
-//                 (Path::new("one/node_modules/a/a1.js"), true),
-//                 (Path::new("one/node_modules/a/a2.js"), true),
-//                 (Path::new("one/node_modules/b"), true),
-//                 (Path::new("one/node_modules/b/b1.js"), true),
-//                 (Path::new("one/node_modules/b/b2.js"), true),
-//                 (Path::new("one/node_modules/c"), true),
-//                 (Path::new("two"), false),
-//                 (Path::new("two/x.js"), false),
-//                 (Path::new("two/y.js"), false),
-//             ]
-//         );
-
-//         assert_eq!(
-//             buffer.read(cx).file().unwrap().path().as_ref(),
-//             Path::new("one/node_modules/a/a2.js")
-//         );
-
-//         // Only the newly-expanded directory is scanned.
-//         assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
-//     });
-
-//     // No work happens when files and directories change within an unloaded directory.
-//     let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
-//     fs.create_dir("/root/one/node_modules/c/lib".as_ref())
-//         .await
-//         .unwrap();
-//     cx.foreground().run_until_parked();
-//     assert_eq!(
-//         fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
-//         0
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".gitignore": "node_modules\n",
-//             "a": {
-//                 "a.js": "",
-//             },
-//             "b": {
-//                 "b.js": "",
-//             },
-//             "node_modules": {
-//                 "c": {
-//                     "c.js": "",
-//                 },
-//                 "d": {
-//                     "d.js": "",
-//                     "e": {
-//                         "e1.js": "",
-//                         "e2.js": "",
-//                     },
-//                     "f": {
-//                         "f1.js": "",
-//                         "f2.js": "",
-//                     }
-//                 },
-//             },
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     // Open a file within the gitignored directory, forcing some of its
-//     // subdirectories to be read, but not all.
-//     let read_dir_count_1 = fs.read_dir_call_count();
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     // Those subdirectories are now loaded.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|e| (e.path.as_ref(), e.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("a"), false),
-//                 (Path::new("a/a.js"), false),
-//                 (Path::new("b"), false),
-//                 (Path::new("b/b.js"), false),
-//                 (Path::new("node_modules"), true),
-//                 (Path::new("node_modules/c"), true),
-//                 (Path::new("node_modules/d"), true),
-//                 (Path::new("node_modules/d/d.js"), true),
-//                 (Path::new("node_modules/d/e"), true),
-//                 (Path::new("node_modules/d/f"), true),
-//             ]
-//         );
-//     });
-//     let read_dir_count_2 = fs.read_dir_call_count();
-//     assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
-
-//     // Update the gitignore so that node_modules is no longer ignored,
-//     // but a subdirectory is ignored
-//     fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
-//         .await
-//         .unwrap();
-//     cx.foreground().run_until_parked();
-
-//     // All of the directories that are no longer ignored are now loaded.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|e| (e.path.as_ref(), e.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("a"), false),
-//                 (Path::new("a/a.js"), false),
-//                 (Path::new("b"), false),
-//                 (Path::new("b/b.js"), false),
-//                 // This directory is no longer ignored
-//                 (Path::new("node_modules"), false),
-//                 (Path::new("node_modules/c"), false),
-//                 (Path::new("node_modules/c/c.js"), false),
-//                 (Path::new("node_modules/d"), false),
-//                 (Path::new("node_modules/d/d.js"), false),
-//                 // This subdirectory is now ignored
-//                 (Path::new("node_modules/d/e"), true),
-//                 (Path::new("node_modules/d/f"), false),
-//                 (Path::new("node_modules/d/f/f1.js"), false),
-//                 (Path::new("node_modules/d/f/f2.js"), false),
-//             ]
-//         );
-//     });
-
-//     // Each of the newly-loaded directories is scanned only once.
-//     let read_dir_count_3 = fs.read_dir_call_count();
-//     assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
-//             "tree": {
-//                 ".git": {},
-//                 ".gitignore": "ignored-dir\n",
-//                 "tracked-dir": {
-//                     "tracked-file1": "",
-//                     "ancestor-ignored-file1": "",
-//                 },
-//                 "ignored-dir": {
-//                     "ignored-file1": ""
-//                 }
-//             }
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         "/root/tree".as_ref(),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         assert!(
-//             !tree
-//                 .entry_for_path("tracked-dir/tracked-file1")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("ignored-dir/ignored-file1")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//     });
-
-//     fs.create_file(
-//         "/root/tree/tracked-dir/tracked-file2".as_ref(),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     fs.create_file(
-//         "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     fs.create_file(
-//         "/root/tree/ignored-dir/ignored-file2".as_ref(),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.foreground().run_until_parked();
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         assert!(
-//             !tree
-//                 .entry_for_path("tracked-dir/tracked-file2")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("ignored-dir/ignored-file2")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(tree.entry_for_path(".git").unwrap().is_ignored);
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_write_file(cx: &mut TestAppContext) {
-//     let dir = temp_tree(json!({
-//         ".git": {},
-//         ".gitignore": "ignored-dir\n",
-//         "tracked-dir": {},
-//         "ignored-dir": {}
-//     }));
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         dir.path(),
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-//     tree.flush_fs_events(cx).await;
-
-//     tree.update(cx, |tree, cx| {
-//         tree.as_local().unwrap().write_file(
-//             Path::new("tracked-dir/file.txt"),
-//             "hello".into(),
-//             Default::default(),
-//             cx,
-//         )
-//     })
-//     .await
-//     .unwrap();
-//     tree.update(cx, |tree, cx| {
-//         tree.as_local().unwrap().write_file(
-//             Path::new("ignored-dir/file.txt"),
-//             "world".into(),
-//             Default::default(),
-//             cx,
-//         )
-//     })
-//     .await
-//     .unwrap();
-
-//     tree.read_with(cx, |tree, _| {
-//         let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
-//         let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
-//         assert!(!tracked.is_ignored);
-//         assert!(ignored.is_ignored);
-//     });
-// }
-
-// #[gpui::test(iterations = 30)]
-// async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "b": {},
-//             "c": {},
-//             "d": {},
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         "/root".as_ref(),
-//         true,
-//         fs,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let snapshot1 = tree.update(cx, |tree, cx| {
-//         let tree = tree.as_local_mut().unwrap();
-//         let snapshot = Arc::new(Mutex::new(tree.snapshot()));
-//         let _ = tree.observe_updates(0, cx, {
-//             let snapshot = snapshot.clone();
-//             move |update| {
-//                 snapshot.lock().apply_remote_update(update).unwrap();
-//                 async { true }
-//             }
-//         });
-//         snapshot
-//     });
-
-//     let entry = tree
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/e".as_ref(), true, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_dir());
-
-//     cx.foreground().run_until_parked();
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
-//     });
-
-//     let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//     assert_eq!(
-//         snapshot1.lock().entries(true).collect::<Vec<_>>(),
-//         snapshot2.entries(true).collect::<Vec<_>>()
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
-//     let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-//     let fs_fake = FakeFs::new(cx.background());
-//     fs_fake
-//         .insert_tree(
-//             "/root",
-//             json!({
-//                 "a": {},
-//             }),
-//         )
-//         .await;
-
-//     let tree_fake = Worktree::local(
-//         client_fake,
-//         "/root".as_ref(),
-//         true,
-//         fs_fake,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let entry = tree_fake
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_fake.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
-//         assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
-//         assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
-//     });
-
-//     let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-//     let fs_real = Arc::new(RealFs);
-//     let temp_root = temp_tree(json!({
-//         "a": {}
-//     }));
-
-//     let tree_real = Worktree::local(
-//         client_real,
-//         temp_root.path(),
-//         true,
-//         fs_real,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let entry = tree_real
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_real.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
-//         assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
-//         assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
-//     });
-
-//     // Test smallest change
-//     let entry = tree_real
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_real.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
-//     });
-
-//     // Test largest change
-//     let entry = tree_real
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_real.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
-//         assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
-//         assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
-//         assert!(tree.entry_for_path("d/").unwrap().is_dir());
-//     });
-// }
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_worktree_operations_during_initial_scan(
-//     cx: &mut TestAppContext,
-//     mut rng: StdRng,
-// ) {
-//     let operations = env::var("OPERATIONS")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(5);
-//     let initial_entries = env::var("INITIAL_ENTRIES")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(20);
-
-//     let root_dir = Path::new("/test");
-//     let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-//     fs.as_fake().insert_tree(root_dir, json!({})).await;
-//     for _ in 0..initial_entries {
-//         randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-//     }
-//     log::info!("generated initial tree");
-
-//     let worktree = Worktree::local(
-//         build_client(cx),
-//         root_dir,
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
-//     let updates = Arc::new(Mutex::new(Vec::new()));
-//     worktree.update(cx, |tree, cx| {
-//         check_worktree_change_events(tree, cx);
-
-//         let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-//             let updates = updates.clone();
-//             move |update| {
-//                 updates.lock().push(update);
-//                 async { true }
-//             }
-//         });
-//     });
-
-//     for _ in 0..operations {
-//         worktree
-//             .update(cx, |worktree, cx| {
-//                 randomly_mutate_worktree(worktree, &mut rng, cx)
-//             })
-//             .await
-//             .log_err();
-//         worktree.read_with(cx, |tree, _| {
-//             tree.as_local().unwrap().snapshot().check_invariants(true)
-//         });
-
-//         if rng.gen_bool(0.6) {
-//             snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
-//         }
-//     }
-
-//     worktree
-//         .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-//         .await;
-
-//     cx.foreground().run_until_parked();
-
-//     let final_snapshot = worktree.read_with(cx, |tree, _| {
-//         let tree = tree.as_local().unwrap();
-//         let snapshot = tree.snapshot();
-//         snapshot.check_invariants(true);
-//         snapshot
-//     });
-
-//     for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
-//         let mut updated_snapshot = snapshot.clone();
-//         for update in updates.lock().iter() {
-//             if update.scan_id >= updated_snapshot.scan_id() as u64 {
-//                 updated_snapshot
-//                     .apply_remote_update(update.clone())
-//                     .unwrap();
-//             }
-//         }
-
-//         assert_eq!(
-//             updated_snapshot.entries(true).collect::<Vec<_>>(),
-//             final_snapshot.entries(true).collect::<Vec<_>>(),
-//             "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
-//         );
-//     }
-// }
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
-//     let operations = env::var("OPERATIONS")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(40);
-//     let initial_entries = env::var("INITIAL_ENTRIES")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(20);
-
-//     let root_dir = Path::new("/test");
-//     let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-//     fs.as_fake().insert_tree(root_dir, json!({})).await;
-//     for _ in 0..initial_entries {
-//         randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-//     }
-//     log::info!("generated initial tree");
-
-//     let worktree = Worktree::local(
-//         build_client(cx),
-//         root_dir,
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let updates = Arc::new(Mutex::new(Vec::new()));
-//     worktree.update(cx, |tree, cx| {
-//         check_worktree_change_events(tree, cx);
-
-//         let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-//             let updates = updates.clone();
-//             move |update| {
-//                 updates.lock().push(update);
-//                 async { true }
-//             }
-//         });
-//     });
-
-//     worktree
-//         .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-//         .await;
-
-//     fs.as_fake().pause_events();
-//     let mut snapshots = Vec::new();
-//     let mut mutations_len = operations;
-//     while mutations_len > 1 {
-//         if rng.gen_bool(0.2) {
-//             worktree
-//                 .update(cx, |worktree, cx| {
-//                     randomly_mutate_worktree(worktree, &mut rng, cx)
-//                 })
-//                 .await
-//                 .log_err();
-//         } else {
-//             randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-//         }
-
-//         let buffered_event_count = fs.as_fake().buffered_event_count();
-//         if buffered_event_count > 0 && rng.gen_bool(0.3) {
-//             let len = rng.gen_range(0..=buffered_event_count);
-//             log::info!("flushing {} events", len);
-//             fs.as_fake().flush_events(len);
-//         } else {
-//             randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
-//             mutations_len -= 1;
-//         }
-
-//         cx.foreground().run_until_parked();
-//         if rng.gen_bool(0.2) {
-//             log::info!("storing snapshot {}", snapshots.len());
-//             let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//             snapshots.push(snapshot);
-//         }
-//     }
-
-//     log::info!("quiescing");
-//     fs.as_fake().flush_events(usize::MAX);
-//     cx.foreground().run_until_parked();
-
-//     let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//     snapshot.check_invariants(true);
-//     let expanded_paths = snapshot
-//         .expanded_entries()
-//         .map(|e| e.path.clone())
-//         .collect::<Vec<_>>();
-
-//     {
-//         let new_worktree = Worktree::local(
-//             build_client(cx),
-//             root_dir,
-//             true,
-//             fs.clone(),
-//             Default::default(),
-//             &mut cx.to_async(),
-//         )
-//         .await
-//         .unwrap();
-//         new_worktree
-//             .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-//             .await;
-//         new_worktree
-//             .update(cx, |tree, _| {
-//                 tree.as_local_mut()
-//                     .unwrap()
-//                     .refresh_entries_for_paths(expanded_paths)
-//             })
-//             .recv()
-//             .await;
-//         let new_snapshot =
-//             new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//         assert_eq!(
-//             snapshot.entries_without_ids(true),
-//             new_snapshot.entries_without_ids(true)
-//         );
-//     }
-
-//     for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
-//         for update in updates.lock().iter() {
-//             if update.scan_id >= prev_snapshot.scan_id() as u64 {
-//                 prev_snapshot.apply_remote_update(update.clone()).unwrap();
-//             }
-//         }
-
-//         assert_eq!(
-//             prev_snapshot
-//                 .entries(true)
-//                 .map(ignore_pending_dir)
-//                 .collect::<Vec<_>>(),
-//             snapshot
-//                 .entries(true)
-//                 .map(ignore_pending_dir)
-//                 .collect::<Vec<_>>(),
-//             "wrong updates after snapshot {i}: {updates:#?}",
-//         );
-//     }
-
-//     fn ignore_pending_dir(entry: &Entry) -> Entry {
-//         let mut entry = entry.clone();
-//         if entry.kind.is_dir() {
-//             entry.kind = EntryKind::Dir
-//         }
-//         entry
-//     }
-// }
-
-// // The worktree's `UpdatedEntries` event can be used to follow along with
-// // all changes to the worktree's snapshot.
-// fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
-//     let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
-//     cx.subscribe(&cx.handle(), move |tree, _, event, _| {
-//         if let Event::UpdatedEntries(changes) = event {
-//             for (path, _, change_type) in changes.iter() {
-//                 let entry = tree.entry_for_path(&path).cloned();
-//                 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
-//                     Ok(ix) | Err(ix) => ix,
-//                 };
-//                 match change_type {
-//                     PathChange::Added => entries.insert(ix, entry.unwrap()),
-//                     PathChange::Removed => drop(entries.remove(ix)),
-//                     PathChange::Updated => {
-//                         let entry = entry.unwrap();
-//                         let existing_entry = entries.get_mut(ix).unwrap();
-//                         assert_eq!(existing_entry.path, entry.path);
-//                         *existing_entry = entry;
-//                     }
-//                     PathChange::AddedOrUpdated | PathChange::Loaded => {
-//                         let entry = entry.unwrap();
-//                         if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
-//                             *entries.get_mut(ix).unwrap() = entry;
-//                         } else {
-//                             entries.insert(ix, entry);
-//                         }
-//                     }
-//                 }
-//             }
-
-//             let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
-//             assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
-//         }
-//     })
-//     .detach();
-// }
-
-// fn randomly_mutate_worktree(
-//     worktree: &mut Worktree,
-//     rng: &mut impl Rng,
-//     cx: &mut ModelContext<Worktree>,
-// ) -> Task<Result<()>> {
-//     log::info!("mutating worktree");
-//     let worktree = worktree.as_local_mut().unwrap();
-//     let snapshot = worktree.snapshot();
-//     let entry = snapshot.entries(false).choose(rng).unwrap();
-
-//     match rng.gen_range(0_u32..100) {
-//         0..=33 if entry.path.as_ref() != Path::new("") => {
-//             log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
-//             worktree.delete_entry(entry.id, cx).unwrap()
-//         }
-//         ..=66 if entry.path.as_ref() != Path::new("") => {
-//             let other_entry = snapshot.entries(false).choose(rng).unwrap();
-//             let new_parent_path = if other_entry.is_dir() {
-//                 other_entry.path.clone()
-//             } else {
-//                 other_entry.path.parent().unwrap().into()
-//             };
-//             let mut new_path = new_parent_path.join(random_filename(rng));
-//             if new_path.starts_with(&entry.path) {
-//                 new_path = random_filename(rng).into();
-//             }
-
-//             log::info!(
-//                 "renaming entry {:?} ({}) to {:?}",
-//                 entry.path,
-//                 entry.id.0,
-//                 new_path
-//             );
-//             let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
-//             cx.foreground().spawn(async move {
-//                 task.await?;
-//                 Ok(())
-//             })
-//         }
-//         _ => {
-//             let task = if entry.is_dir() {
-//                 let child_path = entry.path.join(random_filename(rng));
-//                 let is_dir = rng.gen_bool(0.3);
-//                 log::info!(
-//                     "creating {} at {:?}",
-//                     if is_dir { "dir" } else { "file" },
-//                     child_path,
-//                 );
-//                 worktree.create_entry(child_path, is_dir, cx)
-//             } else {
-//                 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
-//                 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
-//             };
-//             cx.foreground().spawn(async move {
-//                 task.await?;
-//                 Ok(())
-//             })
-//         }
-//     }
-// }
-
-// async fn randomly_mutate_fs(
-//     fs: &Arc<dyn Fs>,
-//     root_path: &Path,
-//     insertion_probability: f64,
-//     rng: &mut impl Rng,
-// ) {
-//     log::info!("mutating fs");
-//     let mut files = Vec::new();
-//     let mut dirs = Vec::new();
-//     for path in fs.as_fake().paths(false) {
-//         if path.starts_with(root_path) {
-//             if fs.is_file(&path).await {
-//                 files.push(path);
-//             } else {
-//                 dirs.push(path);
-//             }
-//         }
-//     }
-
-//     if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
-//         let path = dirs.choose(rng).unwrap();
-//         let new_path = path.join(random_filename(rng));
-
-//         if rng.gen() {
-//             log::info!(
-//                 "creating dir {:?}",
-//                 new_path.strip_prefix(root_path).unwrap()
-//             );
-//             fs.create_dir(&new_path).await.unwrap();
-//         } else {
-//             log::info!(
-//                 "creating file {:?}",
-//                 new_path.strip_prefix(root_path).unwrap()
-//             );
-//             fs.create_file(&new_path, Default::default()).await.unwrap();
-//         }
-//     } else if rng.gen_bool(0.05) {
-//         let ignore_dir_path = dirs.choose(rng).unwrap();
-//         let ignore_path = ignore_dir_path.join(&*GITIGNORE);
-
-//         let subdirs = dirs
-//             .iter()
-//             .filter(|d| d.starts_with(&ignore_dir_path))
-//             .cloned()
-//             .collect::<Vec<_>>();
-//         let subfiles = files
-//             .iter()
-//             .filter(|d| d.starts_with(&ignore_dir_path))
-//             .cloned()
-//             .collect::<Vec<_>>();
-//         let files_to_ignore = {
-//             let len = rng.gen_range(0..=subfiles.len());
-//             subfiles.choose_multiple(rng, len)
-//         };
-//         let dirs_to_ignore = {
-//             let len = rng.gen_range(0..subdirs.len());
-//             subdirs.choose_multiple(rng, len)
-//         };
-
-//         let mut ignore_contents = String::new();
-//         for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
-//             writeln!(
-//                 ignore_contents,
-//                 "{}",
-//                 path_to_ignore
-//                     .strip_prefix(&ignore_dir_path)
-//                     .unwrap()
-//                     .to_str()
-//                     .unwrap()
-//             )
-//             .unwrap();
-//         }
-//         log::info!(
-//             "creating gitignore {:?} with contents:\n{}",
-//             ignore_path.strip_prefix(&root_path).unwrap(),
-//             ignore_contents
-//         );
-//         fs.save(
-//             &ignore_path,
-//             &ignore_contents.as_str().into(),
-//             Default::default(),
-//         )
-//         .await
-//         .unwrap();
-//     } else {
-//         let old_path = {
-//             let file_path = files.choose(rng);
-//             let dir_path = dirs[1..].choose(rng);
-//             file_path.into_iter().chain(dir_path).choose(rng).unwrap()
-//         };
-
-//         let is_rename = rng.gen();
-//         if is_rename {
-//             let new_path_parent = dirs
-//                 .iter()
-//                 .filter(|d| !d.starts_with(old_path))
-//                 .choose(rng)
-//                 .unwrap();
-
-//             let overwrite_existing_dir =
-//                 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
-//             let new_path = if overwrite_existing_dir {
-//                 fs.remove_dir(
-//                     &new_path_parent,
-//                     RemoveOptions {
-//                         recursive: true,
-//                         ignore_if_not_exists: true,
-//                     },
-//                 )
-//                 .await
-//                 .unwrap();
-//                 new_path_parent.to_path_buf()
-//             } else {
-//                 new_path_parent.join(random_filename(rng))
-//             };
-
-//             log::info!(
-//                 "renaming {:?} to {}{:?}",
-//                 old_path.strip_prefix(&root_path).unwrap(),
-//                 if overwrite_existing_dir {
-//                     "overwrite "
-//                 } else {
-//                     ""
-//                 },
-//                 new_path.strip_prefix(&root_path).unwrap()
-//             );
-//             fs.rename(
-//                 &old_path,
-//                 &new_path,
-//                 fs::RenameOptions {
-//                     overwrite: true,
-//                     ignore_if_exists: true,
-//                 },
-//             )
-//             .await
-//             .unwrap();
-//         } else if fs.is_file(&old_path).await {
-//             log::info!(
-//                 "deleting file {:?}",
-//                 old_path.strip_prefix(&root_path).unwrap()
-//             );
-//             fs.remove_file(old_path, Default::default()).await.unwrap();
-//         } else {
-//             log::info!(
-//                 "deleting dir {:?}",
-//                 old_path.strip_prefix(&root_path).unwrap()
-//             );
-//             fs.remove_dir(
-//                 &old_path,
-//                 RemoveOptions {
-//                     recursive: true,
-//                     ignore_if_not_exists: true,
-//                 },
-//             )
-//             .await
-//             .unwrap();
-//         }
-//     }
-// }
-
-// fn random_filename(rng: &mut impl Rng) -> String {
-//     (0..6)
-//         .map(|_| rng.sample(rand::distributions::Alphanumeric))
-//         .map(char::from)
-//         .collect()
-// }
-
-// #[gpui::test]
-// async fn test_rename_work_directory(cx: &mut TestAppContext) {
-//     let root = temp_tree(json!({
-//         "projects": {
-//             "project1": {
-//                 "a": "",
-//                 "b": "",
-//             }
-//         },
-
-//     }));
-//     let root_path = root.path();
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         root_path,
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let repo = git_init(&root_path.join("projects/project1"));
-//     git_add("a", &repo);
-//     git_commit("init", &repo);
-//     std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.flush_fs_events(cx).await;
-
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         let (work_dir, _) = tree.repositories().next().unwrap();
-//         assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project1/a")),
-//             Some(GitFileStatus::Modified)
-//         );
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project1/b")),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-
-//     std::fs::rename(
-//         root_path.join("projects/project1"),
-//         root_path.join("projects/project2"),
-//     )
-//     .ok();
-//     tree.flush_fs_events(cx).await;
-
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         let (work_dir, _) = tree.repositories().next().unwrap();
-//         assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project2/a")),
-//             Some(GitFileStatus::Modified)
-//         );
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project2/b")),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_git_repository_for_path(cx: &mut TestAppContext) {
-//     let root = temp_tree(json!({
-//         "c.txt": "",
-//         "dir1": {
-//             ".git": {},
-//             "deps": {
-//                 "dep1": {
-//                     ".git": {},
-//                     "src": {
-//                         "a.txt": ""
-//                     }
-//                 }
-//             },
-//             "src": {
-//                 "b.txt": ""
-//             }
-//         },
-//     }));
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         root.path(),
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-//     tree.flush_fs_events(cx).await;
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let tree = tree.as_local().unwrap();
-
-//         assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
-
-//         let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
-//         assert_eq!(
-//             entry
-//                 .work_directory(tree)
-//                 .map(|directory| directory.as_ref().to_owned()),
-//             Some(Path::new("dir1").to_owned())
-//         );
-
-//         let entry = tree
-//             .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
-//             .unwrap();
-//         assert_eq!(
-//             entry
-//                 .work_directory(tree)
-//                 .map(|directory| directory.as_ref().to_owned()),
-//             Some(Path::new("dir1/deps/dep1").to_owned())
-//         );
-
-//         let entries = tree.files(false, 0);
-
-//         let paths_with_repos = tree
-//             .entries_with_repositories(entries)
-//             .map(|(entry, repo)| {
-//                 (
-//                     entry.path.as_ref(),
-//                     repo.and_then(|repo| {
-//                         repo.work_directory(&tree)
-//                             .map(|work_directory| work_directory.0.to_path_buf())
-//                     }),
-//                 )
-//             })
-//             .collect::<Vec<_>>();
-
-//         assert_eq!(
-//             paths_with_repos,
-//             &[
-//                 (Path::new("c.txt"), None),
-//                 (
-//                     Path::new("dir1/deps/dep1/src/a.txt"),
-//                     Some(Path::new("dir1/deps/dep1").into())
-//                 ),
-//                 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
-//             ]
-//         );
-//     });
-
-//     let repo_update_events = Arc::new(Mutex::new(vec![]));
-//     tree.update(cx, |_, cx| {
-//         let repo_update_events = repo_update_events.clone();
-//         cx.subscribe(&tree, move |_, _, event, _| {
-//             if let Event::UpdatedGitRepositories(update) = event {
-//                 repo_update_events.lock().push(update.clone());
-//             }
-//         })
-//         .detach();
-//     });
-
-//     std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
-//     tree.flush_fs_events(cx).await;
-
-//     assert_eq!(
-//         repo_update_events.lock()[0]
-//             .iter()
-//             .map(|e| e.0.clone())
-//             .collect::<Vec<Arc<Path>>>(),
-//         vec![Path::new("dir1").into()]
-//     );
-
-//     std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
-//     tree.flush_fs_events(cx).await;
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let tree = tree.as_local().unwrap();
-
-//         assert!(tree
-//             .repository_for_path("dir1/src/b.txt".as_ref())
-//             .is_none());
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
-//     const IGNORE_RULE: &'static str = "**/target";
-
-//     let root = temp_tree(json!({
-//         "project": {
-//             "a.txt": "a",
-//             "b.txt": "bb",
-//             "c": {
-//                 "d": {
-//                     "e.txt": "eee"
-//                 }
-//             },
-//             "f.txt": "ffff",
-//             "target": {
-//                 "build_file": "???"
-//             },
-//             ".gitignore": IGNORE_RULE
-//         },
-
-//     }));
-
-//     const A_TXT: &'static str = "a.txt";
-//     const B_TXT: &'static str = "b.txt";
-//     const E_TXT: &'static str = "c/d/e.txt";
-//     const F_TXT: &'static str = "f.txt";
-//     const DOTGITIGNORE: &'static str = ".gitignore";
-//     const BUILD_FILE: &'static str = "target/build_file";
-//     let project_path = Path::new("project");
-
-//     // Set up git repository before creating the worktree.
-//     let work_dir = root.path().join("project");
-//     let mut repo = git_init(work_dir.as_path());
-//     repo.add_ignore_rule(IGNORE_RULE).unwrap();
-//     git_add(A_TXT, &repo);
-//     git_add(E_TXT, &repo);
-//     git_add(DOTGITIGNORE, &repo);
-//     git_commit("Initial commit", &repo);
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         root.path(),
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     tree.flush_fs_events(cx).await;
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-//     deterministic.run_until_parked();
-
-//     // Check that the right git state is observed on startup
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(snapshot.repositories().count(), 1);
-//         let (dir, _) = snapshot.repositories().next().unwrap();
-//         assert_eq!(dir.as_ref(), Path::new("project"));
-
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(B_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(F_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-
-//     // Modify a file in the working copy.
-//     std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     // The worktree detects that the file's git status has changed.
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(A_TXT)),
-//             Some(GitFileStatus::Modified)
-//         );
-//     });
-
-//     // Create a commit in the git repository.
-//     git_add(A_TXT, &repo);
-//     git_add(B_TXT, &repo);
-//     git_commit("Committing modified and added", &repo);
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     // The worktree detects that the files' git status have changed.
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(F_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//         assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
-//         assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-//     });
-
-//     // Modify files in the working copy and perform git operations on other files.
-//     git_reset(0, &repo);
-//     git_remove_index(Path::new(B_TXT), &repo);
-//     git_stash(&mut repo);
-//     std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
-//     std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     // Check that more complex repo changes are tracked
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-
-//         assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(B_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(E_TXT)),
-//             Some(GitFileStatus::Modified)
-//         );
-//     });
-
-//     std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
-//     std::fs::remove_dir_all(work_dir.join("c")).unwrap();
-//     std::fs::write(
-//         work_dir.join(DOTGITIGNORE),
-//         [IGNORE_RULE, "f.txt"].join("\n"),
-//     )
-//     .unwrap();
-
-//     git_add(Path::new(DOTGITIGNORE), &repo);
-//     git_commit("Committing modified git ignore", &repo);
-
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     let mut renamed_dir_name = "first_directory/second_directory";
-//     const RENAMED_FILE: &'static str = "rf.txt";
-
-//     std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
-//     std::fs::write(
-//         work_dir.join(renamed_dir_name).join(RENAMED_FILE),
-//         "new-contents",
-//     )
-//     .unwrap();
-
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(
-//             snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-
-//     renamed_dir_name = "new_first_directory/second_directory";
-
-//     std::fs::rename(
-//         work_dir.join("first_directory"),
-//         work_dir.join("new_first_directory"),
-//     )
-//     .unwrap();
-
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-
-//         assert_eq!(
-//             snapshot.status_for_file(
-//                 project_path
-//                     .join(Path::new(renamed_dir_name))
-//                     .join(RENAMED_FILE)
-//             ),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".git": {},
-//             "a": {
-//                 "b": {
-//                     "c1.txt": "",
-//                     "c2.txt": "",
-//                 },
-//                 "d": {
-//                     "e1.txt": "",
-//                     "e2.txt": "",
-//                     "e3.txt": "",
-//                 }
-//             },
-//             "f": {
-//                 "no-status.txt": ""
-//             },
-//             "g": {
-//                 "h1.txt": "",
-//                 "h2.txt": ""
-//             },
-
-//         }),
-//     )
-//     .await;
-
-//     fs.set_status_for_repo_via_git_operation(
-//         &Path::new("/root/.git"),
-//         &[
-//             (Path::new("a/b/c1.txt"), GitFileStatus::Added),
-//             (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
-//             (Path::new("g/h2.txt"), GitFileStatus::Conflict),
-//         ],
-//     );
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     cx.foreground().run_until_parked();
-//     let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
-
-//     check_propagated_statuses(
-//         &snapshot,
-//         &[
-//             (Path::new(""), Some(GitFileStatus::Conflict)),
-//             (Path::new("a"), Some(GitFileStatus::Modified)),
-//             (Path::new("a/b"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c2.txt"), None),
-//             (Path::new("a/d"), Some(GitFileStatus::Modified)),
-//             (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-//             (Path::new("f"), None),
-//             (Path::new("f/no-status.txt"), None),
-//             (Path::new("g"), Some(GitFileStatus::Conflict)),
-//             (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
-//         ],
-//     );
-
-//     check_propagated_statuses(
-//         &snapshot,
-//         &[
-//             (Path::new("a/b"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c2.txt"), None),
-//             (Path::new("a/d"), Some(GitFileStatus::Modified)),
-//             (Path::new("a/d/e1.txt"), None),
-//             (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-//             (Path::new("f"), None),
-//             (Path::new("f/no-status.txt"), None),
-//             (Path::new("g"), Some(GitFileStatus::Conflict)),
-//         ],
-//     );
-
-//     check_propagated_statuses(
-//         &snapshot,
-//         &[
-//             (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c2.txt"), None),
-//             (Path::new("a/d/e1.txt"), None),
-//             (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-//             (Path::new("f/no-status.txt"), None),
-//         ],
-//     );
-
-//     #[track_caller]
-//     fn check_propagated_statuses(
-//         snapshot: &Snapshot,
-//         expected_statuses: &[(&Path, Option<GitFileStatus>)],
-//     ) {
-//         let mut entries = expected_statuses
-//             .iter()
-//             .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
-//             .collect::<Vec<_>>();
-//         snapshot.propagate_git_statuses(&mut entries);
-//         assert_eq!(
-//             entries
-//                 .iter()
-//                 .map(|e| (e.path.as_ref(), e.git_status))
-//                 .collect::<Vec<_>>(),
-//             expected_statuses
-//         );
-//     }
-// }
-
-// fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
-//     let http_client = FakeHttpClient::with_404_response();
-//     cx.read(|cx| Client::new(http_client, cx))
-// }
-
-// #[track_caller]
-// fn git_init(path: &Path) -> git2::Repository {
-//     git2::Repository::init(path).expect("Failed to initialize git repository")
-// }
-
-// #[track_caller]
-// fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
-//     let path = path.as_ref();
-//     let mut index = repo.index().expect("Failed to get index");
-//     index.add_path(path).expect("Failed to add a.txt");
-//     index.write().expect("Failed to write index");
-// }
-
-// #[track_caller]
-// fn git_remove_index(path: &Path, repo: &git2::Repository) {
-//     let mut index = repo.index().expect("Failed to get index");
-//     index.remove_path(path).expect("Failed to add a.txt");
-//     index.write().expect("Failed to write index");
-// }
-
-// #[track_caller]
-// fn git_commit(msg: &'static str, repo: &git2::Repository) {
-//     use git2::Signature;
-
-//     let signature = Signature::now("test", "test@zed.dev").unwrap();
-//     let oid = repo.index().unwrap().write_tree().unwrap();
-//     let tree = repo.find_tree(oid).unwrap();
-//     if let Some(head) = repo.head().ok() {
-//         let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
-
-//         let parent_commit = parent_obj.as_commit().unwrap();
-
-//         repo.commit(
-//             Some("HEAD"),
-//             &signature,
-//             &signature,
-//             msg,
-//             &tree,
-//             &[parent_commit],
-//         )
-//         .expect("Failed to commit with parent");
-//     } else {
-//         repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
-//             .expect("Failed to commit");
-//     }
-// }
-
-// #[track_caller]
-// fn git_stash(repo: &mut git2::Repository) {
-//     use git2::Signature;
-
-//     let signature = Signature::now("test", "test@zed.dev").unwrap();
-//     repo.stash_save(&signature, "N/A", None)
-//         .expect("Failed to stash");
-// }
-
-// #[track_caller]
-// fn git_reset(offset: usize, repo: &git2::Repository) {
-//     let head = repo.head().expect("Couldn't get repo head");
-//     let object = head.peel(git2::ObjectType::Commit).unwrap();
-//     let commit = object.as_commit().unwrap();
-//     let new_head = commit
-//         .parents()
-//         .inspect(|parnet| {
-//             parnet.message();
-//         })
-//         .skip(offset)
-//         .next()
-//         .expect("Not enough history");
-//     repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
-//         .expect("Could not reset");
-// }
-
-// #[allow(dead_code)]
-// #[track_caller]
-// fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
-//     repo.statuses(None)
-//         .unwrap()
-//         .iter()
-//         .map(|status| (status.path().unwrap().to_string(), status.status()))
-//         .collect()
-// }
+use crate::{
+    project_settings::ProjectSettings,
+    worktree::{Event, Snapshot, WorktreeModelHandle},
+    Entry, EntryKind, PathChange, Project, Worktree,
+};
+use anyhow::Result;
+use client::Client;
+use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
+use git::GITIGNORE;
+use gpui::{ModelContext, Task, TestAppContext};
+use parking_lot::Mutex;
+use postage::stream::Stream;
+use pretty_assertions::assert_eq;
+use rand::prelude::*;
+use serde_json::json;
+use settings::SettingsStore;
+use std::{
+    env,
+    fmt::Write,
+    mem,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
+use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
+
+#[gpui::test]
+async fn test_traversal(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+           ".gitignore": "a/b\n",
+           "a": {
+               "b": "",
+               "c": "",
+           }
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
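+    // "a/b" is gitignored, so it is skipped when ignored entries are excluded
+    // and included when they are requested.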
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new(".gitignore"),
+                Path::new("a"),
+                Path::new("a/c"),
+            ]
+        );
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new(".gitignore"),
+                Path::new("a"),
+                Path::new("a/b"),
+                Path::new("a/c"),
+            ]
+        );
+    })
+}
+
+#[gpui::test]
+async fn test_descendent_entries(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "a": "",
+            "b": {
+               "c": {
+                   "d": ""
+               },
+               "e": {}
+            },
+            "f": "",
+            "g": {
+                "h": {}
+            },
+            "i": {
+                "j": {
+                    "k": ""
+                },
+                "l": {
+
+                }
+            },
+            ".gitignore": "i/j\n",
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
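+    // Without directories, only the file "b/c/d" is reported under "b";
+    // with directories, "b", "b/c", and "b/e" are reported as well.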
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("b"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("b/c/d"),]
+        );
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("b"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new("b"),
+                Path::new("b/c"),
+                Path::new("b/c/d"),
+                Path::new("b/e"),
+            ]
+        );
+
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("g"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            Vec::<PathBuf>::new()
+        );
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("g"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("g"), Path::new("g/h"),]
+        );
+    });
+
+    // Expand gitignored directory.
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
+    })
+    .recv()
+    .await;
+
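+    // The gitignored file "i/j/k" only shows up when ignored entries are requested.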
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            Vec::<PathBuf>::new()
+        );
+        assert_eq!(
+            tree.descendent_entries(false, true, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("i/j/k")]
+        );
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("i"), Path::new("i/l"),]
+        );
+    })
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_circular_symlinks(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "lib": {
+                "a": {
+                    "a.txt": ""
+                },
+                "b": {
+                    "b.txt": ""
+                }
+            }
+        }),
+    )
+    .await;
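+    // Create symlinks that point back up the tree, forming cycles.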
+    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
+    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new("lib"),
+                Path::new("lib/a"),
+                Path::new("lib/a/a.txt"),
+                Path::new("lib/a/lib"),
+                Path::new("lib/b"),
+                Path::new("lib/b/b.txt"),
+                Path::new("lib/b/lib"),
+            ]
+        );
+    });
+
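+    // Renaming one of the symlinks is reflected in the worktree after events are processed.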
+    fs.rename(
+        Path::new("/root/lib/a/lib"),
+        Path::new("/root/lib/a/lib-2"),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    cx.executor().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new("lib"),
+                Path::new("lib/a"),
+                Path::new("lib/a/a.txt"),
+                Path::new("lib/a/lib-2"),
+                Path::new("lib/b"),
+                Path::new("lib/b/b.txt"),
+                Path::new("lib/b/lib"),
+            ]
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "dir1": {
+                "deps": {
+                    // symlinks here
+                },
+                "src": {
+                    "a.rs": "",
+                    "b.rs": "",
+                },
+            },
+            "dir2": {
+                "src": {
+                    "c.rs": "",
+                    "d.rs": "",
+                }
+            },
+            "dir3": {
+                "deps": {},
+                "src": {
+                    "e.rs": "",
+                    "f.rs": "",
+                },
+            }
+        }),
+    )
+    .await;
+
+    // These symlinks point to directories outside of the worktree's root, dir1.
+    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
+        .await;
+    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
+        .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root/dir1"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
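+    // Record the worktree's `UpdatedEntries` events so the reported path changes
+    // can be asserted below.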
+    let tree_updates = Arc::new(Mutex::new(Vec::new()));
+    tree.update(cx, |_, cx| {
+        let tree_updates = tree_updates.clone();
+        cx.subscribe(&tree, move |_, _, event, _| {
+            if let Event::UpdatedEntries(update) = event {
+                tree_updates.lock().extend(
+                    update
+                        .iter()
+                        .map(|(path, _, change)| (path.clone(), *change)),
+                );
+            }
+        })
+        .detach();
+    });
+
+    // The symlinked directories are not scanned by default.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_external))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new("deps"), false),
+                (Path::new("deps/dep-dir2"), true),
+                (Path::new("deps/dep-dir3"), true),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+            ]
+        );
+
+        assert_eq!(
+            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
+            EntryKind::UnloadedDir
+        );
+    });
+
+    // Expand one of the symlinked directories.
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
+    })
+    .recv()
+    .await;
+
+    // The expanded directory's contents are loaded. Subdirectories are
+    // not scanned yet.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_external))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new("deps"), false),
+                (Path::new("deps/dep-dir2"), true),
+                (Path::new("deps/dep-dir3"), true),
+                (Path::new("deps/dep-dir3/deps"), true),
+                (Path::new("deps/dep-dir3/src"), true),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+            ]
+        );
+    });
+    assert_eq!(
+        mem::take(&mut *tree_updates.lock()),
+        &[
+            (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
+            (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
+            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
+        ]
+    );
+
+    // Expand a subdirectory of one of the symlinked directories.
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
+    })
+    .recv()
+    .await;
+
+    // The expanded subdirectory's contents are loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_external))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new("deps"), false),
+                (Path::new("deps/dep-dir2"), true),
+                (Path::new("deps/dep-dir3"), true),
+                (Path::new("deps/dep-dir3/deps"), true),
+                (Path::new("deps/dep-dir3/src"), true),
+                (Path::new("deps/dep-dir3/src/e.rs"), true),
+                (Path::new("deps/dep-dir3/src/f.rs"), true),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+            ]
+        );
+    });
+
+    assert_eq!(
+        mem::take(&mut *tree_updates.lock()),
+        &[
+            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
+            (
+                Path::new("deps/dep-dir3/src/e.rs").into(),
+                PathChange::Loaded
+            ),
+            (
+                Path::new("deps/dep-dir3/src/f.rs").into(),
+                PathChange::Loaded
+            )
+        ]
+    );
+}
+
+#[gpui::test]
+async fn test_open_gitignored_files(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "node_modules\n",
+            "one": {
+                "node_modules": {
+                    "a": {
+                        "a1.js": "a1",
+                        "a2.js": "a2",
+                    },
+                    "b": {
+                        "b1.js": "b1",
+                        "b2.js": "b2",
+                    },
+                    "c": {
+                        "c1.js": "c1",
+                        "c2.js": "c2",
+                    }
+                },
+            },
+            "two": {
+                "x.js": "",
+                "y.js": "",
+            },
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("one"), false),
+                (Path::new("one/node_modules"), true),
+                (Path::new("two"), false),
+                (Path::new("two/x.js"), false),
+                (Path::new("two/y.js"), false),
+            ]
+        );
+    });
+
+    // Open a file that is nested inside of a gitignored directory that
+    // has not yet been expanded.
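+    // Track `read_dir` calls to verify that only the directories needed to load
+    // this file are scanned.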
+    let prev_read_dir_count = fs.read_dir_call_count();
+    let buffer = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
+        })
+        .await
+        .unwrap();
+
+    tree.read_with(cx, |tree, cx| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("one"), false),
+                (Path::new("one/node_modules"), true),
+                (Path::new("one/node_modules/a"), true),
+                (Path::new("one/node_modules/b"), true),
+                (Path::new("one/node_modules/b/b1.js"), true),
+                (Path::new("one/node_modules/b/b2.js"), true),
+                (Path::new("one/node_modules/c"), true),
+                (Path::new("two"), false),
+                (Path::new("two/x.js"), false),
+                (Path::new("two/y.js"), false),
+            ]
+        );
+
+        assert_eq!(
+            buffer.read(cx).file().unwrap().path().as_ref(),
+            Path::new("one/node_modules/b/b1.js")
+        );
+
+        // Only the newly-expanded directories are scanned:
+        // `one/node_modules` and `one/node_modules/b`.
+        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
+    });
+
+    // Open another file in a different subdirectory of the same
+    // gitignored directory.
+    let prev_read_dir_count = fs.read_dir_call_count();
+    let buffer = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
+        })
+        .await
+        .unwrap();
+
+    tree.read_with(cx, |tree, cx| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("one"), false),
+                (Path::new("one/node_modules"), true),
+                (Path::new("one/node_modules/a"), true),
+                (Path::new("one/node_modules/a/a1.js"), true),
+                (Path::new("one/node_modules/a/a2.js"), true),
+                (Path::new("one/node_modules/b"), true),
+                (Path::new("one/node_modules/b/b1.js"), true),
+                (Path::new("one/node_modules/b/b2.js"), true),
+                (Path::new("one/node_modules/c"), true),
+                (Path::new("two"), false),
+                (Path::new("two/x.js"), false),
+                (Path::new("two/y.js"), false),
+            ]
+        );
+
+        assert_eq!(
+            buffer.read(cx).file().unwrap().path().as_ref(),
+            Path::new("one/node_modules/a/a2.js")
+        );
+
+        // Only the newly-expanded directory, `one/node_modules/a`, is scanned.
+        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
+    });
+
+    // No work happens when files and directories change within an unloaded directory.
+    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
+    fs.create_dir("/root/one/node_modules/c/lib".as_ref())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+    assert_eq!(
+        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
+        0
+    );
+}
+
+#[gpui::test]
+async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "node_modules\n",
+            "a": {
+                "a.js": "",
+            },
+            "b": {
+                "b.js": "",
+            },
+            "node_modules": {
+                "c": {
+                    "c.js": "",
+                },
+                "d": {
+                    "d.js": "",
+                    "e": {
+                        "e1.js": "",
+                        "e2.js": "",
+                    },
+                    "f": {
+                        "f1.js": "",
+                        "f2.js": "",
+                    }
+                },
+            },
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    // Open a file within the gitignored directory, forcing some of its
+    // subdirectories to be read, but not all.
+    let read_dir_count_1 = fs.read_dir_call_count();
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
+    })
+    .recv()
+    .await;
+
+    // Those subdirectories are now loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|e| (e.path.as_ref(), e.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("a"), false),
+                (Path::new("a/a.js"), false),
+                (Path::new("b"), false),
+                (Path::new("b/b.js"), false),
+                (Path::new("node_modules"), true),
+                (Path::new("node_modules/c"), true),
+                (Path::new("node_modules/d"), true),
+                (Path::new("node_modules/d/d.js"), true),
+                (Path::new("node_modules/d/e"), true),
+                (Path::new("node_modules/d/f"), true),
+            ]
+        );
+    });
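+    // Only the directories needed to reach `d.js` were listed:
+    // `node_modules` and `node_modules/d`.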
+    let read_dir_count_2 = fs.read_dir_call_count();
+    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
+
+    // Update the gitignore so that node_modules is no longer ignored,
+    // but one of its subdirectories (`node_modules/d/e`) now is.
+    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+
+    // All of the directories that are no longer ignored are now loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|e| (e.path.as_ref(), e.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("a"), false),
+                (Path::new("a/a.js"), false),
+                (Path::new("b"), false),
+                (Path::new("b/b.js"), false),
+                // This directory is no longer ignored
+                (Path::new("node_modules"), false),
+                (Path::new("node_modules/c"), false),
+                (Path::new("node_modules/c/c.js"), false),
+                (Path::new("node_modules/d"), false),
+                (Path::new("node_modules/d/d.js"), false),
+                // This subdirectory is now ignored
+                (Path::new("node_modules/d/e"), true),
+                (Path::new("node_modules/d/f"), false),
+                (Path::new("node_modules/d/f/f1.js"), false),
+                (Path::new("node_modules/d/f/f2.js"), false),
+            ]
+        );
+    });
+
+    // Each of the newly-loaded directories is scanned only once.
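+    // (Those are `node_modules/c` and `node_modules/d/f`; `node_modules/d/e` is
+    // now ignored and stays unloaded.)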
+    let read_dir_count_3 = fs.read_dir_call_count();
+    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+    init_test(cx);
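+    // Clear `file_scan_exclusions` so that entries like `.git` are not excluded
+    // from the scan.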
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions = Some(Vec::new());
+            });
+        });
+    });
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
+            "tree": {
+                ".git": {},
+                ".gitignore": "ignored-dir\n",
+                "tracked-dir": {
+                    "tracked-file1": "",
+                    "ancestor-ignored-file1": "",
+                },
+                "ignored-dir": {
+                    "ignored-file1": ""
+                }
+            }
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        "/root/tree".as_ref(),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
+    })
+    .recv()
+    .await;
+
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        assert!(
+            !tree
+                .entry_for_path("tracked-dir/tracked-file1")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("ignored-dir/ignored-file1")
+                .unwrap()
+                .is_ignored
+        );
+    });
+
+    fs.create_file(
+        "/root/tree/tracked-dir/tracked-file2".as_ref(),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    fs.create_file(
+        "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    fs.create_file(
+        "/root/tree/ignored-dir/ignored-file2".as_ref(),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+
+    cx.executor().run_until_parked();
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        assert!(
+            !tree
+                .entry_for_path("tracked-dir/tracked-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("ignored-dir/ignored-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
+    });
+}
+
+#[gpui::test]
+async fn test_write_file(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let dir = temp_tree(json!({
+        ".git": {},
+        ".gitignore": "ignored-dir\n",
+        "tracked-dir": {},
+        "ignored-dir": {}
+    }));
+
+    let tree = Worktree::local(
+        build_client(cx),
+        dir.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+
+    tree.update(cx, |tree, cx| {
+        tree.as_local().unwrap().write_file(
+            Path::new("tracked-dir/file.txt"),
+            "hello".into(),
+            Default::default(),
+            cx,
+        )
+    })
+    .await
+    .unwrap();
+    tree.update(cx, |tree, cx| {
+        tree.as_local().unwrap().write_file(
+            Path::new("ignored-dir/file.txt"),
+            "world".into(),
+            Default::default(),
+            cx,
+        )
+    })
+    .await
+    .unwrap();
+
+    tree.read_with(cx, |tree, _| {
+        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
+        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
+        assert!(!tracked.is_ignored);
+        assert!(ignored.is_ignored);
+    });
+}
+
+#[gpui::test]
+async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let dir = temp_tree(json!({
+        ".gitignore": "**/target\n/node_modules\n",
+        "target": {
+            "index": "blah2"
+        },
+        "node_modules": {
+            ".DS_Store": "",
+            "prettier": {
+                "package.json": "{}",
+            },
+        },
+        "src": {
+            ".DS_Store": "",
+            "foo": {
+                "foo.rs": "mod another;\n",
+                "another.rs": "// another",
+            },
+            "bar": {
+                "bar.rs": "// bar",
+            },
+            "lib.rs": "mod foo;\nmod bar;\n",
+        },
+        ".DS_Store": "",
+    }));
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
+            });
+        });
+    });
+
+    let tree = Worktree::local(
+        build_client(cx),
+        dir.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "node_modules/.DS_Store",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+            &["target", "node_modules"],
+            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
+        )
+    });
+
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/node_modules/**".to_string()]);
+            });
+        });
+    });
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "node_modules/prettier/package.json",
+                "node_modules/.DS_Store",
+                "node_modules",
+            ],
+            &["target"],
+            &[
+                ".gitignore",
+                "src/lib.rs",
+                "src/bar/bar.rs",
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+        )
+    });
+}
+
+#[gpui::test(iterations = 30)]
+async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "b": {},
+            "c": {},
+            "d": {},
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        "/root".as_ref(),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
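+    // Mirror the worktree into a second snapshot by applying the streamed updates;
+    // it is compared against the final local snapshot at the end of the test.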
+    let snapshot1 = tree.update(cx, |tree, cx| {
+        let tree = tree.as_local_mut().unwrap();
+        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
+        let _ = tree.observe_updates(0, cx, {
+            let snapshot = snapshot.clone();
+            move |update| {
+                snapshot.lock().apply_remote_update(update).unwrap();
+                async { true }
+            }
+        });
+        snapshot
+    });
+
+    let entry = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/e".as_ref(), true, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_dir());
+
+    cx.executor().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
+    });
+
+    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
+    assert_eq!(
+        snapshot1.lock().entries(true).collect::<Vec<_>>(),
+        snapshot2.entries(true).collect::<Vec<_>>()
+    );
+}
+
+#[gpui::test]
+async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+    let fs_fake = FakeFs::new(cx.background_executor.clone());
+    fs_fake
+        .insert_tree(
+            "/root",
+            json!({
+                "a": {},
+            }),
+        )
+        .await;
+
+    let tree_fake = Worktree::local(
+        client_fake,
+        "/root".as_ref(),
+        true,
+        fs_fake,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let entry = tree_fake
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_fake.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+    });
+
+    let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+    let fs_real = Arc::new(RealFs);
+    let temp_root = temp_tree(json!({
+        "a": {}
+    }));
+
+    let tree_real = Worktree::local(
+        client_real,
+        temp_root.path(),
+        true,
+        fs_real,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let entry = tree_real
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_real.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+    });
+
+    // Smallest change: create a file whose parent directories already exist.
+    let entry = tree_real
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/b/c/e.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_real.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
+    });
+
+    // Largest change: create a file where none of the intermediate directories exist yet.
+    let entry = tree_real
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("d/e/f/g.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_real.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
+        assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
+        assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
+        assert!(tree.entry_for_path("d/").unwrap().is_dir());
+    });
+}
+
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_operations_during_initial_scan(
+    cx: &mut TestAppContext,
+    mut rng: StdRng,
+) {
+    init_test(cx);
+    let operations = env::var("OPERATIONS")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(5);
+    let initial_entries = env::var("INITIAL_ENTRIES")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(20);
+
+    let root_dir = Path::new("/test");
+    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
+    fs.as_fake().insert_tree(root_dir, json!({})).await;
+    for _ in 0..initial_entries {
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+    }
+    log::info!("generated initial tree");
+
+    let worktree = Worktree::local(
+        build_client(cx),
+        root_dir,
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
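+    // Capture intermediate snapshots and record the streamed updates; each snapshot
+    // is later fast-forwarded with those updates and compared against the final state.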
+    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    worktree.update(cx, |tree, cx| {
+        check_worktree_change_events(tree, cx);
+
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    for _ in 0..operations {
+        worktree
+            .update(cx, |worktree, cx| {
+                randomly_mutate_worktree(worktree, &mut rng, cx)
+            })
+            .await
+            .log_err();
+        worktree.read_with(cx, |tree, _| {
+            tree.as_local().unwrap().snapshot().check_invariants(true)
+        });
+
+        if rng.gen_bool(0.6) {
+            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
+        }
+    }
+
+    worktree
+        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+        .await;
+
+    cx.executor().run_until_parked();
+
+    let final_snapshot = worktree.read_with(cx, |tree, _| {
+        let tree = tree.as_local().unwrap();
+        let snapshot = tree.snapshot();
+        snapshot.check_invariants(true);
+        snapshot
+    });
+
+    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
+        let mut updated_snapshot = snapshot.clone();
+        for update in updates.lock().iter() {
+            if update.scan_id >= updated_snapshot.scan_id() as u64 {
+                updated_snapshot
+                    .apply_remote_update(update.clone())
+                    .unwrap();
+            }
+        }
+
+        assert_eq!(
+            updated_snapshot.entries(true).collect::<Vec<_>>(),
+            final_snapshot.entries(true).collect::<Vec<_>>(),
+            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
+        );
+    }
+}
+
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+    init_test(cx);
+    let operations = env::var("OPERATIONS")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(40);
+    let initial_entries = env::var("INITIAL_ENTRIES")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(20);
+
+    let root_dir = Path::new("/test");
+    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
+    fs.as_fake().insert_tree(root_dir, json!({})).await;
+    for _ in 0..initial_entries {
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+    }
+    log::info!("generated initial tree");
+
+    let worktree = Worktree::local(
+        build_client(cx),
+        root_dir,
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    worktree.update(cx, |tree, cx| {
+        check_worktree_change_events(tree, cx);
+
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    worktree
+        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+        .await;
+
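+    // Pause fs event delivery so that events can be flushed in randomly-sized batches below.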
+    fs.as_fake().pause_events();
+    let mut snapshots = Vec::new();
+    let mut mutations_len = operations;
+    while mutations_len > 1 {
+        if rng.gen_bool(0.2) {
+            worktree
+                .update(cx, |worktree, cx| {
+                    randomly_mutate_worktree(worktree, &mut rng, cx)
+                })
+                .await
+                .log_err();
+        } else {
+            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+        }
+
+        let buffered_event_count = fs.as_fake().buffered_event_count();
+        if buffered_event_count > 0 && rng.gen_bool(0.3) {
+            let len = rng.gen_range(0..=buffered_event_count);
+            log::info!("flushing {} events", len);
+            fs.as_fake().flush_events(len);
+        } else {
+            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
+            mutations_len -= 1;
+        }
+
+        cx.executor().run_until_parked();
+        if rng.gen_bool(0.2) {
+            log::info!("storing snapshot {}", snapshots.len());
+            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+            snapshots.push(snapshot);
+        }
+    }
+
+    log::info!("quiescing");
+    fs.as_fake().flush_events(usize::MAX);
+    cx.executor().run_until_parked();
+
+    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+    snapshot.check_invariants(true);
+    let expanded_paths = snapshot
+        .expanded_entries()
+        .map(|e| e.path.clone())
+        .collect::<Vec<_>>();
+
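+    // A worktree freshly scanned from the same fs, with the same paths expanded,
+    // should match the incrementally-maintained snapshot.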
+    {
+        let new_worktree = Worktree::local(
+            build_client(cx),
+            root_dir,
+            true,
+            fs.clone(),
+            Default::default(),
+            &mut cx.to_async(),
+        )
+        .await
+        .unwrap();
+        new_worktree
+            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+            .await;
+        new_worktree
+            .update(cx, |tree, _| {
+                tree.as_local_mut()
+                    .unwrap()
+                    .refresh_entries_for_paths(expanded_paths)
+            })
+            .recv()
+            .await;
+        let new_snapshot =
+            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+        assert_eq!(
+            snapshot.entries_without_ids(true),
+            new_snapshot.entries_without_ids(true)
+        );
+    }
+
+    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
+        for update in updates.lock().iter() {
+            if update.scan_id >= prev_snapshot.scan_id() as u64 {
+                prev_snapshot.apply_remote_update(update.clone()).unwrap();
+            }
+        }
+
+        assert_eq!(
+            prev_snapshot
+                .entries(true)
+                .map(ignore_pending_dir)
+                .collect::<Vec<_>>(),
+            snapshot
+                .entries(true)
+                .map(ignore_pending_dir)
+                .collect::<Vec<_>>(),
+            "wrong updates after snapshot {i}: {updates:#?}",
+        );
+    }
+
+    fn ignore_pending_dir(entry: &Entry) -> Entry {
+        let mut entry = entry.clone();
+        if entry.kind.is_dir() {
+            entry.kind = EntryKind::Dir
+        }
+        entry
+    }
+}
+
+// The worktree's `UpdatedEntries` event can be used to follow along with
+// all changes to the worktree's snapshot.
+fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
+    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
+    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
+        if let Event::UpdatedEntries(changes) = event {
+            for (path, _, change_type) in changes.iter() {
+                let entry = tree.entry_for_path(&path).cloned();
+                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
+                    Ok(ix) | Err(ix) => ix,
+                };
+                match change_type {
+                    PathChange::Added => entries.insert(ix, entry.unwrap()),
+                    PathChange::Removed => drop(entries.remove(ix)),
+                    PathChange::Updated => {
+                        let entry = entry.unwrap();
+                        let existing_entry = entries.get_mut(ix).unwrap();
+                        assert_eq!(existing_entry.path, entry.path);
+                        *existing_entry = entry;
+                    }
+                    PathChange::AddedOrUpdated | PathChange::Loaded => {
+                        let entry = entry.unwrap();
+                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
+                            *entries.get_mut(ix).unwrap() = entry;
+                        } else {
+                            entries.insert(ix, entry);
+                        }
+                    }
+                }
+            }
+
+            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
+            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
+        }
+    })
+    .detach();
+}
+
+fn randomly_mutate_worktree(
+    worktree: &mut Worktree,
+    rng: &mut impl Rng,
+    cx: &mut ModelContext<Worktree>,
+) -> Task<Result<()>> {
+    log::info!("mutating worktree");
+    let worktree = worktree.as_local_mut().unwrap();
+    let snapshot = worktree.snapshot();
+    let entry = snapshot.entries(false).choose(rng).unwrap();
+
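+    // Pick an operation: roughly a third delete an entry, a third rename one, and
+    // the rest create a child entry or overwrite a file.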
+    match rng.gen_range(0_u32..100) {
+        0..=33 if entry.path.as_ref() != Path::new("") => {
+            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
+            worktree.delete_entry(entry.id, cx).unwrap()
+        }
+        ..=66 if entry.path.as_ref() != Path::new("") => {
+            let other_entry = snapshot.entries(false).choose(rng).unwrap();
+            let new_parent_path = if other_entry.is_dir() {
+                other_entry.path.clone()
+            } else {
+                other_entry.path.parent().unwrap().into()
+            };
+            let mut new_path = new_parent_path.join(random_filename(rng));
+            if new_path.starts_with(&entry.path) {
+                new_path = random_filename(rng).into();
+            }
+
+            log::info!(
+                "renaming entry {:?} ({}) to {:?}",
+                entry.path,
+                entry.id.0,
+                new_path
+            );
+            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
+            cx.background_executor().spawn(async move {
+                task.await?;
+                Ok(())
+            })
+        }
+        _ => {
+            let task = if entry.is_dir() {
+                let child_path = entry.path.join(random_filename(rng));
+                let is_dir = rng.gen_bool(0.3);
+                log::info!(
+                    "creating {} at {:?}",
+                    if is_dir { "dir" } else { "file" },
+                    child_path,
+                );
+                worktree.create_entry(child_path, is_dir, cx)
+            } else {
+                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
+                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
+            };
+            cx.background_executor().spawn(async move {
+                task.await?;
+                Ok(())
+            })
+        }
+    }
+}
+
+async fn randomly_mutate_fs(
+    fs: &Arc<dyn Fs>,
+    root_path: &Path,
+    insertion_probability: f64,
+    rng: &mut impl Rng,
+) {
+    log::info!("mutating fs");
+    let mut files = Vec::new();
+    let mut dirs = Vec::new();
+    for path in fs.as_fake().paths(false) {
+        if path.starts_with(root_path) {
+            if fs.is_file(&path).await {
+                files.push(path);
+            } else {
+                dirs.push(path);
+            }
+        }
+    }
+
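+    // Either create a new file or directory, write a `.gitignore`, or
+    // rename/delete an existing entry.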
+    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
+        let path = dirs.choose(rng).unwrap();
+        let new_path = path.join(random_filename(rng));
+
+        if rng.gen() {
+            log::info!(
+                "creating dir {:?}",
+                new_path.strip_prefix(root_path).unwrap()
+            );
+            fs.create_dir(&new_path).await.unwrap();
+        } else {
+            log::info!(
+                "creating file {:?}",
+                new_path.strip_prefix(root_path).unwrap()
+            );
+            fs.create_file(&new_path, Default::default()).await.unwrap();
+        }
+    } else if rng.gen_bool(0.05) {
+        let ignore_dir_path = dirs.choose(rng).unwrap();
+        let ignore_path = ignore_dir_path.join(&*GITIGNORE);
+
+        let subdirs = dirs
+            .iter()
+            .filter(|d| d.starts_with(&ignore_dir_path))
+            .cloned()
+            .collect::<Vec<_>>();
+        let subfiles = files
+            .iter()
+            .filter(|d| d.starts_with(&ignore_dir_path))
+            .cloned()
+            .collect::<Vec<_>>();
+        let files_to_ignore = {
+            let len = rng.gen_range(0..=subfiles.len());
+            subfiles.choose_multiple(rng, len)
+        };
+        let dirs_to_ignore = {
+            let len = rng.gen_range(0..subdirs.len());
+            subdirs.choose_multiple(rng, len)
+        };
+
+        let mut ignore_contents = String::new();
+        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
+            writeln!(
+                ignore_contents,
+                "{}",
+                path_to_ignore
+                    .strip_prefix(&ignore_dir_path)
+                    .unwrap()
+                    .to_str()
+                    .unwrap()
+            )
+            .unwrap();
+        }
+        log::info!(
+            "creating gitignore {:?} with contents:\n{}",
+            ignore_path.strip_prefix(&root_path).unwrap(),
+            ignore_contents
+        );
+        fs.save(
+            &ignore_path,
+            &ignore_contents.as_str().into(),
+            Default::default(),
+        )
+        .await
+        .unwrap();
+    } else {
+        let old_path = {
+            let file_path = files.choose(rng);
+            let dir_path = dirs[1..].choose(rng);
+            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
+        };
+
+        let is_rename = rng.gen();
+        if is_rename {
+            let new_path_parent = dirs
+                .iter()
+                .filter(|d| !d.starts_with(old_path))
+                .choose(rng)
+                .unwrap();
+
+            let overwrite_existing_dir =
+                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
+            let new_path = if overwrite_existing_dir {
+                fs.remove_dir(
+                    &new_path_parent,
+                    RemoveOptions {
+                        recursive: true,
+                        ignore_if_not_exists: true,
+                    },
+                )
+                .await
+                .unwrap();
+                new_path_parent.to_path_buf()
+            } else {
+                new_path_parent.join(random_filename(rng))
+            };
+
+            log::info!(
+                "renaming {:?} to {}{:?}",
+                old_path.strip_prefix(&root_path).unwrap(),
+                if overwrite_existing_dir {
+                    "overwrite "
+                } else {
+                    ""
+                },
+                new_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.rename(
+                &old_path,
+                &new_path,
+                fs::RenameOptions {
+                    overwrite: true,
+                    ignore_if_exists: true,
+                },
+            )
+            .await
+            .unwrap();
+        } else if fs.is_file(&old_path).await {
+            log::info!(
+                "deleting file {:?}",
+                old_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.remove_file(old_path, Default::default()).await.unwrap();
+        } else {
+            log::info!(
+                "deleting dir {:?}",
+                old_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.remove_dir(
+                &old_path,
+                RemoveOptions {
+                    recursive: true,
+                    ignore_if_not_exists: true,
+                },
+            )
+            .await
+            .unwrap();
+        }
+    }
+}
+
+fn random_filename(rng: &mut impl Rng) -> String {
+    (0..6)
+        .map(|_| rng.sample(rand::distributions::Alphanumeric))
+        .map(char::from)
+        .collect()
+}
+
+#[gpui::test]
+async fn test_rename_work_directory(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let root = temp_tree(json!({
+        "projects": {
+            "project1": {
+                "a": "",
+                "b": "",
+            }
+        },
+
+    }));
+    let root_path = root.path();
+
+    let tree = Worktree::local(
+        build_client(cx),
+        root_path,
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let repo = git_init(&root_path.join("projects/project1"));
+    git_add("a", &repo);
+    git_commit("init", &repo);
+    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.flush_fs_events(cx).await;
+
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        let (work_dir, _) = tree.repositories().next().unwrap();
+        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project1/a")),
+            Some(GitFileStatus::Modified)
+        );
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project1/b")),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    std::fs::rename(
+        root_path.join("projects/project1"),
+        root_path.join("projects/project2"),
+    )
+    .ok();
+    tree.flush_fs_events(cx).await;
+
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        let (work_dir, _) = tree.repositories().next().unwrap();
+        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project2/a")),
+            Some(GitFileStatus::Modified)
+        );
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project2/b")),
+            Some(GitFileStatus::Added)
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let root = temp_tree(json!({
+        "c.txt": "",
+        "dir1": {
+            ".git": {},
+            "deps": {
+                "dep1": {
+                    ".git": {},
+                    "src": {
+                        "a.txt": ""
+                    }
+                }
+            },
+            "src": {
+                "b.txt": ""
+            }
+        },
+    }));
+
+    let tree = Worktree::local(
+        build_client(cx),
+        root.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+
+    tree.read_with(cx, |tree, _cx| {
+        let tree = tree.as_local().unwrap();
+
+        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
+
+        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
+        assert_eq!(
+            entry
+                .work_directory(tree)
+                .map(|directory| directory.as_ref().to_owned()),
+            Some(Path::new("dir1").to_owned())
+        );
+
+        let entry = tree
+            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
+            .unwrap();
+        assert_eq!(
+            entry
+                .work_directory(tree)
+                .map(|directory| directory.as_ref().to_owned()),
+            Some(Path::new("dir1/deps/dep1").to_owned())
+        );
+
+        let entries = tree.files(false, 0);
+
+        let paths_with_repos = tree
+            .entries_with_repositories(entries)
+            .map(|(entry, repo)| {
+                (
+                    entry.path.as_ref(),
+                    repo.and_then(|repo| {
+                        repo.work_directory(&tree)
+                            .map(|work_directory| work_directory.0.to_path_buf())
+                    }),
+                )
+            })
+            .collect::<Vec<_>>();
+
+        assert_eq!(
+            paths_with_repos,
+            &[
+                (Path::new("c.txt"), None),
+                (
+                    Path::new("dir1/deps/dep1/src/a.txt"),
+                    Some(Path::new("dir1/deps/dep1").into())
+                ),
+                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
+            ]
+        );
+    });
+
+    let repo_update_events = Arc::new(Mutex::new(vec![]));
+    tree.update(cx, |_, cx| {
+        let repo_update_events = repo_update_events.clone();
+        cx.subscribe(&tree, move |_, _, event, _| {
+            if let Event::UpdatedGitRepositories(update) = event {
+                repo_update_events.lock().push(update.clone());
+            }
+        })
+        .detach();
+    });
+
+    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
+    tree.flush_fs_events(cx).await;
+
+    assert_eq!(
+        repo_update_events.lock()[0]
+            .iter()
+            .map(|e| e.0.clone())
+            .collect::<Vec<Arc<Path>>>(),
+        vec![Path::new("dir1").into()]
+    );
+
+    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
+    tree.flush_fs_events(cx).await;
+
+    tree.read_with(cx, |tree, _cx| {
+        let tree = tree.as_local().unwrap();
+
+        assert!(tree
+            .repository_for_path("dir1/src/b.txt".as_ref())
+            .is_none());
+    });
+}
+
+#[gpui::test]
+async fn test_git_status(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    const IGNORE_RULE: &'static str = "**/target";
+
+    let root = temp_tree(json!({
+        "project": {
+            "a.txt": "a",
+            "b.txt": "bb",
+            "c": {
+                "d": {
+                    "e.txt": "eee"
+                }
+            },
+            "f.txt": "ffff",
+            "target": {
+                "build_file": "???"
+            },
+            ".gitignore": IGNORE_RULE
+        },
+
+    }));
+
+    const A_TXT: &'static str = "a.txt";
+    const B_TXT: &'static str = "b.txt";
+    const E_TXT: &'static str = "c/d/e.txt";
+    const F_TXT: &'static str = "f.txt";
+    const DOTGITIGNORE: &'static str = ".gitignore";
+    const BUILD_FILE: &'static str = "target/build_file";
+    let project_path = Path::new("project");
+
+    // Set up git repository before creating the worktree.
+    let work_dir = root.path().join("project");
+    let mut repo = git_init(work_dir.as_path());
+    repo.add_ignore_rule(IGNORE_RULE).unwrap();
+    git_add(A_TXT, &repo);
+    git_add(E_TXT, &repo);
+    git_add(DOTGITIGNORE, &repo);
+    git_commit("Initial commit", &repo);
+
+    let tree = Worktree::local(
+        build_client(cx),
+        root.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    cx.executor().run_until_parked();
+
+    // Check that the correct git state is observed on startup.
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(snapshot.repositories().count(), 1);
+        let (dir, _) = snapshot.repositories().next().unwrap();
+        assert_eq!(dir.as_ref(), Path::new("project"));
+
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(B_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(F_TXT)),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    // Modify a file in the working copy.
+    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    // The worktree detects that the file's git status has changed.
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(A_TXT)),
+            Some(GitFileStatus::Modified)
+        );
+    });
+
+    // Create a commit in the git repository.
+    git_add(A_TXT, &repo);
+    git_add(B_TXT, &repo);
+    git_commit("Committing modified and added", &repo);
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    // The worktree detects that the files' git statuses have changed.
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(F_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
+        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+    });
+
+    // Modify files in the working copy and perform git operations on other files.
+    git_reset(0, &repo);
+    git_remove_index(Path::new(B_TXT), &repo);
+    git_stash(&mut repo);
+    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
+    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    // Check that more complex repo changes are tracked.
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(B_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(E_TXT)),
+            Some(GitFileStatus::Modified)
+        );
+    });
+
+    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
+    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
+    std::fs::write(
+        work_dir.join(DOTGITIGNORE),
+        [IGNORE_RULE, "f.txt"].join("\n"),
+    )
+    .unwrap();
+
+    git_add(Path::new(DOTGITIGNORE), &repo);
+    git_commit("Committing modified git ignore", &repo);
+
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    let mut renamed_dir_name = "first_directory/second_directory";
+    const RENAMED_FILE: &'static str = "rf.txt";
+
+    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
+    std::fs::write(
+        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
+        "new-contents",
+    )
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(
+            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    renamed_dir_name = "new_first_directory/second_directory";
+
+    std::fs::rename(
+        work_dir.join("first_directory"),
+        work_dir.join("new_first_directory"),
+    )
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(
+            snapshot.status_for_file(
+                project_path
+                    .join(Path::new(renamed_dir_name))
+                    .join(RENAMED_FILE)
+            ),
+            Some(GitFileStatus::Added)
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".git": {},
+            "a": {
+                "b": {
+                    "c1.txt": "",
+                    "c2.txt": "",
+                },
+                "d": {
+                    "e1.txt": "",
+                    "e2.txt": "",
+                    "e3.txt": "",
+                }
+            },
+            "f": {
+                "no-status.txt": ""
+            },
+            "g": {
+                "h1.txt": "",
+                "h2.txt": ""
+            },
+
+        }),
+    )
+    .await;
+
+    fs.set_status_for_repo_via_git_operation(
+        &Path::new("/root/.git"),
+        &[
+            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
+            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
+            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
+        ],
+    );
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    cx.executor().run_until_parked();
+    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new(""), Some(GitFileStatus::Conflict)),
+            (Path::new("a"), Some(GitFileStatus::Modified)),
+            (Path::new("a/b"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d"), Some(GitFileStatus::Modified)),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f"), None),
+            (Path::new("f/no-status.txt"), None),
+            (Path::new("g"), Some(GitFileStatus::Conflict)),
+            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
+        ],
+    );
+
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new("a/b"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d"), Some(GitFileStatus::Modified)),
+            (Path::new("a/d/e1.txt"), None),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f"), None),
+            (Path::new("f/no-status.txt"), None),
+            (Path::new("g"), Some(GitFileStatus::Conflict)),
+        ],
+    );
+
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d/e1.txt"), None),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f/no-status.txt"), None),
+        ],
+    );
+
+    #[track_caller]
+    fn check_propagated_statuses(
+        snapshot: &Snapshot,
+        expected_statuses: &[(&Path, Option<GitFileStatus>)],
+    ) {
+        let mut entries = expected_statuses
+            .iter()
+            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
+            .collect::<Vec<_>>();
+        snapshot.propagate_git_statuses(&mut entries);
+        assert_eq!(
+            entries
+                .iter()
+                .map(|e| (e.path.as_ref(), e.git_status))
+                .collect::<Vec<_>>(),
+            expected_statuses
+        );
+    }
+}
+
+fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
+    let http_client = FakeHttpClient::with_404_response();
+    cx.read(|cx| Client::new(http_client, cx))
+}
+
+#[track_caller]
+fn git_init(path: &Path) -> git2::Repository {
+    git2::Repository::init(path).expect("Failed to initialize git repository")
+}
+
+#[track_caller]
+fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
+    let path = path.as_ref();
+    let mut index = repo.index().expect("Failed to get index");
+    index.add_path(path).expect("Failed to add path to index");
+    index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_remove_index(path: &Path, repo: &git2::Repository) {
+    let mut index = repo.index().expect("Failed to get index");
+    index.remove_path(path).expect("Failed to remove path from index");
+    index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_commit(msg: &'static str, repo: &git2::Repository) {
+    use git2::Signature;
+
+    let signature = Signature::now("test", "test@zed.dev").unwrap();
+    let oid = repo.index().unwrap().write_tree().unwrap();
+    let tree = repo.find_tree(oid).unwrap();
+    if let Ok(head) = repo.head() {
+        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
+
+        let parent_commit = parent_obj.as_commit().unwrap();
+
+        repo.commit(
+            Some("HEAD"),
+            &signature,
+            &signature,
+            msg,
+            &tree,
+            &[parent_commit],
+        )
+        .expect("Failed to commit with parent");
+    } else {
+        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
+            .expect("Failed to commit");
+    }
+}
+
+#[track_caller]
+fn git_stash(repo: &mut git2::Repository) {
+    use git2::Signature;
+
+    let signature = Signature::now("test", "test@zed.dev").unwrap();
+    repo.stash_save(&signature, "N/A", None)
+        .expect("Failed to stash");
+}
+
+#[track_caller]
+fn git_reset(offset: usize, repo: &git2::Repository) {
+    let head = repo.head().expect("Couldn't get repo head");
+    let object = head.peel(git2::ObjectType::Commit).unwrap();
+    let commit = object.as_commit().unwrap();
+    let new_head = commit
+        .parents()
+        .nth(offset)
+        .expect("Not enough history");
+    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
+        .expect("Could not reset");
+}
+
+#[allow(dead_code)]
+#[track_caller]
+fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
+    repo.statuses(None)
+        .unwrap()
+        .iter()
+        .map(|status| (status.path().unwrap().to_string(), status.status()))
+        .collect()
+}
+
+#[track_caller]
+fn check_worktree_entries(
+    tree: &Worktree,
+    expected_excluded_paths: &[&str],
+    expected_ignored_paths: &[&str],
+    expected_tracked_paths: &[&str],
+) {
+    for path in expected_excluded_paths {
+        let entry = tree.entry_for_path(path);
+        assert!(
+            entry.is_none(),
+            "expected path '{path}' to be excluded, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_ignored_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
+        assert!(
+            entry.is_ignored,
+            "expected path '{path}' to be ignored, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_tracked_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
+        assert!(
+            !entry.is_ignored,
+            "expected path '{path}' to be tracked, but got entry: {entry:?}",
+        );
+    }
+}
+
+fn init_test(cx: &mut gpui::TestAppContext) {
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+        Project::init_settings(cx);
+    });
+}
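
Note on the expectations in test_propagate_git_statuses above: they imply that a directory entry reports the most severe status found among its descendants (Conflict over Modified over Added). A minimal standalone sketch of that fold, with GitFileStatus redeclared locally purely for illustration (the real type and propagation logic live in the worktree code):

    // Severity ordering inferred from the test expectations: Added < Modified < Conflict.
    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
    enum GitFileStatus {
        Added,
        Modified,
        Conflict,
    }

    // A directory's status is the maximum severity among its descendants, if any.
    fn propagated_status(descendants: &[Option<GitFileStatus>]) -> Option<GitFileStatus> {
        descendants.iter().flatten().copied().max()
    }

    fn main() {
        // Mirrors the "a" directory in the test: one Added file, one Modified file.
        let a = [Some(GitFileStatus::Added), None, Some(GitFileStatus::Modified)];
        assert_eq!(propagated_status(&a), Some(GitFileStatus::Modified));
    }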

crates/project_panel/src/project_panel.rs 🔗

@@ -1732,7 +1732,7 @@ mod tests {
     use super::*;
     use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle};
     use pretty_assertions::assert_eq;
-    use project::FakeFs;
+    use project::{project_settings::ProjectSettings, FakeFs};
     use serde_json::json;
     use settings::SettingsStore;
     use std::{
@@ -1832,6 +1832,123 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions =
+                        Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
+                });
+            });
+        });
+
+        let fs = FakeFs::new(cx.background());
+        fs.insert_tree(
+            "/root1",
+            json!({
+                ".dockerignore": "",
+                ".git": {
+                    "HEAD": "",
+                },
+                "a": {
+                    "0": { "q": "", "r": "", "s": "" },
+                    "1": { "t": "", "u": "" },
+                    "2": { "v": "", "w": "", "x": "", "y": "" },
+                },
+                "b": {
+                    "3": { "Q": "" },
+                    "4": { "R": "", "S": "", "T": "", "U": "" },
+                },
+                "C": {
+                    "5": {},
+                    "6": { "V": "", "W": "" },
+                    "7": { "X": "" },
+                    "8": { "Y": {}, "Z": "" }
+                }
+            }),
+        )
+        .await;
+        fs.insert_tree(
+            "/root2",
+            json!({
+                "d": {
+                    "4": ""
+                },
+                "e": {}
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
+        let workspace = cx
+            .add_window(|cx| Workspace::test_new(project.clone(), cx))
+            .root(cx);
+        let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    > b",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root1/b", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b  <== selected",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/d", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d  <== selected",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/e", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d",
+                "    v e  <== selected",
+            ]
+        );
+    }
+
     #[gpui::test(iterations = 30)]
     async fn test_editing_files(cx: &mut gpui::TestAppContext) {
         init_test(cx);
@@ -2929,6 +3046,12 @@ mod tests {
             workspace::init_settings(cx);
             client::init_settings(cx);
             Project::init_settings(cx);
+
+            cx.update_global::<SettingsStore, _, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions = Some(Vec::new());
+                });
+            });
         });
     }
 

crates/project_panel2/src/project_panel.rs 🔗

@@ -1,6 +1,6 @@
 pub mod file_associations;
 mod project_panel_settings;
-use settings::Settings;
+use settings::{Settings, SettingsStore};
 
 use db::kvp::KEY_VALUE_STORE;
 use editor::{scroll::autoscroll::Autoscroll, Cancel, Editor};
@@ -34,7 +34,7 @@ use ui::{h_stack, v_stack, IconElement, Label};
 use unicase::UniCase;
 use util::{maybe, ResultExt, TryFutureExt};
 use workspace::{
-    dock::{DockPosition, PanelEvent},
+    dock::{DockPosition, Panel, PanelEvent},
     Workspace,
 };
 
@@ -148,7 +148,6 @@ pub enum Event {
     SplitEntry {
         entry_id: ProjectEntryId,
     },
-    DockPositionChanged,
     Focus,
     NewSearchInDirectory {
         dir_entry: Entry,
@@ -200,10 +199,11 @@ impl ProjectPanel {
             let filename_editor = cx.build_view(|cx| Editor::single_line(cx));
 
             cx.subscribe(&filename_editor, |this, _, event, cx| match event {
-                editor::Event::BufferEdited | editor::Event::SelectionsChanged { .. } => {
+                editor::EditorEvent::BufferEdited
+                | editor::EditorEvent::SelectionsChanged { .. } => {
                     this.autoscroll(cx);
                 }
-                editor::Event::Blurred => {
+                editor::EditorEvent::Blurred => {
                     if this
                         .edit_state
                         .as_ref()
@@ -244,16 +244,17 @@ impl ProjectPanel {
             this.update_visible_entries(None, cx);
 
             // Update the dock position when the setting changes.
-            // todo!()
-            // let mut old_dock_position = this.position(cx);
-            // cx.observe_global::<SettingsStore, _>(move |this, cx| {
-            //     let new_dock_position = this.position(cx);
-            //     if new_dock_position != old_dock_position {
-            //         old_dock_position = new_dock_position;
-            //         cx.emit(Event::DockPositionChanged);
-            //     }
-            // })
-            // .detach();
+            let mut old_dock_position = this.position(cx);
+            ProjectPanelSettings::register(cx);
+            cx.observe_global::<SettingsStore>(move |this, cx| {
+                dbg!("OLA!");
+                let new_dock_position = this.position(cx);
+                if new_dock_position != old_dock_position {
+                    old_dock_position = new_dock_position;
+                    cx.emit(PanelEvent::ChangePosition);
+                }
+            })
+            .detach();
 
             this
         });
@@ -1485,7 +1486,7 @@ impl EventEmitter<Event> for ProjectPanel {}
 
 impl EventEmitter<PanelEvent> for ProjectPanel {}
 
-impl workspace::dock::Panel for ProjectPanel {
+impl Panel for ProjectPanel {
     fn position(&self, cx: &WindowContext) -> DockPosition {
         match ProjectPanelSettings::get_global(cx).dock {
             ProjectPanelDockPosition::Left => DockPosition::Left,
@@ -1571,7 +1572,7 @@ mod tests {
     use super::*;
     use gpui::{TestAppContext, View, VisualTestContext, WindowHandle};
     use pretty_assertions::assert_eq;
-    use project::FakeFs;
+    use project::{project_settings::ProjectSettings, FakeFs};
     use serde_json::json;
     use settings::SettingsStore;
     use std::{
@@ -1672,6 +1673,124 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions =
+                        Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
+                });
+            });
+        });
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            "/root1",
+            json!({
+                ".dockerignore": "",
+                ".git": {
+                    "HEAD": "",
+                },
+                "a": {
+                    "0": { "q": "", "r": "", "s": "" },
+                    "1": { "t": "", "u": "" },
+                    "2": { "v": "", "w": "", "x": "", "y": "" },
+                },
+                "b": {
+                    "3": { "Q": "" },
+                    "4": { "R": "", "S": "", "T": "", "U": "" },
+                },
+                "C": {
+                    "5": {},
+                    "6": { "V": "", "W": "" },
+                    "7": { "X": "" },
+                    "8": { "Y": {}, "Z": "" }
+                }
+            }),
+        )
+        .await;
+        fs.insert_tree(
+            "/root2",
+            json!({
+                "d": {
+                    "4": ""
+                },
+                "e": {}
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
+        let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
+        let cx = &mut VisualTestContext::from_window(*workspace, cx);
+        let panel = workspace
+            .update(cx, |workspace, cx| ProjectPanel::new(workspace, cx))
+            .unwrap();
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    > b",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root1/b", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b  <== selected",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/d", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d  <== selected",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/e", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d",
+                "    v e  <== selected",
+            ]
+        );
+    }
+
     #[gpui::test(iterations = 30)]
     async fn test_editing_files(cx: &mut gpui::TestAppContext) {
         init_test(cx);
@@ -2792,6 +2911,12 @@ mod tests {
             workspace::init_settings(cx);
             client::init_settings(cx);
             Project::init_settings(cx);
+
+            cx.update_global::<SettingsStore, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions = Some(Vec::new());
+                });
+            });
         });
     }
 

crates/rpc/proto/zed.proto 🔗

@@ -884,6 +884,7 @@ message SearchProject {
     bool case_sensitive = 5;
     string files_to_include = 6;
     string files_to_exclude = 7;
+    bool include_ignored = 8;
 }
 
 message SearchProjectResponse {

crates/rpc2/proto/zed.proto 🔗

@@ -884,6 +884,7 @@ message SearchProject {
     bool case_sensitive = 5;
     string files_to_include = 6;
     string files_to_exclude = 7;
+    bool include_ignored = 8;
 }
 
 message SearchProjectResponse {

crates/search/src/buffer_search.rs 🔗

@@ -805,6 +805,7 @@ impl BufferSearchBar {
                         query,
                         self.search_options.contains(SearchOptions::WHOLE_WORD),
                         self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                        false,
                         Vec::new(),
                         Vec::new(),
                     ) {
@@ -820,6 +821,7 @@ impl BufferSearchBar {
                         query,
                         self.search_options.contains(SearchOptions::WHOLE_WORD),
                         self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                        false,
                         Vec::new(),
                         Vec::new(),
                     ) {

crates/search/src/project_search.rs 🔗

@@ -4,7 +4,7 @@ use crate::{
     search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button},
     ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery,
     PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch,
-    ToggleCaseSensitive, ToggleReplace, ToggleWholeWord,
+    ToggleCaseSensitive, ToggleIncludeIgnored, ToggleReplace, ToggleWholeWord,
 };
 use anyhow::{Context, Result};
 use collections::HashMap;
@@ -85,6 +85,7 @@ pub fn init(cx: &mut AppContext) {
     cx.capture_action(ProjectSearchView::replace_next);
     add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
     add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
+    add_toggle_option_action::<ToggleIncludeIgnored>(SearchOptions::INCLUDE_IGNORED, cx);
     add_toggle_filters_action::<ToggleFilters>(cx);
 }
 
@@ -1192,6 +1193,7 @@ impl ProjectSearchView {
                     text,
                     self.search_options.contains(SearchOptions::WHOLE_WORD),
                     self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                    self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
                     included_files,
                     excluded_files,
                 ) {
@@ -1210,6 +1212,7 @@ impl ProjectSearchView {
                 text,
                 self.search_options.contains(SearchOptions::WHOLE_WORD),
                 self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
                 included_files,
                 excluded_files,
             ) {
@@ -1764,6 +1767,17 @@ impl View for ProjectSearchBar {
                 render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx)
             });
 
+            let mut include_ignored = is_semantic_disabled.then(|| {
+                render_option_button_icon(
+                    // TODO proper icon
+                    "icons/case_insensitive.svg",
+                    SearchOptions::INCLUDE_IGNORED,
+                    cx,
+                )
+            });
+            // TODO not implemented yet
+            let _ = include_ignored.take();
+
             let search_button_for_mode = |mode, side, cx: &mut ViewContext<ProjectSearchBar>| {
                 let is_active = if let Some(search) = self.active_project_search.as_ref() {
                     let search = search.read(cx);
@@ -1879,7 +1893,15 @@ impl View for ProjectSearchBar {
                 .with_children(search.filters_enabled.then(|| {
                     Flex::row()
                         .with_child(
-                            ChildView::new(&search.included_files_editor, cx)
+                            Flex::row()
+                                .with_child(
+                                    ChildView::new(&search.included_files_editor, cx)
+                                        .contained()
+                                        .constrained()
+                                        .with_height(theme.search.search_bar_row_height)
+                                        .flex(1., true),
+                                )
+                                .with_children(include_ignored)
                                 .contained()
                                 .with_style(include_container_style)
                                 .constrained()

crates/search/src/search.rs 🔗

@@ -29,6 +29,7 @@ actions!(
         CycleMode,
         ToggleWholeWord,
         ToggleCaseSensitive,
+        ToggleIncludeIgnored,
         ToggleReplace,
         SelectNextMatch,
         SelectPrevMatch,
@@ -49,31 +50,35 @@ bitflags! {
         const NONE = 0b000;
         const WHOLE_WORD = 0b001;
         const CASE_SENSITIVE = 0b010;
+        const INCLUDE_IGNORED = 0b100;
     }
 }
 
 impl SearchOptions {
     pub fn label(&self) -> &'static str {
         match *self {
-            SearchOptions::WHOLE_WORD => "Match Whole Word",
-            SearchOptions::CASE_SENSITIVE => "Match Case",
-            _ => panic!("{:?} is not a named SearchOption", self),
+            Self::WHOLE_WORD => "Match Whole Word",
+            Self::CASE_SENSITIVE => "Match Case",
+            Self::INCLUDE_IGNORED => "Include Ignored",
+            _ => panic!("{self:?} is not a named SearchOption"),
         }
     }
 
     pub fn icon(&self) -> &'static str {
         match *self {
-            SearchOptions::WHOLE_WORD => "icons/word_search.svg",
-            SearchOptions::CASE_SENSITIVE => "icons/case_insensitive.svg",
-            _ => panic!("{:?} is not a named SearchOption", self),
+            Self::WHOLE_WORD => "icons/word_search.svg",
+            Self::CASE_SENSITIVE => "icons/case_insensitive.svg",
+            Self::INCLUDE_IGNORED => "icons/case_insensitive.svg",
+            _ => panic!("{self:?} is not a named SearchOption"),
         }
     }
 
     pub fn to_toggle_action(&self) -> Box<dyn Action> {
         match *self {
-            SearchOptions::WHOLE_WORD => Box::new(ToggleWholeWord),
-            SearchOptions::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
-            _ => panic!("{:?} is not a named SearchOption", self),
+            Self::WHOLE_WORD => Box::new(ToggleWholeWord),
+            Self::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
+            Self::INCLUDE_IGNORED => Box::new(ToggleIncludeIgnored),
+            _ => panic!("{self:?} is not a named SearchOption"),
         }
     }
 
@@ -85,6 +90,7 @@ impl SearchOptions {
         let mut options = SearchOptions::NONE;
         options.set(SearchOptions::WHOLE_WORD, query.whole_word());
         options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive());
+        options.set(SearchOptions::INCLUDE_IGNORED, query.include_ignored());
         options
     }
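
The INCLUDE_IGNORED flag added above composes with the existing options exactly like WHOLE_WORD and CASE_SENSITIVE: it is one more bit in the bitflags set, toggled via set and queried via contains. A self-contained sketch of that round-trip (assuming the same bitflags 1.x style and a u8 backing type, which is an assumption here):

    use bitflags::bitflags;

    bitflags! {
        pub struct SearchOptions: u8 {
            const NONE = 0b000;
            const WHOLE_WORD = 0b001;
            const CASE_SENSITIVE = 0b010;
            const INCLUDE_IGNORED = 0b100;
        }
    }

    fn main() {
        // Start from NONE and flip individual bits, as SearchOptions::from_query does above.
        let mut options = SearchOptions::NONE;
        options.set(SearchOptions::INCLUDE_IGNORED, true);
        options.set(SearchOptions::CASE_SENSITIVE, true);
        assert!(options.contains(SearchOptions::INCLUDE_IGNORED));
        assert!(!options.contains(SearchOptions::WHOLE_WORD));
    }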
 

crates/settings2/src/settings_file.rs 🔗

@@ -77,6 +77,7 @@ pub fn handle_settings_file_changes(
     });
     cx.spawn(move |mut cx| async move {
         while let Some(user_settings_content) = user_settings_file_rx.next().await {
+            eprintln!("settings file changed");
             let result = cx.update_global(|store: &mut SettingsStore, cx| {
                 store
                     .set_user_settings(&user_settings_content, cx)

crates/storybook2/src/stories/text.rs 🔗

@@ -1,4 +1,7 @@
-use gpui::{div, white, Div, ParentElement, Render, Styled, View, VisualContext, WindowContext};
+use gpui::{
+    blue, div, red, white, Div, ParentElement, Render, Styled, View, VisualContext, WindowContext,
+};
+use ui::v_stack;
 
 pub struct TextStory;
 
@@ -12,10 +15,46 @@ impl Render<Self> for TextStory {
     type Element = Div<Self>;
 
     fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> Self::Element {
-        div().size_full().bg(white()).child(concat!(
-            "The quick brown fox jumps over the lazy dog. ",
-            "Meanwhile, the lazy dog decided it was time for a change. ",
-            "He started daily workout routines, ate healthier and became the fastest dog in town.",
-        ))
+        v_stack()
+            .bg(blue())
+            .child(
+                div()
+                    .flex()
+                    .child(div().max_w_96().bg(white()).child(concat!(
+        "max-width: 96. The quick brown fox jumps over the lazy dog. ",
+        "Meanwhile, the lazy dog decided it was time for a change. ",
+        "He started daily workout routines, ate healthier and became the fastest dog in town.",
+    ))),
+            )
+            .child(div().h_5())
+            .child(div().flex().flex_col().w_96().bg(white()).child(concat!(
+        "flex-col. width: 96; The quick brown fox jumps over the lazy dog. ",
+        "Meanwhile, the lazy dog decided it was time for a change. ",
+        "He started daily workout routines, ate healthier and became the fastest dog in town.",
+    )))
+            .child(div().h_5())
+            .child(
+                div()
+                    .flex()
+                    .child(div().min_w_96().bg(white()).child(concat!(
+    "min-width: 96. The quick brown fox jumps over the lazy dog. ",
+    "Meanwhile, the lazy dog decided it was time for a change. ",
+    "He started daily workout routines, ate healthier and became the fastest dog in town.",
+))))
+            .child(div().h_5())
+            .child(div().flex().w_96().bg(white()).child(div().overflow_hidden().child(concat!(
+        "flex-row. width 96. overflow-hidden. The quick brown fox jumps over the lazy dog. ",
+        "Meanwhile, the lazy dog decided it was time for a change. ",
+        "He started daily workout routines, ate healthier and became the fastest dog in town.",
+    ))))
+            // NOTE: When rendering text in a horizontal flex container,
+            // Taffy will not pass width constraints down from the parent.
+            // To fix this, render text in a parent with overflow: hidden, which
+            // makes the width constraint take effect (see the overflow-hidden example above).
+            .child(div().h_5())
+            .child(div().flex().w_96().bg(red()).child(concat!(
+                "flex-row. width 96. The quick brown fox jumps over the lazy dog. ",
+                "Meanwhile, the lazy dog decided it was time for a change. ",
+                "He started daily workout routines, ate healthier and became the fastest dog in town.",
+            )))
     }
 }

crates/storybook3/src/storybook3.rs 🔗

@@ -1,9 +1,9 @@
 use anyhow::Result;
-use gpui::AssetSource;
 use gpui::{
     div, px, size, AnyView, Bounds, Div, Render, ViewContext, VisualContext, WindowBounds,
     WindowOptions,
 };
+use gpui::{white, AssetSource};
 use settings::{default_settings, Settings, SettingsStore};
 use std::borrow::Cow;
 use std::sync::Arc;
@@ -56,6 +56,7 @@ fn main() {
 }
 
 struct TestView {
+    #[allow(unused)]
     story: AnyView,
 }
 
@@ -65,9 +66,22 @@ impl Render<Self> for TestView {
     fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element {
         div()
             .flex()
+            .bg(gpui::blue())
             .flex_col()
             .size_full()
             .font("Helvetica")
-            .child(self.story.clone())
+            .child(div().h_5())
+            .child(
+                div()
+                    .flex()
+                    .w_96()
+                    .bg(white())
+                    .relative()
+                    .child(div().child(concat!(
+            "The quick brown fox jumps over the lazy dog. ",
+            "Meanwhile, the lazy dog decided it was time for a change. ",
+            "He started daily workout routines, ate healthier and became the fastest dog in town.",
+        ))),
+            )
     }
 }

crates/terminal_view2/src/terminal_view.rs 🔗

@@ -31,7 +31,7 @@ use workspace::{
     notifications::NotifyResultExt,
     register_deserializable_item,
     searchable::{SearchEvent, SearchOptions, SearchableItem},
-    ui::{ContextMenu, Label},
+    ui::{ContextMenu, Icon, IconElement, Label, ListEntry},
     CloseActiveItem, NewCenterTerminal, Pane, ToolbarItemLocation, Workspace, WorkspaceId,
 };
 
@@ -84,7 +84,7 @@ pub struct TerminalView {
     has_new_content: bool,
     //Currently using iTerm bell, show bell emoji in tab until input is received
     has_bell: bool,
-    context_menu: Option<View<ContextMenu>>,
+    context_menu: Option<View<ContextMenu<Self>>>,
     blink_state: bool,
     blinking_on: bool,
     blinking_paused: bool,
@@ -299,11 +299,10 @@ impl TerminalView {
         position: gpui::Point<Pixels>,
         cx: &mut ViewContext<Self>,
     ) {
-        self.context_menu = Some(cx.build_view(|cx| {
-            ContextMenu::new(cx)
-                .entry(Label::new("Clear"), Box::new(Clear))
-                .entry(
-                    Label::new("Close"),
+        self.context_menu = Some(ContextMenu::build(cx, |menu, _| {
+            menu.action(ListEntry::new(Label::new("Clear")), Box::new(Clear))
+                .action(
+                    ListEntry::new(Label::new("Close")),
                     Box::new(CloseActiveItem { save_intent: None }),
                 )
         }));
@@ -755,8 +754,8 @@ impl Item for TerminalView {
         let title = self.terminal().read(cx).title();
 
         div()
-            .child(img().uri("icons/terminal.svg").bg(red()))
-            .child(SharedString::from(title))
+            .child(IconElement::new(Icon::Terminal))
+            .child(title)
             .into_any()
     }
 

crates/theme2/src/styles/players.rs 🔗

@@ -1,6 +1,6 @@
 use gpui::Hsla;
 
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, Default)]
 pub struct PlayerColor {
     pub cursor: Hsla,
     pub background: Hsla,

crates/theme2/src/theme2.rs 🔗

@@ -130,7 +130,7 @@ impl Theme {
     }
 }
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct DiagnosticStyle {
     pub error: Hsla,
     pub warning: Hsla,

crates/ui2/Cargo.toml 🔗

@@ -18,5 +18,5 @@ theme2 = { path = "../theme2" }
 rand = "0.8"
 
 [features]
-default = ["stories"]
+default = []
 stories = ["dep:itertools"]

crates/ui2/src/components/context_menu.rs 🔗

@@ -4,58 +4,101 @@ use std::rc::Rc;
 use crate::prelude::*;
 use crate::{v_stack, Label, List, ListEntry, ListItem, ListSeparator, ListSubHeader};
 use gpui::{
+<<<<<<< HEAD
     overlay, px, Action, AnchorCorner, AnyElement, Bounds, Dismiss, DispatchPhase, Div,
     FocusHandle, LayoutId, ManagedView, MouseButton, MouseDownEvent, Pixels, Point, Render,
     RenderOnce, View,
+=======
+    overlay, px, Action, AnchorCorner, AnyElement, AppContext, Bounds, DispatchPhase, Div,
+    EventEmitter, FocusHandle, FocusableView, LayoutId, ManagedView, Manager, MouseButton,
+    MouseDownEvent, Pixels, Point, Render, View, VisualContext, WeakView,
+>>>>>>> main
 };
 
-pub struct ContextMenu {
-    items: Vec<ListItem>,
+pub enum ContextMenuItem<V> {
+    Separator(ListSeparator),
+    Header(ListSubHeader),
+    Entry(
+        ListEntry<ContextMenu<V>>,
+        Rc<dyn Fn(&mut V, &mut ViewContext<V>)>,
+    ),
+}
+
+pub struct ContextMenu<V> {
+    items: Vec<ContextMenuItem<V>>,
     focus_handle: FocusHandle,
+    handle: WeakView<V>,
 }
 
-impl ManagedView for ContextMenu {
-    fn focus_handle(&self, cx: &gpui::AppContext) -> FocusHandle {
+impl<V: Render> FocusableView for ContextMenu<V> {
+    fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
         self.focus_handle.clone()
     }
 }
 
-impl ContextMenu {
-    pub fn new(cx: &mut WindowContext) -> Self {
-        Self {
-            items: Default::default(),
-            focus_handle: cx.focus_handle(),
-        }
+impl<V: Render> EventEmitter<Manager> for ContextMenu<V> {}
+
+impl<V: Render> ContextMenu<V> {
+    pub fn build(
+        cx: &mut ViewContext<V>,
+        f: impl FnOnce(Self, &mut ViewContext<Self>) -> Self,
+    ) -> View<Self> {
+        let handle = cx.view().downgrade();
+        cx.build_view(|cx| {
+            f(
+                Self {
+                    handle,
+                    items: Default::default(),
+                    focus_handle: cx.focus_handle(),
+                },
+                cx,
+            )
+        })
     }
 
     pub fn header(mut self, title: impl Into<SharedString>) -> Self {
-        self.items.push(ListItem::Header(ListSubHeader::new(title)));
+        self.items
+            .push(ContextMenuItem::Header(ListSubHeader::new(title)));
         self
     }
 
     pub fn separator(mut self) -> Self {
-        self.items.push(ListItem::Separator(ListSeparator));
+        self.items.push(ContextMenuItem::Separator(ListSeparator));
         self
     }
 
-    pub fn entry(mut self, label: Label, action: Box<dyn Action>) -> Self {
-        self.items.push(ListEntry::new(label).action(action).into());
+    pub fn entry(
+        mut self,
+        view: ListEntry<Self>,
+        on_click: impl Fn(&mut V, &mut ViewContext<V>) + 'static,
+    ) -> Self {
+        self.items
+            .push(ContextMenuItem::Entry(view, Rc::new(on_click)));
         self
     }
 
+    pub fn action(self, view: ListEntry<Self>, action: Box<dyn Action>) -> Self {
+        // todo: add the keybindings to the list entry
+        self.entry(view, move |_, cx| cx.dispatch_action(action.boxed_clone()))
+    }
+
     pub fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext<Self>) {
         // todo!()
-        cx.emit(Dismiss);
+        cx.emit(Manager::Dismiss);
     }
 
     pub fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
-        cx.emit(Dismiss);
+        cx.emit(Manager::Dismiss);
     }
 }
 
+<<<<<<< HEAD
 impl Render<Self> for ContextMenu {
+=======
+impl<V: Render> Render for ContextMenu<V> {
+>>>>>>> main
     type Element = Div<Self>;
-    // todo!()
+
     fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
         div().elevation_2(cx).flex().flex_row().child(
             v_stack()
@@ -72,7 +115,25 @@ impl Render<Self> for ContextMenu {
                 // .bg(cx.theme().colors().elevated_surface_background)
                 // .border()
                 // .border_color(cx.theme().colors().border)
-                .child(List::new(self.items.clone())),
+                .child(List::new(
+                    self.items
+                        .iter()
+                        .map(|item| match item {
+                            ContextMenuItem::Separator(separator) => {
+                                ListItem::Separator(separator.clone())
+                            }
+                            ContextMenuItem::Header(header) => ListItem::Header(header.clone()),
+                            ContextMenuItem::Entry(entry, callback) => {
+                                let callback = callback.clone();
+                                let handle = self.handle.clone();
+                                ListItem::Entry(entry.clone().on_click(move |this, cx| {
+                                    handle.update(cx, |view, cx| callback(view, cx)).ok();
+                                    cx.emit(Manager::Dismiss);
+                                }))
+                            }
+                        })
+                        .collect(),
+                )),
         )
     }
 }
@@ -218,12 +279,13 @@ impl<V: 'static, M: ManagedView> Element<V> for MenuHandle<V, M> {
                 let new_menu = (builder)(view_state, cx);
                 let menu2 = menu.clone();
                 cx.subscribe(&new_menu, move |this, modal, e, cx| match e {
-                    &Dismiss => {
+                    &Manager::Dismiss => {
                         *menu2.borrow_mut() = None;
                         cx.notify();
                     }
                 })
                 .detach();
+                cx.focus_view(&new_menu);
                 *menu.borrow_mut() = Some(new_menu);
 
                 *position.borrow_mut() = if attach.is_some() && child_layout_id.is_some() {
@@ -258,16 +320,25 @@ pub use stories::*;
 mod stories {
     use super::*;
     use crate::story::Story;
-    use gpui::{actions, Div, Render, VisualContext};
-
-    actions!(PrintCurrentDate);
-
-    fn build_menu(cx: &mut WindowContext, header: impl Into<SharedString>) -> View<ContextMenu> {
-        cx.build_view(|cx| {
-            ContextMenu::new(cx).header(header).separator().entry(
-                Label::new("Print current time"),
-                PrintCurrentDate.boxed_clone(),
-            )
+    use gpui::{actions, Div, Render};
+
+    actions!(PrintCurrentDate, PrintBestFood);
+
+    fn build_menu<V: Render>(
+        cx: &mut ViewContext<V>,
+        header: impl Into<SharedString>,
+    ) -> View<ContextMenu<V>> {
+        let handle = cx.view().clone();
+        ContextMenu::build(cx, |menu, _| {
+            menu.header(header)
+                .separator()
+                .entry(ListEntry::new(Label::new("Print current time")), |v, cx| {
+                    println!("dispatching PrintCurrentTime action");
+                    cx.dispatch_action(PrintCurrentDate.boxed_clone())
+                })
+                .entry(ListEntry::new(Label::new("Print best food")), |v, cx| {
+                    cx.dispatch_action(PrintBestFood.boxed_clone())
+                })
         })
     }
 
@@ -279,10 +350,14 @@ mod stories {
         fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
             Story::container(cx)
                 .on_action(|_, _: &PrintCurrentDate, _| {
+                    println!("printing unix time!");
                     if let Ok(unix_time) = std::time::UNIX_EPOCH.elapsed() {
                         println!("Current Unix time is {:?}", unix_time.as_secs());
                     }
                 })
+                .on_action(|_, _: &PrintBestFood, _| {
+                    println!("burrito");
+                })
                 .flex()
                 .flex_row()
                 .justify_between()

crates/ui2/src/components/icon.rs 🔗

@@ -16,8 +16,12 @@ pub enum Icon {
     ArrowLeft,
     ArrowRight,
     ArrowUpRight,
+    AtSign,
     AudioOff,
     AudioOn,
+    Bell,
+    BellOff,
+    BellRing,
     Bolt,
     Check,
     ChevronDown,
@@ -26,12 +30,14 @@ pub enum Icon {
     ChevronUp,
     Close,
     Collab,
+    Copilot,
     Dash,
-    Exit,
+    Envelope,
     ExclamationTriangle,
+    Exit,
     File,
-    FileGeneric,
     FileDoc,
+    FileGeneric,
     FileGit,
     FileLock,
     FileRust,
@@ -44,6 +50,7 @@ pub enum Icon {
     InlayHint,
     MagicWand,
     MagnifyingGlass,
+    MailOpen,
     Maximize,
     Menu,
     MessageBubbles,
@@ -59,13 +66,6 @@ pub enum Icon {
     SplitMessage,
     Terminal,
     XCircle,
-    Copilot,
-    Envelope,
-    Bell,
-    BellOff,
-    BellRing,
-    MailOpen,
-    AtSign,
 }
 
 impl Icon {
@@ -75,8 +75,12 @@ impl Icon {
             Icon::ArrowLeft => "icons/arrow_left.svg",
             Icon::ArrowRight => "icons/arrow_right.svg",
             Icon::ArrowUpRight => "icons/arrow_up_right.svg",
+            Icon::AtSign => "icons/at-sign.svg",
             Icon::AudioOff => "icons/speaker-off.svg",
             Icon::AudioOn => "icons/speaker-loud.svg",
+            Icon::Bell => "icons/bell.svg",
+            Icon::BellOff => "icons/bell-off.svg",
+            Icon::BellRing => "icons/bell-ring.svg",
             Icon::Bolt => "icons/bolt.svg",
             Icon::Check => "icons/check.svg",
             Icon::ChevronDown => "icons/chevron_down.svg",
@@ -85,12 +89,14 @@ impl Icon {
             Icon::ChevronUp => "icons/chevron_up.svg",
             Icon::Close => "icons/x.svg",
             Icon::Collab => "icons/user_group_16.svg",
+            Icon::Copilot => "icons/copilot.svg",
             Icon::Dash => "icons/dash.svg",
-            Icon::Exit => "icons/exit.svg",
+            Icon::Envelope => "icons/feedback.svg",
             Icon::ExclamationTriangle => "icons/warning.svg",
+            Icon::Exit => "icons/exit.svg",
             Icon::File => "icons/file.svg",
-            Icon::FileGeneric => "icons/file_icons/file.svg",
             Icon::FileDoc => "icons/file_icons/book.svg",
+            Icon::FileGeneric => "icons/file_icons/file.svg",
             Icon::FileGit => "icons/file_icons/git.svg",
             Icon::FileLock => "icons/file_icons/lock.svg",
             Icon::FileRust => "icons/file_icons/rust.svg",
@@ -103,6 +109,7 @@ impl Icon {
             Icon::InlayHint => "icons/inlay_hint.svg",
             Icon::MagicWand => "icons/magic-wand.svg",
             Icon::MagnifyingGlass => "icons/magnifying_glass.svg",
+            Icon::MailOpen => "icons/mail-open.svg",
             Icon::Maximize => "icons/maximize.svg",
             Icon::Menu => "icons/menu.svg",
             Icon::MessageBubbles => "icons/conversations.svg",
@@ -118,13 +125,6 @@ impl Icon {
             Icon::SplitMessage => "icons/split_message.svg",
             Icon::Terminal => "icons/terminal.svg",
             Icon::XCircle => "icons/error.svg",
-            Icon::Copilot => "icons/copilot.svg",
-            Icon::Envelope => "icons/feedback.svg",
-            Icon::Bell => "icons/bell.svg",
-            Icon::BellOff => "icons/bell-off.svg",
-            Icon::BellRing => "icons/bell-ring.svg",
-            Icon::MailOpen => "icons/mail-open.svg",
-            Icon::AtSign => "icons/at-sign.svg",
         }
     }
 }

crates/ui2/src/components/keybinding.rs 🔗

@@ -82,16 +82,22 @@ pub enum ModifierKey {
     Shift,
 }
 
+actions!(NoAction);
+
+pub fn binding(key: &str) -> gpui::KeyBinding {
+    gpui::KeyBinding::new(key, NoAction {}, None)
+}
+
 #[cfg(feature = "stories")]
 pub use stories::*;
 
 #[cfg(feature = "stories")]
 mod stories {
     use super::*;
-    use crate::Story;
+    pub use crate::KeyBinding;
+    use crate::{binding, Story};
     use gpui::{actions, Div, Render};
     use itertools::Itertools;
-
     pub struct KeybindingStory;
 
     actions!(NoAction);
@@ -100,7 +106,7 @@ mod stories {
         gpui::KeyBinding::new(key, NoAction {}, None)
     }
 
-    impl Render<Self> for KeybindingStory {
+    impl Render for KeybindingStory {
         type Element = Div<Self>;
 
         fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {

crates/ui2/src/components/list.rs 🔗

@@ -1,4 +1,5 @@
 use gpui::{div, Action, Div, RenderOnce};
+use std::rc::Rc;
 
 use crate::settings::user_settings;
 use crate::{
@@ -232,36 +233,36 @@ pub enum ListEntrySize {
 }
 
 #[derive(RenderOnce, Clone)]
-pub enum ListItem {
-    Entry(ListEntry),
+pub enum ListItem<V: 'static> {
+    Entry(ListEntry<V>),
     Separator(ListSeparator),
     Header(ListSubHeader),
 }
 
-impl From<ListEntry> for ListItem {
-    fn from(entry: ListEntry) -> Self {
+impl<V: 'static> From<ListEntry<V>> for ListItem<V> {
+    fn from(entry: ListEntry<V>) -> Self {
         Self::Entry(entry)
     }
 }
 
-impl From<ListSeparator> for ListItem {
+impl<V: 'static> From<ListSeparator> for ListItem<V> {
     fn from(entry: ListSeparator) -> Self {
         Self::Separator(entry)
     }
 }
 
-impl From<ListSubHeader> for ListItem {
+impl<V: 'static> From<ListSubHeader> for ListItem<V> {
     fn from(entry: ListSubHeader) -> Self {
         Self::Header(entry)
     }
 }
 
-impl<V: 'static> Component<V> for ListItem {
+impl<V: 'static> Component<V> for ListItem<V> {
     type Rendered = Div<V>;
 
     fn render(self, view: &mut V, cx: &mut ViewContext<V>) -> Self::Rendered {
         match self {
-            ListItem::Entry(entry) => div().child(entry.render(view, cx)),
+            ListItem::Entry(entry) => div().child(entry.render(ix, cx)),
             ListItem::Separator(separator) => div().child(separator.render(view, cx)),
             ListItem::Header(header) => div().child(header.render(view, cx)),
         }
@@ -273,7 +274,7 @@ impl ListItem {
         Self::Entry(ListEntry::new(label))
     }
 
-    pub fn as_entry(&mut self) -> Option<&mut ListEntry> {
+    pub fn as_entry(&mut self) -> Option<&mut ListEntry<V>> {
         if let Self::Entry(entry) = self {
             Some(entry)
         } else {
@@ -283,7 +284,7 @@ impl ListItem {
 }
 
 // #[derive(RenderOnce)]
-pub struct ListEntry {
+pub struct ListEntry<V> {
     disabled: bool,
     // TODO: Reintroduce this
     // disclosure_control_style: DisclosureControlVisibility,
@@ -294,15 +295,13 @@ pub struct ListEntry {
     size: ListEntrySize,
     toggle: Toggle,
     variant: ListItemVariant,
-    on_click: Option<Box<dyn Action>>,
+    on_click: Option<Rc<dyn Fn(&mut V, &mut ViewContext<V>) + 'static>>,
 }
 
-impl Clone for ListEntry {
+impl<V> Clone for ListEntry<V> {
     fn clone(&self) -> Self {
         Self {
             disabled: self.disabled,
-            // TODO: Reintroduce this
-            // disclosure_control_style: DisclosureControlVisibility,
             indent_level: self.indent_level,
             label: self.label.clone(),
             left_slot: self.left_slot.clone(),
@@ -310,12 +309,12 @@ impl Clone for ListEntry {
             size: self.size,
             toggle: self.toggle,
             variant: self.variant,
-            on_click: self.on_click.as_ref().map(|opt| opt.boxed_clone()),
+            on_click: self.on_click.clone(),
         }
     }
 }
 
-impl ListEntry {
+impl<V: 'static> ListEntry<V> {
     pub fn new(label: Label) -> Self {
         Self {
             disabled: false,
@@ -330,8 +329,8 @@ impl ListEntry {
         }
     }
 
-    pub fn action(mut self, action: impl Into<Box<dyn Action>>) -> Self {
-        self.on_click = Some(action.into());
+    pub fn on_click(mut self, handler: impl Fn(&mut V, &mut ViewContext<V>) + 'static) -> Self {
+        self.on_click = Some(Rc::new(handler));
         self
     }
 
@@ -370,7 +369,7 @@ impl ListEntry {
         self
     }
 
-    fn render<V: 'static>(self, _view: &mut V, cx: &mut ViewContext<V>) -> impl Element<V> {
+    fn render(self, ix: usize, cx: &mut ViewContext<V>) -> Stateful<V, Div<V>> {
         let settings = user_settings(cx);
 
         let left_content = match self.left_slot.clone() {
@@ -391,21 +390,21 @@ impl ListEntry {
             ListEntrySize::Medium => div().h_7(),
         };
         div()
+            .id(ix)
             .relative()
             .hover(|mut style| {
                 style.background = Some(cx.theme().colors().editor_background.into());
                 style
             })
-            .on_mouse_down(gpui::MouseButton::Left, {
-                let action = self.on_click.map(|action| action.boxed_clone());
+            .on_click({
+                let on_click = self.on_click.clone();
 
-                move |entry: &mut V, event, cx| {
-                    if let Some(action) = action.as_ref() {
-                        cx.dispatch_action(action.boxed_clone());
+                move |view: &mut V, event, cx| {
+                    if let Some(on_click) = &on_click {
+                        (on_click)(view, cx)
                     }
                 }
             })
-            .group("")
             .bg(cx.theme().colors().surface_background)
             // TODO: Add focus state
             // .when(self.state == InteractionState::Focused, |this| {
@@ -458,7 +457,7 @@ impl<V: 'static> Component<V> for ListSeparator {
 }
 
 #[derive(RenderOnce)]
-pub struct List {
+pub struct List<V: 'static> {
     items: Vec<ListItem>,
     /// Message to display when the list is empty
     /// Defaults to "No items"
@@ -467,7 +466,7 @@ pub struct List {
     toggle: Toggle,
 }
 
-impl<V: 'static> Component<V> for List {
+impl<V: 'static> Component<V> for List<V> {
     type Rendered = Div<V>;
 
     fn render(self, view: &mut V, cx: &mut ViewContext<V>) -> Self::Rendered {
@@ -487,7 +486,7 @@ impl<V: 'static> Component<V> for List {
     }
 }
 
-impl List {
+impl<V: 'static> List<V> {
     pub fn new(items: Vec<ListItem>) -> Self {
         Self {
             items,
@@ -514,7 +513,12 @@ impl List {
 
     fn render<V: 'static>(self, _view: &mut V, cx: &mut ViewContext<V>) -> impl Element<V> {
         let list_content = match (self.items.is_empty(), self.toggle) {
-            (false, _) => div().children(self.items),
+            (false, _) => div().children(
+                self.items
+                    .into_iter()
+                    .enumerate()
+                    .map(|(ix, item)| item.render(view, ix, cx)),
+            ),
             (true, Toggle::Toggled(false)) => div(),
             (true, _) => {
                 div().child(Label::new(self.empty_message.clone()).color(TextColor::Muted))

crates/ui2/src/lib.rs 🔗

@@ -24,6 +24,7 @@ mod to_extract;
 pub mod utils;
 
 pub use components::*;
+use gpui::actions;
 pub use prelude::*;
 pub use static_data::*;
 pub use styled_ext::*;
@@ -42,3 +43,8 @@ pub use crate::settings::*;
 mod story;
 #[cfg(feature = "stories")]
 pub use story::*;
+actions!(NoAction);
+
+pub fn binding(key: &str) -> gpui::KeyBinding {
+    gpui::KeyBinding::new(key, NoAction {}, None)
+}

crates/ui2/src/static_data.rs 🔗

@@ -478,7 +478,7 @@ pub fn static_new_notification_items_2<V: 'static>() -> Vec<Notification<V>> {
     ]
 }
 
-pub fn static_project_panel_project_items() -> Vec<ListItem> {
+pub fn static_project_panel_project_items<V>() -> Vec<ListItem<V>> {
     vec![
         ListEntry::new(Label::new("zed"))
             .left_icon(Icon::FolderOpen.into())
@@ -605,7 +605,7 @@ pub fn static_project_panel_project_items() -> Vec<ListItem> {
     .collect()
 }
 
-pub fn static_project_panel_single_items() -> Vec<ListItem> {
+pub fn static_project_panel_single_items<V>() -> Vec<ListItem<V>> {
     vec![
         ListEntry::new(Label::new("todo.md"))
             .left_icon(Icon::FileDoc.into())
@@ -622,7 +622,7 @@ pub fn static_project_panel_single_items() -> Vec<ListItem> {
     .collect()
 }
 
-pub fn static_collab_panel_current_call() -> Vec<ListItem> {
+pub fn static_collab_panel_current_call<V>() -> Vec<ListItem<V>> {
     vec![
         ListEntry::new(Label::new("as-cii")).left_avatar("http://github.com/as-cii.png?s=50"),
         ListEntry::new(Label::new("nathansobo"))
@@ -635,7 +635,7 @@ pub fn static_collab_panel_current_call() -> Vec<ListItem> {
     .collect()
 }
 
-pub fn static_collab_panel_channels() -> Vec<ListItem> {
+pub fn static_collab_panel_channels<V>() -> Vec<ListItem<V>> {
     vec![
         ListEntry::new(Label::new("zed"))
             .left_icon(Icon::Hash.into())

crates/util/src/channel.rs 🔗

@@ -1,6 +1,5 @@
-use std::env;
-
 use lazy_static::lazy_static;
+use std::env;
 
 lazy_static! {
     pub static ref RELEASE_CHANNEL_NAME: String = if cfg!(debug_assertions) {
@@ -9,18 +8,22 @@ lazy_static! {
     } else {
         include_str!("../../zed/RELEASE_CHANNEL").to_string()
     };
-    pub static ref RELEASE_CHANNEL: ReleaseChannel = match RELEASE_CHANNEL_NAME.as_str() {
+    pub static ref RELEASE_CHANNEL: ReleaseChannel = match RELEASE_CHANNEL_NAME.as_str().trim() {
         "dev" => ReleaseChannel::Dev,
+        "nightly" => ReleaseChannel::Nightly,
         "preview" => ReleaseChannel::Preview,
         "stable" => ReleaseChannel::Stable,
         _ => panic!("invalid release channel {}", *RELEASE_CHANNEL_NAME),
     };
 }
 
+pub struct AppCommitSha(pub String);
+
 #[derive(Copy, Clone, PartialEq, Eq, Default)]
 pub enum ReleaseChannel {
     #[default]
     Dev,
+    Nightly,
     Preview,
     Stable,
 }
@@ -29,6 +32,7 @@ impl ReleaseChannel {
     pub fn display_name(&self) -> &'static str {
         match self {
             ReleaseChannel::Dev => "Zed Dev",
+            ReleaseChannel::Nightly => "Zed Nightly",
             ReleaseChannel::Preview => "Zed Preview",
             ReleaseChannel::Stable => "Zed",
         }
@@ -37,6 +41,7 @@ impl ReleaseChannel {
     pub fn dev_name(&self) -> &'static str {
         match self {
             ReleaseChannel::Dev => "dev",
+            ReleaseChannel::Nightly => "nightly",
             ReleaseChannel::Preview => "preview",
             ReleaseChannel::Stable => "stable",
         }
@@ -45,6 +50,7 @@ impl ReleaseChannel {
     pub fn url_scheme(&self) -> &'static str {
         match self {
             ReleaseChannel::Dev => "zed-dev://",
+            ReleaseChannel::Nightly => "zed-nightly://",
             ReleaseChannel::Preview => "zed-preview://",
             ReleaseChannel::Stable => "zed://",
         }
@@ -53,15 +59,27 @@ impl ReleaseChannel {
     pub fn link_prefix(&self) -> &'static str {
         match self {
             ReleaseChannel::Dev => "https://zed.dev/dev/",
+            // TODO kb need to add server handling
+            ReleaseChannel::Nightly => "https://zed.dev/nightly/",
             ReleaseChannel::Preview => "https://zed.dev/preview/",
             ReleaseChannel::Stable => "https://zed.dev/",
         }
     }
+
+    pub fn release_query_param(&self) -> Option<&'static str> {
+        match self {
+            Self::Dev => None,
+            Self::Nightly => Some("nightly=1"),
+            Self::Preview => Some("preview=1"),
+            Self::Stable => None,
+        }
+    }
 }
 
 pub fn parse_zed_link(link: &str) -> Option<&str> {
     for release in [
         ReleaseChannel::Dev,
+        ReleaseChannel::Nightly,
         ReleaseChannel::Preview,
         ReleaseChannel::Stable,
     ] {
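
As an aside on the `.trim()` added to the `RELEASE_CHANNEL` match above: `include_str!` keeps the file's trailing newline, so an untrimmed `"nightly\n"` would fall through to the `panic!` arm. A minimal standalone sketch of that matching logic (not part of this diff):

```rust
// Minimal sketch (not part of the diff): the RELEASE_CHANNEL file contents keep
// their trailing newline, so the name must be trimmed before matching.
fn channel_from_file_contents(contents: &str) -> &'static str {
    match contents.trim() {
        "dev" => "Dev",
        "nightly" => "Nightly",
        "preview" => "Preview",
        "stable" => "Stable",
        other => panic!("invalid release channel {other}"),
    }
}

fn main() {
    // include_str!("../../zed/RELEASE_CHANNEL") would yield e.g. "nightly\n".
    assert_eq!(channel_from_file_contents("nightly\n"), "Nightly");
}
```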

crates/util/src/paths.rs 🔗

@@ -202,6 +202,14 @@ impl std::fmt::Display for PathMatcher {
     }
 }
 
+impl PartialEq for PathMatcher {
+    fn eq(&self, other: &Self) -> bool {
+        self.maybe_path.eq(&other.maybe_path)
+    }
+}
+
+impl Eq for PathMatcher {}
+
 impl PathMatcher {
     pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
         Ok(PathMatcher {
@@ -211,7 +219,19 @@ impl PathMatcher {
     }
 
     pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
-        other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
+        other.as_ref().starts_with(&self.maybe_path)
+            || self.glob.is_match(&other)
+            || self.check_with_end_separator(other.as_ref())
+    }
+
+    fn check_with_end_separator(&self, path: &Path) -> bool {
+        let path_str = path.to_string_lossy();
+        let separator = std::path::MAIN_SEPARATOR_STR;
+        if path_str.ends_with(separator) {
+            self.glob.is_match(path)
+        } else {
+            self.glob.is_match(path_str.to_string() + separator)
+        }
     }
 }
 
@@ -388,4 +408,14 @@ mod tests {
         let path = Path::new("/a/b/c/.eslintrc.js");
         assert_eq!(path.extension_or_hidden_file_name(), Some("js"));
     }
+
+    #[test]
+    fn edge_of_glob() {
+        let path = Path::new("/work/node_modules");
+        let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
+        assert!(
+            path_matcher.is_match(&path),
+            "Path matcher {path_matcher} should match {path:?}"
+        );
+    }
 }
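
The `check_with_end_separator` fallback above works around how globset handles directory globs: `**/node_modules/**` only matches paths that continue past `node_modules/`, so the directory path itself needs a synthetic trailing separator. A standalone sketch of that behavior using the `globset` crate directly, consistent with the `edge_of_glob` test above (not part of this diff):

```rust
// Sketch of the globset behavior that motivates check_with_end_separator:
// "**/node_modules/**" does not match the bare directory path, but does once a
// trailing separator is appended.
use globset::Glob;

fn main() {
    let glob = Glob::new("**/node_modules/**").unwrap().compile_matcher();
    assert!(!glob.is_match("/work/node_modules")); // bare directory: no match
    assert!(glob.is_match("/work/node_modules/")); // with trailing separator: match
    assert!(glob.is_match("/work/node_modules/lodash/package.json")); // contents: match
}
```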

crates/workspace2/src/dock.rs 🔗

@@ -8,7 +8,9 @@ use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use std::sync::Arc;
 use theme2::ActiveTheme;
-use ui::{h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Tooltip};
+use ui::{
+    h_stack, menu_handle, ContextMenu, IconButton, InteractionState, Label, ListEntry, Tooltip,
+};
 
 pub enum PanelEvent {
     ChangePosition,
@@ -40,7 +42,7 @@ pub trait Panel: FocusableView + EventEmitter<PanelEvent> {
 }
 
 pub trait PanelHandle: Send + Sync {
-    fn id(&self) -> EntityId;
+    fn entity_id(&self) -> EntityId;
     fn persistent_name(&self) -> &'static str;
     fn position(&self, cx: &WindowContext) -> DockPosition;
     fn position_is_valid(&self, position: DockPosition, cx: &WindowContext) -> bool;
@@ -62,8 +64,8 @@ impl<T> PanelHandle for View<T>
 where
     T: Panel,
 {
-    fn id(&self) -> EntityId {
-        self.entity_id()
+    fn entity_id(&self) -> EntityId {
+        Entity::entity_id(self)
     }
 
     fn persistent_name(&self) -> &'static str {
@@ -254,20 +256,19 @@ impl Dock {
         }
     }
 
-    // todo!()
-    // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
-    //     for entry in &mut self.panel_entries {
-    //         if entry.panel.as_any() == panel {
-    //             if zoomed != entry.panel.is_zoomed(cx) {
-    //                 entry.panel.set_zoomed(zoomed, cx);
-    //             }
-    //         } else if entry.panel.is_zoomed(cx) {
-    //             entry.panel.set_zoomed(false, cx);
-    //         }
-    //     }
+    pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
+        for entry in &mut self.panel_entries {
+            if entry.panel.entity_id() == panel.entity_id() {
+                if zoomed != entry.panel.is_zoomed(cx) {
+                    entry.panel.set_zoomed(zoomed, cx);
+                }
+            } else if entry.panel.is_zoomed(cx) {
+                entry.panel.set_zoomed(false, cx);
+            }
+        }
 
-    //     cx.notify();
-    // }
+        cx.notify();
+    }
 
     pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
         for entry in &mut self.panel_entries {
@@ -277,42 +278,91 @@ impl Dock {
         }
     }
 
-    pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) {
+    pub(crate) fn add_panel<T: Panel>(
+        &mut self,
+        panel: View<T>,
+        workspace: WeakView<Workspace>,
+        cx: &mut ViewContext<Self>,
+    ) {
         let subscriptions = [
             cx.observe(&panel, |_, _, cx| cx.notify()),
-            cx.subscribe(&panel, |this, panel, event, cx| {
-                match event {
-                    PanelEvent::ChangePosition => {
-                        //todo!()
-                        // see: Workspace::add_panel_with_extra_event_handler
-                    }
-                    PanelEvent::ZoomIn => {
-                        //todo!()
-                        // see: Workspace::add_panel_with_extra_event_handler
-                    }
-                    PanelEvent::ZoomOut => {
-                        // todo!()
-                        // // see: Workspace::add_panel_with_extra_event_handler
-                    }
-                    PanelEvent::Activate => {
-                        if let Some(ix) = this
-                            .panel_entries
-                            .iter()
-                            .position(|entry| entry.panel.id() == panel.id())
-                        {
-                            this.set_open(true, cx);
-                            this.activate_panel(ix, cx);
-                            //` todo!()
-                            // cx.focus(&panel);
+            cx.subscribe(&panel, move |this, panel, event, cx| match event {
+                PanelEvent::ChangePosition => {
+                    let new_position = panel.read(cx).position(cx);
+
+                    let Ok(new_dock) = workspace.update(cx, |workspace, cx| {
+                        if panel.is_zoomed(cx) {
+                            workspace.zoomed_position = Some(new_position);
                         }
-                    }
-                    PanelEvent::Close => {
-                        if this.visible_panel().map_or(false, |p| p.id() == panel.id()) {
-                            this.set_open(false, cx);
+                        match new_position {
+                            DockPosition::Left => &workspace.left_dock,
+                            DockPosition::Bottom => &workspace.bottom_dock,
+                            DockPosition::Right => &workspace.right_dock,
+                        }
+                        .clone()
+                    }) else {
+                        return;
+                    };
+
+                    let was_visible = this.is_open()
+                        && this.visible_panel().map_or(false, |active_panel| {
+                            active_panel.entity_id() == Entity::entity_id(&panel)
+                        });
+
+                    this.remove_panel(&panel, cx);
+
+                    new_dock.update(cx, |new_dock, cx| {
+                        new_dock.add_panel(panel.clone(), workspace.clone(), cx);
+                        if was_visible {
+                            new_dock.set_open(true, cx);
+                            new_dock.activate_panel(this.panels_len() - 1, cx);
                         }
+                    });
+                }
+                PanelEvent::ZoomIn => {
+                    this.set_panel_zoomed(&panel.to_any(), true, cx);
+                    if !panel.has_focus(cx) {
+                        cx.focus_view(&panel);
+                    }
+                    workspace
+                        .update(cx, |workspace, cx| {
+                            workspace.zoomed = Some(panel.downgrade().into());
+                            workspace.zoomed_position = Some(panel.read(cx).position(cx));
+                        })
+                        .ok();
+                }
+                PanelEvent::ZoomOut => {
+                    this.set_panel_zoomed(&panel.to_any(), false, cx);
+                    workspace
+                        .update(cx, |workspace, cx| {
+                            if workspace.zoomed_position == Some(this.position) {
+                                workspace.zoomed = None;
+                                workspace.zoomed_position = None;
+                            }
+                            cx.notify();
+                        })
+                        .ok();
+                }
+                PanelEvent::Activate => {
+                    if let Some(ix) = this
+                        .panel_entries
+                        .iter()
+                        .position(|entry| entry.panel.entity_id() == Entity::entity_id(&panel))
+                    {
+                        this.set_open(true, cx);
+                        this.activate_panel(ix, cx);
+                        cx.focus_view(&panel);
+                    }
+                }
+                PanelEvent::Close => {
+                    if this
+                        .visible_panel()
+                        .map_or(false, |p| p.entity_id() == Entity::entity_id(&panel))
+                    {
+                        this.set_open(false, cx);
                     }
-                    PanelEvent::Focus => todo!(),
                 }
+                PanelEvent::Focus => todo!(),
             }),
         ];
 
@@ -335,7 +385,7 @@ impl Dock {
         if let Some(panel_ix) = self
             .panel_entries
             .iter()
-            .position(|entry| entry.panel.id() == panel.id())
+            .position(|entry| entry.panel.entity_id() == Entity::entity_id(panel))
         {
             if panel_ix == self.active_panel_index {
                 self.active_panel_index = 0;
@@ -396,7 +446,7 @@ impl Dock {
     pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> {
         self.panel_entries
             .iter()
-            .find(|entry| entry.panel.id() == panel.id())
+            .find(|entry| entry.panel.entity_id() == panel.entity_id())
             .map(|entry| entry.panel.size(cx))
     }
 
@@ -620,6 +670,7 @@ impl Render<Self> for PanelButtons {
         let dock = self.dock.read(cx);
         let active_index = dock.active_panel_index;
         let is_open = dock.is_open;
+        let dock_position = dock.position;
 
         let (menu_anchor, menu_attach) = match dock.position {
             DockPosition::Left => (AnchorCorner::BottomLeft, AnchorCorner::TopLeft),
@@ -632,9 +683,10 @@ impl Render<Self> for PanelButtons {
             .panel_entries
             .iter()
             .enumerate()
-            .filter_map(|(i, panel)| {
-                let icon = panel.panel.icon(cx)?;
-                let name = panel.panel.persistent_name();
+            .filter_map(|(i, entry)| {
+                let icon = entry.panel.icon(cx)?;
+                let name = entry.panel.persistent_name();
+                let panel = entry.panel.clone();
 
                 let mut button: IconButton<Self> = if i == active_index && is_open {
                     let action = dock.toggle_action();
@@ -645,7 +697,7 @@ impl Render<Self> for PanelButtons {
                         .action(action.boxed_clone())
                         .tooltip(move |_, cx| Tooltip::for_action(tooltip.clone(), &*action, cx))
                 } else {
-                    let action = panel.panel.toggle_action(cx);
+                    let action = entry.panel.toggle_action(cx);
 
                     IconButton::new(name, icon)
                         .action(action.boxed_clone())
@@ -655,7 +707,30 @@ impl Render<Self> for PanelButtons {
                 Some(
                     menu_handle(name)
                         .menu(move |_, cx| {
-                            cx.build_view(|cx| ContextMenu::new(cx).header("SECTION"))
+                            const POSITIONS: [DockPosition; 3] = [
+                                DockPosition::Left,
+                                DockPosition::Right,
+                                DockPosition::Bottom,
+                            ];
+                            ContextMenu::build(cx, |mut menu, cx| {
+                                for position in POSITIONS {
+                                    if position != dock_position
+                                        && panel.position_is_valid(position, cx)
+                                    {
+                                        let panel = panel.clone();
+                                        menu = menu.entry(
+                                            ListEntry::new(Label::new(format!(
+                                                "Dock {}",
+                                                position.to_label()
+                                            ))),
+                                            move |_, cx| {
+                                                panel.set_position(position, cx);
+                                            },
+                                        )
+                                    }
+                                }
+                                menu
+                            })
                         })
                         .anchor(menu_anchor)
                         .attach(menu_attach)

crates/workspace2/src/notifications.rs 🔗

@@ -15,6 +15,8 @@ pub enum NotificationEvent {
 
 pub trait Notification: EventEmitter<NotificationEvent> + Render<Self> {}
 
+impl<V: EventEmitter<NotificationEvent> + Render> Notification for V {}
+
 pub trait NotificationHandle: Send {
     fn id(&self) -> EntityId;
     fn to_any(&self) -> AnyView;
@@ -164,7 +166,7 @@ impl Workspace {
 }
 
 pub mod simple_message_notification {
-    use super::{Notification, NotificationEvent};
+    use super::NotificationEvent;
     use gpui::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext};
     use serde::Deserialize;
     use std::{borrow::Cow, sync::Arc};
@@ -359,7 +361,6 @@ pub mod simple_message_notification {
     //     }
 
     impl EventEmitter<NotificationEvent> for MessageNotification {}
-    impl Notification for MessageNotification {}
 }
 
 pub trait NotifyResultExt {

crates/workspace2/src/pane.rs 🔗

@@ -24,6 +24,7 @@ use std::{
         Arc,
     },
 };
+
 use ui::v_stack;
 use ui::{prelude::*, Icon, IconButton, IconElement, TextColor, Tooltip};
 use util::truncate_and_remove_front;
@@ -1480,15 +1481,10 @@ impl Pane {
             // Right Side
             .child(
                 div()
-                    // We only use absolute here since we don't
-                    // have opacity or `hidden()` yet
-                    .absolute()
-                    .neg_top_7()
                     .px_1()
                     .flex()
                     .flex_none()
                     .gap_2()
-                    .group_hover("tab_bar", |this| this.top_0())
                     // Nav Buttons
                     .child(
                         div()
@@ -1931,9 +1927,11 @@ impl Render<Self> for Pane {
                     .map(|task| task.detach_and_log_err(cx));
             })
             .child(self.render_tab_bar(cx))
-            .child(div() /* todo!(toolbar) */)
+            // .child(
+            //     div()
+            // ) /* todo!(toolbar) */
             .child(if let Some(item) = self.active_item() {
-                div().flex_1().child(item.to_any())
+                div().flex().flex_1().child(item.to_any())
             } else {
                 // todo!()
                 div().child("Empty Pane")

crates/workspace2/src/status_bar.rs 🔗

@@ -56,7 +56,7 @@ impl StatusBar {
     fn render_left_tools(&self, cx: &mut ViewContext<Self>) -> impl RenderOnce<Self> {
         h_stack()
             .items_center()
-            .gap_1()
+            .gap_2()
             .children(self.left_items.iter().map(|item| item.to_any()))
     }
 

crates/workspace2/src/workspace2.rs 🔗

@@ -64,7 +64,7 @@ use std::{
     time::Duration,
 };
 use theme2::{ActiveTheme, ThemeSettings};
-pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
+pub use toolbar::{ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView};
 pub use ui;
 use util::ResultExt;
 use uuid::Uuid;
@@ -813,7 +813,9 @@ impl Workspace {
             DockPosition::Right => &self.right_dock,
         };
 
-        dock.update(cx, |dock, cx| dock.add_panel(panel, cx));
+        dock.update(cx, |dock, cx| {
+            dock.add_panel(panel, self.weak_self.clone(), cx)
+        });
     }
 
     pub fn status_bar(&self) -> &View<StatusBar> {
@@ -3664,7 +3666,7 @@ impl Render<Self> for Workspace {
                                         &self.app_state,
                                         cx,
                                     ))
-                                    .child(div().flex().flex_1().child(self.bottom_dock.clone())),
+                                    .child(self.bottom_dock.clone()),
                             )
                             // Right Dock
                             .child(
@@ -3677,19 +3679,6 @@ impl Render<Self> for Workspace {
                     ),
             )
             .child(self.status_bar.clone())
-            .z_index(8)
-            // Debug
-            .child(
-                div()
-                    .flex()
-                    .flex_col()
-                    .z_index(9)
-                    .absolute()
-                    .top_20()
-                    .left_1_4()
-                    .w_40()
-                    .gap_2(),
-            )
     }
 }
 

crates/zed/Cargo.toml 🔗

@@ -170,6 +170,15 @@ osx_minimum_system_version = "10.15.7"
 osx_info_plist_exts = ["resources/info/*"]
 osx_url_schemes = ["zed-dev"]
 
+[package.metadata.bundle-nightly]
+# TODO kb different icon?
+icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"]
+identifier = "dev.zed.Zed-Nightly"
+name = "Zed Nightly"
+osx_minimum_system_version = "10.15.7"
+osx_info_plist_exts = ["resources/info/*"]
+osx_url_schemes = ["zed-nightly"]
+
 [package.metadata.bundle-preview]
 icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"]
 identifier = "dev.zed.Zed-Preview"
@@ -178,7 +187,6 @@ osx_minimum_system_version = "10.15.7"
 osx_info_plist_exts = ["resources/info/*"]
 osx_url_schemes = ["zed-preview"]
 
-
 [package.metadata.bundle-stable]
 icon = ["resources/app-icon@2x.png", "resources/app-icon.png"]
 identifier = "dev.zed.Zed"

crates/zed/src/main.rs 🔗

@@ -3,6 +3,7 @@
 
 use anyhow::{anyhow, Context, Result};
 use backtrace::Backtrace;
+use chrono::Utc;
 use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
 use client::{
     self, Client, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN,
@@ -34,7 +35,6 @@ use std::{
         Arc, Weak,
     },
     thread,
-    time::{SystemTime, UNIX_EPOCH},
 };
 use util::{
     channel::{parse_zed_link, ReleaseChannel},
@@ -404,7 +404,7 @@ struct Panic {
     os_name: String,
     os_version: Option<String>,
     architecture: String,
-    panicked_on: u128,
+    panicked_on: i64,
     #[serde(skip_serializing_if = "Option::is_none")]
     installation_id: Option<String>,
     session_id: String,
@@ -490,10 +490,7 @@ fn init_panic_hook(app: &App, installation_id: Option<String>, session_id: Strin
                 .ok()
                 .map(|os_version| os_version.to_string()),
             architecture: env::consts::ARCH.into(),
-            panicked_on: SystemTime::now()
-                .duration_since(UNIX_EPOCH)
-                .unwrap()
-                .as_millis(),
+            panicked_on: Utc::now().timestamp_millis(),
             backtrace,
             installation_id: installation_id.clone(),
             session_id: session_id.clone(),
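
On the `panicked_on: u128` → `i64` change above: `Utc::now().timestamp_millis()` already returns milliseconds since the Unix epoch as an `i64`, so the wider integer from `Duration::as_millis()` is no longer needed. A quick standalone sketch showing the two expressions agree (not part of this diff):

```rust
// Sketch: both expressions measure milliseconds since the Unix epoch; chrono
// returns i64 directly instead of Duration::as_millis()'s u128.
use chrono::Utc;
use std::time::{SystemTime, UNIX_EPOCH};

fn main() {
    let via_chrono = Utc::now().timestamp_millis();
    let via_system_time = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_millis() as i64;
    assert!((via_chrono - via_system_time).abs() < 1_000);
}
```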

crates/zed/src/only_instance.rs 🔗

@@ -17,6 +17,7 @@ fn address() -> SocketAddr {
         ReleaseChannel::Dev => 43737,
         ReleaseChannel::Preview => 43738,
         ReleaseChannel::Stable => 43739,
+        ReleaseChannel::Nightly => 43740,
     };
 
     SocketAddr::V4(SocketAddrV4::new(LOCALHOST, port))
@@ -25,6 +26,7 @@ fn address() -> SocketAddr {
 fn instance_handshake() -> &'static str {
     match *util::channel::RELEASE_CHANNEL {
         ReleaseChannel::Dev => "Zed Editor Dev Instance Running",
+        ReleaseChannel::Nightly => "Zed Editor Nightly Instance Running",
         ReleaseChannel::Preview => "Zed Editor Preview Instance Running",
         ReleaseChannel::Stable => "Zed Editor Stable Instance Running",
     }

crates/zed2/Cargo.toml 🔗

@@ -11,14 +11,14 @@ path = "src/zed2.rs"
 doctest = false
 
 [[bin]]
-name = "Zed2"
+name = "zed2"
 path = "src/main.rs"
 
 [dependencies]
 ai = { package = "ai2", path = "../ai2"}
 # audio = { path = "../audio" }
 # activity_indicator = { path = "../activity_indicator" }
-# auto_update = { path = "../auto_update" }
+auto_update = { package = "auto_update2", path = "../auto_update2" }
 # breadcrumbs = { path = "../breadcrumbs" }
 call = { package = "call2", path = "../call2" }
 # channel = { path = "../channel" }
@@ -31,7 +31,7 @@ client = { package = "client2", path = "../client2" }
 # clock = { path = "../clock" }
 copilot = { package = "copilot2", path = "../copilot2" }
 # copilot_button = { path = "../copilot_button" }
-# diagnostics = { path = "../diagnostics" }
+diagnostics = { package = "diagnostics2", path = "../diagnostics2" }
 db = { package = "db2", path = "../db2" }
 editor = { package="editor2", path = "../editor2" }
 # feedback = { path = "../feedback" }
@@ -48,7 +48,7 @@ language = { package = "language2", path = "../language2" }
 # language_selector = { path = "../language_selector" }
 lsp = { package = "lsp2", path = "../lsp2" }
 menu = { package = "menu2", path = "../menu2" }
-language_tools = { path = "../language_tools" }
+# language_tools = { path = "../language_tools" }
 node_runtime = { path = "../node_runtime" }
 # assistant = { path = "../assistant" }
 # outline = { path = "../outline" }
@@ -166,6 +166,14 @@ osx_minimum_system_version = "10.15.7"
 osx_info_plist_exts = ["resources/info/*"]
 osx_url_schemes = ["zed-dev"]
 
+[package.metadata.bundle-nightly]
+icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"]
+identifier = "dev.zed.Zed-Dev"
+name = "Zed Nightly"
+osx_minimum_system_version = "10.15.7"
+osx_info_plist_exts = ["resources/info/*"]
+osx_url_schemes = ["zed-dev"]
+
 [package.metadata.bundle-preview]
 icon = ["resources/app-icon-preview@2x.png", "resources/app-icon-preview.png"]
 identifier = "dev.zed.Zed-Preview"

crates/zed2/build.rs 🔗

@@ -1,3 +1,5 @@
+use std::process::Command;
+
 fn main() {
     println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7");
 
@@ -21,4 +23,14 @@ fn main() {
 
     // Register exported Objective-C selectors, protocols, etc
     println!("cargo:rustc-link-arg=-Wl,-ObjC");
+
+    // Populate git sha environment variable if git is available
+    if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() {
+        if output.status.success() {
+            println!(
+                "cargo:rustc-env=ZED_COMMIT_SHA={}",
+                String::from_utf8_lossy(&output.stdout).trim()
+            );
+        }
+    }
 }

crates/zed2/src/main.rs 🔗

@@ -6,6 +6,7 @@
 
 use anyhow::{anyhow, Context as _, Result};
 use backtrace::Backtrace;
+use chrono::Utc;
 use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
 use client::UserStore;
 use db::kvp::KEY_VALUE_STORE;
@@ -38,12 +39,11 @@ use std::{
         Arc,
     },
     thread,
-    time::{SystemTime, UNIX_EPOCH},
 };
 use theme::ActiveTheme;
 use util::{
     async_maybe,
-    channel::{parse_zed_link, ReleaseChannel, RELEASE_CHANNEL},
+    channel::{parse_zed_link, AppCommitSha, ReleaseChannel, RELEASE_CHANNEL},
     http::{self, HttpClient},
     paths, ResultExt,
 };
@@ -113,6 +113,10 @@ fn main() {
 
     app.run(move |cx| {
         cx.set_global(*RELEASE_CHANNEL);
+        if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") {
+            cx.set_global(AppCommitSha(build_sha.into()))
+        }
+
         cx.set_global(listener.clone());
 
         load_embedded_fonts(cx);
@@ -146,6 +150,7 @@ fn main() {
         command_palette::init(cx);
         language::init(cx);
         editor::init(cx);
+        diagnostics::init(cx);
         copilot::init(
             copilot_language_server_id,
             http.clone(),
@@ -167,7 +172,7 @@ fn main() {
         // })
         // .detach();
 
-        // client.telemetry().start(installation_id, session_id, cx);
+        client.telemetry().start(installation_id, session_id, cx);
 
         let app_state = Arc::new(AppState {
             languages,
@@ -182,7 +187,7 @@ fn main() {
         cx.set_global(Arc::downgrade(&app_state));
 
         // audio::init(Assets, cx);
-        // auto_update::init(http.clone(), client::ZED_SERVER_URL.clone(), cx);
+        auto_update::init(http.clone(), client::ZED_SERVER_URL.clone(), cx);
 
         workspace::init(app_state.clone(), cx);
         // recent_projects::init(cx);
@@ -423,7 +428,7 @@ struct Panic {
     os_name: String,
     os_version: Option<String>,
     architecture: String,
-    panicked_on: u128,
+    panicked_on: i64,
     #[serde(skip_serializing_if = "Option::is_none")]
     installation_id: Option<String>,
     session_id: String,
@@ -509,10 +514,7 @@ fn init_panic_hook(app: &App, installation_id: Option<String>, session_id: Strin
                 .as_ref()
                 .map(SemanticVersion::to_string),
             architecture: env::consts::ARCH.into(),
-            panicked_on: SystemTime::now()
-                .duration_since(UNIX_EPOCH)
-                .unwrap()
-                .as_millis(),
+            panicked_on: Utc::now().timestamp_millis(),
             backtrace,
             installation_id: installation_id.clone(),
             session_id: session_id.clone(),

crates/zed2/src/only_instance.rs 🔗

@@ -17,6 +17,7 @@ fn address() -> SocketAddr {
         ReleaseChannel::Dev => 43737,
         ReleaseChannel::Preview => 43738,
         ReleaseChannel::Stable => 43739,
+        ReleaseChannel::Nightly => 43740,
     };
 
     SocketAddr::V4(SocketAddrV4::new(LOCALHOST, port))
@@ -25,6 +26,7 @@ fn address() -> SocketAddr {
 fn instance_handshake() -> &'static str {
     match *util::channel::RELEASE_CHANNEL {
         ReleaseChannel::Dev => "Zed Editor Dev Instance Running",
+        ReleaseChannel::Nightly => "Zed Editor Nightly Instance Running",
         ReleaseChannel::Preview => "Zed Editor Preview Instance Running",
         ReleaseChannel::Stable => "Zed Editor Stable Instance Running",
     }

crates/zed2/src/zed2.rs 🔗

@@ -10,8 +10,8 @@ pub use assets::*;
 use collections::VecDeque;
 use editor::{Editor, MultiBuffer};
 use gpui::{
-    actions, point, px, AppContext, Context, PromptLevel, TitlebarOptions, ViewContext,
-    VisualContext, WindowBounds, WindowKind, WindowOptions,
+    actions, point, px, AppContext, Context, FocusableView, PromptLevel, TitlebarOptions,
+    ViewContext, VisualContext, WindowBounds, WindowKind, WindowOptions,
 };
 pub use only_instance::*;
 pub use open_listener::*;
@@ -23,7 +23,7 @@ use std::{borrow::Cow, ops::Deref, sync::Arc};
 use terminal_view::terminal_panel::TerminalPanel;
 use util::{
     asset_str,
-    channel::ReleaseChannel,
+    channel::{AppCommitSha, ReleaseChannel},
     paths::{self, LOCAL_SETTINGS_RELATIVE_PATH},
     ResultExt,
 };
@@ -104,8 +104,8 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
                             //         QuickActionBar::new(buffer_search_bar, workspace)
                             //     });
                             //     toolbar.add_item(quick_action_bar, cx);
-                            //     let diagnostic_editor_controls =
-                            //         cx.add_view(|_| diagnostics2::ToolbarControls::new());
+                            let diagnostic_editor_controls =
+                                cx.build_view(|_| diagnostics::ToolbarControls::new());
                             //     toolbar.add_item(diagnostic_editor_controls, cx);
                             //     let project_search_bar = cx.add_view(|_| ProjectSearchBar::new());
                             //     toolbar.add_item(project_search_bar, cx);
@@ -137,8 +137,8 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
 
         //     let copilot =
         //         cx.add_view(|cx| copilot_button::CopilotButton::new(app_state.fs.clone(), cx));
-        //     let diagnostic_summary =
-        //         cx.add_view(|cx| diagnostics::items::DiagnosticIndicator::new(workspace, cx));
+        let diagnostic_summary =
+            cx.build_view(|cx| diagnostics::items::DiagnosticIndicator::new(workspace, cx));
         //     let activity_indicator = activity_indicator::ActivityIndicator::new(
         //         workspace,
         //         app_state.languages.clone(),
@@ -152,7 +152,7 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
         //     });
         //     let cursor_position = cx.add_view(|_| editor::items::CursorPosition::new());
         workspace.status_bar().update(cx, |status_bar, cx| {
-            // status_bar.add_left_item(diagnostic_summary, cx);
+            status_bar.add_left_item(diagnostic_summary, cx);
             // status_bar.add_left_item(activity_indicator, cx);
 
             // status_bar.add_right_item(feedback_button, cx);
@@ -162,7 +162,7 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
             // status_bar.add_right_item(cursor_position, cx);
         });
 
-        //     auto_update::notify_of_any_new_update(cx.weak_handle(), cx);
+        auto_update::notify_of_any_new_update(cx);
 
         //     vim::observe_keystrokes(cx);
 
@@ -425,6 +425,8 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
                     }
                 }
             });
+
+        workspace.focus_handle(cx).focus(cx);
         //todo!()
         // load_default_keymap(cx);
     })
@@ -432,9 +434,16 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
 }
 
 fn about(_: &mut Workspace, _: &About, cx: &mut gpui::ViewContext<Workspace>) {
+    use std::fmt::Write as _;
+
     let app_name = cx.global::<ReleaseChannel>().display_name();
     let version = env!("CARGO_PKG_VERSION");
-    let prompt = cx.prompt(PromptLevel::Info, &format!("{app_name} {version}"), &["OK"]);
+    let mut message = format!("{app_name} {version}");
+    if let Some(sha) = cx.try_global::<AppCommitSha>() {
+        write!(&mut message, "\n\n{}", sha.0).unwrap();
+    }
+
+    let prompt = cx.prompt(PromptLevel::Info, &message, &["OK"]);
     cx.foreground_executor()
         .spawn(async {
             prompt.await.ok();

script/bump-nightly 🔗

@@ -0,0 +1,11 @@
+#!/bin/bash
+
+branch=$(git rev-parse --abbrev-ref HEAD)
+if [ "$branch" != "main" ]; then
+  echo "You must be on main to run this script"
+  exit 1
+fi
+
+git pull --ff-only origin main
+git tag -f nightly
+git push -f origin nightly

script/bump-zed-minor-versions 🔗

@@ -43,8 +43,8 @@ if [[ $patch != 0 ]]; then
   echo "patch version on main should be zero"
   exit 1
 fi
-if [[ $(cat crates/zed/RELEASE_CHANNEL) != dev ]]; then
-  echo "release channel on main should be dev"
+if [[ $(cat crates/zed/RELEASE_CHANNEL) != dev && $(cat crates/zed/RELEASE_CHANNEL) != nightly ]]; then
+  echo "release channel on main should be dev or nightly"
   exit 1
 fi
 if git show-ref --quiet refs/tags/${preview_tag_name}; then
@@ -59,6 +59,7 @@ if ! git show-ref --quiet refs/heads/${prev_minor_branch_name}; then
   echo "previous branch ${minor_branch_name} doesn't exist"
   exit 1
 fi
+# TODO kb: does anything else below need handling when RELEASE_CHANNEL == nightly?
 if [[ $(git show ${prev_minor_branch_name}:crates/zed/RELEASE_CHANNEL) != preview ]]; then
   echo "release channel on branch ${prev_minor_branch_name} should be preview"
   exit 1

script/bump-zed-patch-version 🔗

@@ -9,8 +9,11 @@ case $channel in
   preview)
     tag_suffix="-pre"
     ;;
+  nightly)
+    tag_suffix="-nightly"
+    ;;
   *)
-    echo "this must be run on a stable or preview release branch" >&2
+    echo "this must be run on either of stable|preview|nightly release branches" >&2
     exit 1
     ;;
 esac

script/bundle 🔗

@@ -9,6 +9,7 @@ local_arch=false
 local_only=false
 overwrite_local_app=false
 bundle_name=""
+zed_crate="zed"
 
 # This must match the team in the provisioning profile.
 APPLE_NOTORIZATION_TEAM="MQ55VZLNZQ"
@@ -25,13 +26,11 @@ Options:
   -o    Open the resulting DMG or the app itself in local mode.
   -f    Overwrite the local app bundle if it exists.
   -h    Display this help and exit.
+  -2    Build zed 2 instead of zed 1.
   "
 }
 
-# If -o option is specified, the folder of the resulting dmg will be opened in finder
-# If -d is specified, Zed will be compiled in debug mode and the application's path printed
-# If -od or -do is specified Zed will be bundled in debug and the application will be run.
-while getopts 'dlfoh' flag
+while getopts 'dlfoh2' flag
 do
     case "${flag}" in
         o) open_result=true;;
@@ -51,6 +50,7 @@ do
             target_dir="debug"
             ;;
         f) overwrite_local_app=true;;
+        2) zed_crate="zed2";;
         h)
            help_info
            exit 0
@@ -83,16 +83,19 @@ local_target_triple=${host_line#*: }
 
 if [ "$local_arch" = true ]; then
     echo "Building for local target only."
-    cargo build ${build_flag} --package zed
+    cargo build ${build_flag} --package ${zed_crate}
     cargo build ${build_flag} --package cli
 else
     echo "Compiling zed binaries"
-    cargo build ${build_flag} --package zed --package cli --target aarch64-apple-darwin --target x86_64-apple-darwin
+    cargo build ${build_flag} --package ${zed_crate} --package cli --target aarch64-apple-darwin --target x86_64-apple-darwin
 fi
 
 echo "Creating application bundle"
 pushd crates/zed
 channel=$(<RELEASE_CHANNEL)
+popd
+
+pushd crates/${zed_crate}
 cp Cargo.toml Cargo.toml.backup
 sed \
     -i .backup \
@@ -115,7 +118,7 @@ if [ "$local_arch" = false ]; then
         -create \
         target/{x86_64-apple-darwin,aarch64-apple-darwin}/${target_dir}/Zed \
         -output \
-        "${app_path}/Contents/MacOS/zed"
+        "${app_path}/Contents/MacOS/${zed_crate}"
     lipo \
         -create \
         target/{x86_64-apple-darwin,aarch64-apple-darwin}/${target_dir}/cli \
@@ -131,7 +134,8 @@ else
     cp -R target/${target_dir}/WebRTC.framework "${app_path}/Contents/Frameworks/"
 fi
 
-cp crates/zed/contents/$channel/embedded.provisionprofile "${app_path}/Contents/"
+#todo!(The app identifier has been set to 'Dev', but the channel is nightly, RATIONALIZE ALL OF THIS MESS)
+cp crates/${zed_crate}/contents/$channel/embedded.provisionprofile "${app_path}/Contents/"
 
 if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then
     echo "Signing bundle with Apple-issued certificate"
@@ -145,9 +149,14 @@ if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTAR
 
     # sequence of codesign commands modeled after this example: https://developer.apple.com/forums/thread/701514
     /usr/bin/codesign --deep --force --timestamp --sign "Zed Industries, Inc." "${app_path}/Contents/Frameworks/WebRTC.framework" -v
-    /usr/bin/codesign --deep --force --timestamp --options runtime --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/cli" -v
-    /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/zed" -v
-    /usr/bin/codesign --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}" -v
+
+    # todo!(restore cli to zed2)
+    if [[ "$zed_crate" == "zed" ]]; then
+        /usr/bin/codesign --deep --force --timestamp --options runtime --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/cli" -v
+    fi
+
+    /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/${zed_crate}/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/${zed_crate}" -v
+    /usr/bin/codesign --force --timestamp --options runtime --entitlements crates/${zed_crate}/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}" -v
 
     security default-keychain -s login.keychain
 else
@@ -166,7 +175,7 @@ else
     # - get a signing key for the MQ55VZLNZQ team from Nathan.
     # - create your own signing key, and update references to MQ55VZLNZQ to your own team ID
     # then comment out this line.
-    cat crates/zed/resources/zed.entitlements | sed '/com.apple.developer.associated-domains/,+1d' > "${app_path}/Contents/Resources/zed.entitlements"
+    cat crates/${zed_crate}/resources/zed.entitlements | sed '/com.apple.developer.associated-domains/,+1d' > "${app_path}/Contents/Resources/zed.entitlements"
 
     codesign --force --deep --entitlements "${app_path}/Contents/Resources/zed.entitlements" --sign ${MACOS_SIGNING_KEY:- -} "${app_path}" -v
 fi

script/deploy 🔗

@@ -4,12 +4,17 @@ set -eu
 source script/lib/deploy-helpers.sh
 
 if [[ $# < 2 ]]; then
-  echo "Usage: $0 <production|staging|preview> <tag-name>"
+  echo "Usage: $0 <production|staging|preview> <tag-name> (nightly is not yet supported)"
   exit 1
 fi
 environment=$1
 version=$2
 
+if [[ ${environment} == "nightly" ]]; then
+  echo "nightly is not yet supported"
+  exit 1
+fi
+
 export_vars_for_environment ${environment}
 image_id=$(image_id_for_version ${version})
 

script/deploy-migration 🔗

@@ -4,12 +4,17 @@ set -eu
 source script/lib/deploy-helpers.sh
 
 if [[ $# < 2 ]]; then
-  echo "Usage: $0 <production|staging|preview> <tag-name>"
+  echo "Usage: $0 <production|staging|preview> <tag-name> (nightly is not yet supported)"
   exit 1
 fi
 environment=$1
 version=$2
 
+if [[ ${environment} == "nightly" ]]; then
+  echo "nightly is not yet supported"
+  exit 1
+fi
+
 export_vars_for_environment ${environment}
 image_id=$(image_id_for_version ${version})
 
@@ -23,4 +28,4 @@ envsubst < crates/collab/k8s/migrate.template.yml | kubectl apply -f -
 pod=$(kubectl --namespace=${environment} get pods --selector=job-name=${ZED_MIGRATE_JOB_NAME} --output=jsonpath='{.items[0].metadata.name}')
 
 echo "Job pod:" $pod
-kubectl --namespace=${environment} logs -f ${pod}
+kubectl --namespace=${environment} logs -f ${pod}

script/upload-nightly 🔗

@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# Based on the template in: https://docs.digitalocean.com/reference/api/spaces-api/
+set -ux
+
+# Step 1: Define the parameters for the Space you want to upload to.
+SPACE="zed-nightly-host" # Find your endpoint in the control panel, under Settings.
+REGION="nyc3" # Must be "us-east-1" when creating new Spaces. Otherwise, use the region in your endpoint (e.g. nyc3).
+
+# Step 2: Define a function that uploads your object via cURL.
+function uploadToSpaces
+{
+  file_to_upload="$1"
+  file_name="$2"
+  space_path="nightly"
+  date=$(date +"%a, %d %b %Y %T %z")
+  acl="x-amz-acl:private"
+  content_type="application/octet-stream"
+  storage_type="x-amz-storage-class:STANDARD"
+  string="PUT\n\n${content_type}\n${date}\n${acl}\n${storage_type}\n/${SPACE}/${space_path}/${file_name}"
+  signature=$(echo -en "${string}" | openssl sha1 -hmac "${DIGITALOCEAN_SPACES_SECRET_KEY}" -binary | base64)
+
+  curl -vv -s -X PUT -T "$file_to_upload" \
+    -H "Host: ${SPACE}.${REGION}.digitaloceanspaces.com" \
+    -H "Date: $date" \
+    -H "Content-Type: $content_type" \
+    -H "$storage_type" \
+    -H "$acl" \
+    -H "Authorization: AWS ${DIGITALOCEAN_SPACES_ACCESS_KEY}:$signature" \
+    "https://${SPACE}.${REGION}.digitaloceanspaces.com/${space_path}/${file_name}"
+}
+
+sha=$(git rev-parse HEAD)
+echo ${sha} > target/latest-sha
+
+uploadToSpaces "target/release/Zed.dmg" "Zed.dmg"
+uploadToSpaces "target/latest-sha" "latest-sha"

script/what-is-deployed 🔗

@@ -4,11 +4,16 @@ set -eu
 source script/lib/deploy-helpers.sh
 
 if [[ $# < 1 ]]; then
-  echo "Usage: $0 <production|staging|preview>"
+  echo "Usage: $0 <production|staging|preview> (nightly is not yet supported)"
   exit 1
 fi
 environment=$1
 
+if [[ ${environment} == "nightly" ]]; then
+  echo "nightly is not yet supported"
+  exit 1
+fi
+
 export_vars_for_environment ${environment}
 target_zed_kube_cluster