Merge remote-tracking branch 'origin/main' into assistant-buffer

Created by Antonio Scandurra

Change summary

Cargo.lock                                         |   61 
Cargo.toml                                         |    5 
assets/settings/default.json                       |    2 
assets/settings/initial_local_settings.json        |   12 
assets/settings/initial_user_settings.json         |    6 
crates/editor/Cargo.toml                           |    2 
crates/editor/src/editor.rs                        |  138 
crates/editor/src/editor_tests.rs                  |  180 +
crates/editor/src/multi_buffer.rs                  |   14 
crates/language/Cargo.toml                         |    5 
crates/language/src/buffer.rs                      |   19 
crates/language/src/buffer_tests.rs                |    2 
crates/language/src/language.rs                    |    1 
crates/language/src/language_settings.rs           |    8 
crates/language/src/syntax_map.rs                  | 1248 -----------
crates/language/src/syntax_map/syntax_map_tests.rs | 1199 +++++++++++
crates/language_tools/Cargo.toml                   |    5 
crates/language_tools/src/language_tools.rs        |   15 
crates/language_tools/src/lsp_log.rs               |   61 
crates/language_tools/src/lsp_log_tests.rs         |    9 
crates/language_tools/src/syntax_tree_view.rs      |  675 ++++++
crates/project/src/project.rs                      |    2 
crates/project/src/worktree.rs                     | 1682 ---------------
crates/project/src/worktree_tests.rs               | 1523 ++++++++++++++
crates/settings/Cargo.toml                         |    2 
crates/settings/src/settings_store.rs              |   71 
crates/theme/src/theme.rs                          |   21 
crates/zed/Cargo.toml                              |    7 
crates/zed/src/languages.rs                        |  206 
crates/zed/src/languages/elixir/injections.scm     |    7 
crates/zed/src/languages/erb/highlights.scm        |    2 
crates/zed/src/languages/heex/config.toml          |    7 
crates/zed/src/languages/heex/highlights.scm       |   54 
crates/zed/src/languages/heex/injections.scm       |   13 
crates/zed/src/main.rs                             |    2 
crates/zed/src/zed.rs                              |    5 
styles/src/styleTree/app.ts                        |    4 
styles/src/styleTree/toolbarDropdownMenu.ts        |    8 
38 files changed, 4,099 insertions(+), 3,184 deletions(-)

Detailed changes

Cargo.lock

@@ -3515,6 +3515,29 @@ dependencies = [
  "workspace",
 ]
 
+[[package]]
+name = "language_tools"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "client",
+ "collections",
+ "editor",
+ "env_logger 0.9.3",
+ "futures 0.3.28",
+ "gpui",
+ "language",
+ "lsp",
+ "project",
+ "serde",
+ "settings",
+ "theme",
+ "tree-sitter",
+ "unindent",
+ "util",
+ "workspace",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
@@ -3759,28 +3782,6 @@ dependencies = [
  "url",
 ]
 
-[[package]]
-name = "lsp_log"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "client",
- "collections",
- "editor",
- "env_logger 0.9.3",
- "futures 0.3.28",
- "gpui",
- "language",
- "lsp",
- "project",
- "serde",
- "settings",
- "theme",
- "unindent",
- "util",
- "workspace",
-]
-
 [[package]]
 name = "mach"
 version = "0.3.2"
@@ -7358,8 +7359,8 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter"
-version = "0.20.9"
-source = "git+https://github.com/tree-sitter/tree-sitter?rev=c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14#c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14"
+version = "0.20.10"
+source = "git+https://github.com/tree-sitter/tree-sitter?rev=49226023693107fba9a1191136a4f47f38cdca73#49226023693107fba9a1191136a4f47f38cdca73"
 dependencies = [
  "cc",
  "regex",
@@ -7422,6 +7423,15 @@ dependencies = [
  "tree-sitter",
 ]
 
+[[package]]
+name = "tree-sitter-heex"
+version = "0.0.1"
+source = "git+https://github.com/phoenixframework/tree-sitter-heex?rev=2e1348c3cf2c9323e87c2744796cf3f3868aa82a#2e1348c3cf2c9323e87c2744796cf3f3868aa82a"
+dependencies = [
+ "cc",
+ "tree-sitter",
+]
+
 [[package]]
 name = "tree-sitter-html"
 version = "0.19.0"
@@ -8829,11 +8839,11 @@ dependencies = [
  "journal",
  "language",
  "language_selector",
+ "language_tools",
  "lazy_static",
  "libc",
  "log",
  "lsp",
- "lsp_log",
  "node_runtime",
  "num_cpus",
  "outline",
@@ -8875,6 +8885,7 @@ dependencies = [
  "tree-sitter-elixir",
  "tree-sitter-embedded-template",
  "tree-sitter-go",
+ "tree-sitter-heex",
  "tree-sitter-html",
  "tree-sitter-json 0.20.0",
  "tree-sitter-lua",

Cargo.toml

@@ -32,10 +32,10 @@ members = [
     "crates/journal",
     "crates/language",
     "crates/language_selector",
+    "crates/language_tools",
     "crates/live_kit_client",
     "crates/live_kit_server",
     "crates/lsp",
-    "crates/lsp_log",
     "crates/media",
     "crates/menu",
     "crates/node_runtime",
@@ -98,10 +98,11 @@ tempdir = { version = "0.3.7" }
 thiserror = { version = "1.0.29" }
 time = { version = "0.3", features = ["serde", "serde-well-known"] }
 toml = { version = "0.5" }
+tree-sitter = "0.20"
 unindent = { version = "0.1.7" }
 
 [patch.crates-io]
-tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" }
+tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "49226023693107fba9a1191136a4f47f38cdca73" }
 async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
 
 # TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457

assets/settings/default.json

@@ -108,6 +108,8 @@
   // Whether or not to remove any trailing whitespace from lines of a buffer
   // before saving it.
   "remove_trailing_whitespace_on_save": true,
+  // Whether to start a new line with a comment when a previous line is a comment as well.
+  "extend_comment_on_newline": true,
   // Whether or not to ensure there's a single newline at the end of a buffer
   // when saving it.
   "ensure_final_newline_on_save": true,

assets/settings/initial_local_settings.json

@@ -1,11 +1,5 @@
-// Folder-specific Zed settings
+// Folder-specific settings
 //
-// A subset of Zed's settings can be configured on a per-folder basis.
-//
-// For information on how to configure Zed, see the Zed
-// documentation: https://zed.dev/docs/configuring-zed
-//
-// To see all of Zed's default settings without changing your
-// custom settings, run the `open default settings` command
-// from the command palette or from `Zed` application menu.
+// For a full list of overridable settings, and general information on folder-specific settings,
+// see the documentation: https://docs.zed.dev/configuration/configuring-zed#folder-specific-settings
 {}

assets/settings/initial_user_settings.json

@@ -1,7 +1,7 @@
-// Folder-specific settings
+// Zed settings
 //
-// For a full list of overridable settings, and general information on folder-specific settings, see the documentation:
-// https://docs.zed.dev/configuration/configuring-zed#folder-specific-settings
+// For information on how to configure Zed, see the Zed
+// documentation: https://zed.dev/docs/configuring-zed
 //
 // To see all of Zed's default settings without changing your
 // custom settings, run the `open default settings` command

crates/editor/Cargo.toml

@@ -83,7 +83,7 @@ ctor.workspace = true
 env_logger.workspace = true
 rand.workspace = true
 unindent.workspace = true
-tree-sitter = "0.20"
+tree-sitter.workspace = true
 tree-sitter-rust = "0.20"
 tree-sitter-html = "0.19"
 tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }

crates/editor/src/editor.rs

@@ -2169,8 +2169,8 @@ impl Editor {
         self.transact(cx, |this, cx| {
             let (edits, selection_fixup_info): (Vec<_>, Vec<_>) = {
                 let selections = this.selections.all::<usize>(cx);
-
-                let buffer = this.buffer.read(cx).snapshot(cx);
+                let multi_buffer = this.buffer.read(cx);
+                let buffer = multi_buffer.snapshot(cx);
                 selections
                     .iter()
                     .map(|selection| {
@@ -2181,70 +2181,74 @@ impl Editor {
                         let end = selection.end;
                         let is_cursor = start == end;
                         let language_scope = buffer.language_scope_at(start);
-                        let (comment_delimiter, insert_extra_newline) =
-                            if let Some(language) = &language_scope {
-                                let leading_whitespace_len = buffer
-                                    .reversed_chars_at(start)
-                                    .take_while(|c| c.is_whitespace() && *c != '\n')
-                                    .map(|c| c.len_utf8())
-                                    .sum::<usize>();
-
-                                let trailing_whitespace_len = buffer
-                                    .chars_at(end)
-                                    .take_while(|c| c.is_whitespace() && *c != '\n')
-                                    .map(|c| c.len_utf8())
-                                    .sum::<usize>();
-
-                                let insert_extra_newline =
-                                    language.brackets().any(|(pair, enabled)| {
-                                        let pair_start = pair.start.trim_end();
-                                        let pair_end = pair.end.trim_start();
-
-                                        enabled
-                                            && pair.newline
-                                            && buffer.contains_str_at(
-                                                end + trailing_whitespace_len,
-                                                pair_end,
-                                            )
-                                            && buffer.contains_str_at(
-                                                (start - leading_whitespace_len)
-                                                    .saturating_sub(pair_start.len()),
-                                                pair_start,
-                                            )
-                                    });
-                                // Comment extension on newline is allowed only for cursor selections
-                                let comment_delimiter =
-                                    language.line_comment_prefix().filter(|_| is_cursor);
-                                let comment_delimiter = if let Some(delimiter) = comment_delimiter {
-                                    buffer
-                                        .buffer_line_for_row(start_point.row)
-                                        .is_some_and(|(snapshot, range)| {
-                                            let mut index_of_first_non_whitespace = 0;
-                                            let line_starts_with_comment = snapshot
-                                                .chars_for_range(range)
-                                                .skip_while(|c| {
-                                                    let should_skip = c.is_whitespace();
-                                                    if should_skip {
-                                                        index_of_first_non_whitespace += 1;
-                                                    }
-                                                    should_skip
-                                                })
-                                                .take(delimiter.len())
-                                                .eq(delimiter.chars());
-                                            let cursor_is_placed_after_comment_marker =
-                                                index_of_first_non_whitespace + delimiter.len()
-                                                    <= start_point.column as usize;
-                                            line_starts_with_comment
-                                                && cursor_is_placed_after_comment_marker
-                                        })
-                                        .then(|| delimiter.clone())
-                                } else {
-                                    None
-                                };
-                                (comment_delimiter, insert_extra_newline)
+                        let (comment_delimiter, insert_extra_newline) = if let Some(language) =
+                            &language_scope
+                        {
+                            let leading_whitespace_len = buffer
+                                .reversed_chars_at(start)
+                                .take_while(|c| c.is_whitespace() && *c != '\n')
+                                .map(|c| c.len_utf8())
+                                .sum::<usize>();
+
+                            let trailing_whitespace_len = buffer
+                                .chars_at(end)
+                                .take_while(|c| c.is_whitespace() && *c != '\n')
+                                .map(|c| c.len_utf8())
+                                .sum::<usize>();
+
+                            let insert_extra_newline =
+                                language.brackets().any(|(pair, enabled)| {
+                                    let pair_start = pair.start.trim_end();
+                                    let pair_end = pair.end.trim_start();
+
+                                    enabled
+                                        && pair.newline
+                                        && buffer.contains_str_at(
+                                            end + trailing_whitespace_len,
+                                            pair_end,
+                                        )
+                                        && buffer.contains_str_at(
+                                            (start - leading_whitespace_len)
+                                                .saturating_sub(pair_start.len()),
+                                            pair_start,
+                                        )
+                                });
+                            // Comment extension on newline is allowed only for cursor selections
+                            let comment_delimiter = language.line_comment_prefix().filter(|_| {
+                                let is_comment_extension_enabled =
+                                    multi_buffer.settings_at(0, cx).extend_comment_on_newline;
+                                is_cursor && is_comment_extension_enabled
+                            });
+                            let comment_delimiter = if let Some(delimiter) = comment_delimiter {
+                                buffer
+                                    .buffer_line_for_row(start_point.row)
+                                    .is_some_and(|(snapshot, range)| {
+                                        let mut index_of_first_non_whitespace = 0;
+                                        let line_starts_with_comment = snapshot
+                                            .chars_for_range(range)
+                                            .skip_while(|c| {
+                                                let should_skip = c.is_whitespace();
+                                                if should_skip {
+                                                    index_of_first_non_whitespace += 1;
+                                                }
+                                                should_skip
+                                            })
+                                            .take(delimiter.len())
+                                            .eq(delimiter.chars());
+                                        let cursor_is_placed_after_comment_marker =
+                                            index_of_first_non_whitespace + delimiter.len()
+                                                <= start_point.column as usize;
+                                        line_starts_with_comment
+                                            && cursor_is_placed_after_comment_marker
+                                    })
+                                    .then(|| delimiter.clone())
                             } else {
-                                (None, false)
+                                None
                             };
+                            (comment_delimiter, insert_extra_newline)
+                        } else {
+                            (None, false)
+                        };
 
                         let capacity_for_delimiter = comment_delimiter
                             .as_deref()
@@ -5492,7 +5496,7 @@ impl Editor {
                     let mut all_selection_lines_are_comments = true;
 
                     for row in start_row..=end_row {
-                        if snapshot.is_line_blank(row) {
+                        if snapshot.is_line_blank(row) && start_row < end_row {
                             continue;
                         }
 
@@ -7102,7 +7106,7 @@ impl Editor {
 
         let mut new_selections_by_buffer = HashMap::default();
         for selection in editor.selections.all::<usize>(cx) {
-            for (buffer, mut range) in
+            for (buffer, mut range, _) in
                 buffer.range_to_buffer_ranges(selection.start..selection.end, cx)
             {
                 if selection.reversed {
@@ -7272,7 +7276,7 @@ impl Editor {
 
         let vim_mode = cx
             .global::<SettingsStore>()
-            .untyped_user_settings()
+            .raw_user_settings()
             .get("vim_mode")
             == Some(&serde_json::Value::Bool(true));
         let telemetry_settings = *settings::get::<TelemetrySettings>(cx);

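Note: the core of the editor.rs change above is the condition for continuing a line comment when newline is invoked: the selection must be a plain cursor, the new extend_comment_on_newline setting must be enabled, the line must start with the comment delimiter, and the cursor must sit at or after the end of that delimiter. A minimal standalone sketch of that check, using plain strings and byte offsets rather than Zed's buffer and selection types (the helper name is made up):

    // Sketch of the comment-continuation check, simplified from the diff above.
    fn comment_delimiter_to_extend<'a>(
        line: &str,
        cursor_column: usize,
        delimiter: &'a str,
        extend_comment_on_newline: bool,
    ) -> Option<&'a str> {
        if !extend_comment_on_newline {
            return None;
        }
        let indent_len = line.len() - line.trim_start().len();
        let line_starts_with_comment = line[indent_len..].starts_with(delimiter);
        let cursor_is_after_marker = cursor_column >= indent_len + delimiter.len();
        (line_starts_with_comment && cursor_is_after_marker).then(|| delimiter)
    }

    fn main() {
        // Cursor at the end of "    // Foo": the comment is continued.
        assert_eq!(comment_delimiter_to_extend("    // Foo", 10, "// ", true), Some("// "));
        // Cursor before the comment start: no prefix is inserted.
        assert_eq!(comment_delimiter_to_extend("// Foo", 0, "// ", true), None);
        // The new setting turns the behavior off entirely.
        assert_eq!(comment_delimiter_to_extend("// Foo", 6, "// ", false), None);
    }
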
crates/editor/src/editor_tests.rs

@@ -1732,27 +1732,41 @@ async fn test_newline_comments(cx: &mut gpui::TestAppContext) {
         },
         None,
     ));
-
-    let mut cx = EditorTestContext::new(cx).await;
-    cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
-    cx.set_state(indoc! {"
+    {
+        let mut cx = EditorTestContext::new(cx).await;
+        cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+        cx.set_state(indoc! {"
        // Fooˇ
     "});
 
-    cx.update_editor(|e, cx| e.newline(&Newline, cx));
-    cx.assert_editor_state(indoc! {"
+        cx.update_editor(|e, cx| e.newline(&Newline, cx));
+        cx.assert_editor_state(indoc! {"
         // Foo
        //ˇ
     "});
-    // Ensure that if cursor is before the comment start, we do not actually insert a comment prefix.
-    cx.set_state(indoc! {"
+        // Ensure that if cursor is before the comment start, we do not actually insert a comment prefix.
+        cx.set_state(indoc! {"
        ˇ// Foo
     "});
-    cx.update_editor(|e, cx| e.newline(&Newline, cx));
-    cx.assert_editor_state(indoc! {"
+        cx.update_editor(|e, cx| e.newline(&Newline, cx));
+        cx.assert_editor_state(indoc! {"
 
        ˇ// Foo
     "});
+    }
+    // Ensure that comment continuations can be disabled.
+    update_test_settings(cx, |settings| {
+        settings.defaults.extend_comment_on_newline = Some(false);
+    });
+    let mut cx = EditorTestContext::new(cx).await;
+    cx.set_state(indoc! {"
+        // Fooˇ
+    "});
+    cx.update_editor(|e, cx| e.newline(&Newline, cx));
+    cx.assert_editor_state(indoc! {"
+        // Foo
+        ˇ
+    "});
 }
 
 #[gpui::test]
@@ -4930,7 +4944,7 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
 #[gpui::test]
 async fn test_toggle_comment(cx: &mut gpui::TestAppContext) {
     init_test(cx, |_| {});
-
+    let mut cx = EditorTestContext::new(cx).await;
     let language = Arc::new(Language::new(
         LanguageConfig {
             line_comment: Some("// ".into()),
@@ -4938,77 +4952,95 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) {
         },
         Some(tree_sitter_rust::language()),
     ));
+    cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
 
-    let text = "
+    // If multiple selections intersect a line, the line is only toggled once.
+    cx.set_state(indoc! {"
         fn a() {
-            //b();
+            «//b();
+            ˇ»// «c();
+            //ˇ»  d();
+        }
+    "});
+
+    cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx));
+
+    cx.assert_editor_state(indoc! {"
+        fn a() {
+            «b();
+            c();
+            ˇ» d();
+        }
+    "});
+
+    // The comment prefix is inserted at the same column for every line in a
+    // selection.
+    cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx));
+
+    cx.assert_editor_state(indoc! {"
+        fn a() {
+            // «b();
             // c();
-            //  d();
+            ˇ»//  d();
         }
-    "
-    .unindent();
+    "});
 
-    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
-    let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
-    let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
+    // If a selection ends at the beginning of a line, that line is not toggled.
+    cx.set_selections_state(indoc! {"
+        fn a() {
+            // b();
+            «// c();
+        ˇ»    //  d();
+        }
+    "});
 
-    view.update(cx, |editor, cx| {
-        // If multiple selections intersect a line, the line is only
-        // toggled once.
-        editor.change_selections(None, cx, |s| {
-            s.select_display_ranges([
-                DisplayPoint::new(1, 3)..DisplayPoint::new(2, 3),
-                DisplayPoint::new(3, 5)..DisplayPoint::new(3, 6),
-            ])
-        });
-        editor.toggle_comments(&ToggleComments::default(), cx);
-        assert_eq!(
-            editor.text(cx),
-            "
-                fn a() {
-                    b();
-                    c();
-                     d();
-                }
-            "
-            .unindent()
-        );
+    cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx));
 
-        // The comment prefix is inserted at the same column for every line
-        // in a selection.
-        editor.change_selections(None, cx, |s| {
-            s.select_display_ranges([DisplayPoint::new(1, 3)..DisplayPoint::new(3, 6)])
-        });
-        editor.toggle_comments(&ToggleComments::default(), cx);
-        assert_eq!(
-            editor.text(cx),
-            "
-                fn a() {
-                    // b();
-                    // c();
-                    //  d();
-                }
-            "
-            .unindent()
-        );
+    cx.assert_editor_state(indoc! {"
+        fn a() {
+            // b();
+            «c();
+        ˇ»    //  d();
+        }
+    "});
 
-        // If a selection ends at the beginning of a line, that line is not toggled.
-        editor.change_selections(None, cx, |s| {
-            s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(3, 0)])
-        });
-        editor.toggle_comments(&ToggleComments::default(), cx);
-        assert_eq!(
-            editor.text(cx),
-            "
-                fn a() {
-                    // b();
-                    c();
-                    //  d();
-                }
-            "
-            .unindent()
-        );
-    });
+    // If a selection spans a single line and is empty, the line is toggled.
+    cx.set_state(indoc! {"
+        fn a() {
+            a();
+            b();
+        ˇ
+        }
+    "});
+
+    cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx));
+
+    cx.assert_editor_state(indoc! {"
+        fn a() {
+            a();
+            b();
+        //•ˇ
+        }
+    "});
+
+    // If a selection spans multiple lines, empty lines are not toggled.
+    cx.set_state(indoc! {"
+        fn a() {
+            «a();
+
+            c();ˇ»
+        }
+    "});
+
+    cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx));
+
+    cx.assert_editor_state(indoc! {"
+        fn a() {
+            // «a();
+
+            // c();ˇ»
+        }
+    "});
 }
 
 #[gpui::test]

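Note: the rewritten test_toggle_comment cases above also pin down the behavior behind the one-line toggle_comments change in editor.rs: blank lines are skipped only when the selection covers more than one row, so a lone cursor on an empty line still gets a comment prefix. A standalone sketch of that row filter (hypothetical helper, not Zed code):

    // Which rows take part in a comment toggle, per the rule in the diff:
    // `is_line_blank(row) && start_row < end_row` means "skip blank lines,
    // but only for multi-row selections".
    fn rows_to_toggle(lines: &[&str], start_row: usize, end_row: usize) -> Vec<usize> {
        (start_row..=end_row)
            .filter(|&row| {
                let is_blank = lines[row].trim().is_empty();
                !(is_blank && start_row < end_row)
            })
            .collect()
    }

    fn main() {
        let lines = ["fn a() {", "    a();", "", "    c();", "}"];
        // Multi-line selection: the empty line is left alone.
        assert_eq!(rows_to_toggle(&lines, 1, 3), vec![1, 3]);
        // Cursor on the empty line: it is still commented out.
        assert_eq!(rows_to_toggle(&lines, 2, 2), vec![2]);
    }
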
crates/editor/src/multi_buffer.rs

@@ -1118,7 +1118,7 @@ impl MultiBuffer {
         &self,
         point: T,
         cx: &AppContext,
-    ) -> Option<(ModelHandle<Buffer>, usize)> {
+    ) -> Option<(ModelHandle<Buffer>, usize, ExcerptId)> {
         let snapshot = self.read(cx);
         let offset = point.to_offset(&snapshot);
         let mut cursor = snapshot.excerpts.cursor::<usize>();
@@ -1132,7 +1132,7 @@ impl MultiBuffer {
             let buffer_point = excerpt_start + offset - *cursor.start();
             let buffer = self.buffers.borrow()[&excerpt.buffer_id].buffer.clone();
 
-            (buffer, buffer_point)
+            (buffer, buffer_point, excerpt.id)
         })
     }
 
@@ -1140,7 +1140,7 @@ impl MultiBuffer {
         &self,
         range: Range<T>,
         cx: &AppContext,
-    ) -> Vec<(ModelHandle<Buffer>, Range<usize>)> {
+    ) -> Vec<(ModelHandle<Buffer>, Range<usize>, ExcerptId)> {
         let snapshot = self.read(cx);
         let start = range.start.to_offset(&snapshot);
         let end = range.end.to_offset(&snapshot);
@@ -1165,7 +1165,7 @@ impl MultiBuffer {
             let start = excerpt_start + (cmp::max(start, *cursor.start()) - *cursor.start());
             let end = excerpt_start + (cmp::min(end, end_before_newline) - *cursor.start());
             let buffer = self.buffers.borrow()[&excerpt.buffer_id].buffer.clone();
-            result.push((buffer, start..end));
+            result.push((buffer, start..end, excerpt.id));
             cursor.next(&());
         }
 
@@ -1387,7 +1387,7 @@ impl MultiBuffer {
         cx: &'a AppContext,
     ) -> Option<Arc<Language>> {
         self.point_to_buffer_offset(point, cx)
-            .and_then(|(buffer, offset)| buffer.read(cx).language_at(offset))
+            .and_then(|(buffer, offset, _)| buffer.read(cx).language_at(offset))
     }
 
     pub fn settings_at<'a, T: ToOffset>(
@@ -1397,7 +1397,7 @@ impl MultiBuffer {
     ) -> &'a LanguageSettings {
         let mut language = None;
         let mut file = None;
-        if let Some((buffer, offset)) = self.point_to_buffer_offset(point, cx) {
+        if let Some((buffer, offset, _)) = self.point_to_buffer_offset(point, cx) {
             let buffer = buffer.read(cx);
             language = buffer.language_at(offset);
             file = buffer.file();
@@ -5196,7 +5196,7 @@ mod tests {
                     .range_to_buffer_ranges(start_ix..end_ix, cx);
                 let excerpted_buffers_text = excerpted_buffer_ranges
                     .iter()
-                    .map(|(buffer, buffer_range)| {
+                    .map(|(buffer, buffer_range, _)| {
                         buffer
                             .read(cx)
                             .text_for_range(buffer_range.clone())

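Note: point_to_buffer_offset and range_to_buffer_ranges now hand back the ExcerptId as a third tuple element, and call sites either use it or discard it with `_`. A simplified standalone sketch of the lookup they perform, with plain structs standing in for Zed's excerpt types and assuming adjacent excerpts are joined by a single newline (as the end_before_newline handling above suggests):

    // Hypothetical, flattened stand-in for a multi-buffer excerpt.
    struct Excerpt {
        id: usize,
        buffer_id: usize,
        buffer_start: usize, // where the excerpt begins in its underlying buffer
        len: usize,          // how many bytes of that buffer it covers
    }

    // Map a multi-buffer offset to (buffer, buffer_offset, excerpt id),
    // mirroring the widened return tuple in the diff.
    fn offset_to_buffer_offset(excerpts: &[Excerpt], offset: usize) -> Option<(usize, usize, usize)> {
        let mut start = 0;
        for excerpt in excerpts {
            let end = start + excerpt.len;
            if offset <= end {
                return Some((excerpt.buffer_id, excerpt.buffer_start + offset - start, excerpt.id));
            }
            start = end + 1; // account for the newline joining adjacent excerpts
        }
        None
    }

    fn main() {
        let excerpts = [
            Excerpt { id: 1, buffer_id: 10, buffer_start: 0, len: 5 },
            Excerpt { id: 2, buffer_id: 11, buffer_start: 100, len: 4 },
        ];
        assert_eq!(offset_to_buffer_offset(&excerpts, 7), Some((11, 101, 2)));
        // Callers that do not care about the excerpt id ignore it, as the diff
        // does with `(buffer, offset, _)`.
        let (_buffer, _offset, _) = offset_to_buffer_offset(&excerpts, 2).unwrap();
    }
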
crates/language/Cargo.toml

@@ -55,7 +55,7 @@ serde_json.workspace = true
 similar = "1.3"
 smallvec.workspace = true
 smol.workspace = true
-tree-sitter = "0.20"
+tree-sitter.workspace = true
 tree-sitter-rust = { version = "*", optional = true }
 tree-sitter-typescript = { version = "*", optional = true }
 unicase = "2.6"
@@ -72,6 +72,8 @@ ctor.workspace = true
 env_logger.workspace = true
 indoc.workspace = true
 rand.workspace = true
+unindent.workspace = true
+
 tree-sitter-embedded-template = "*"
 tree-sitter-html = "*"
 tree-sitter-javascript = "*"
@@ -81,4 +83,3 @@ tree-sitter-rust = "*"
 tree-sitter-python = "*"
 tree-sitter-typescript = "*"
 tree-sitter-ruby = "*"
-unindent.workspace = true

crates/language/src/buffer.rs

@@ -8,7 +8,8 @@ use crate::{
     language_settings::{language_settings, LanguageSettings},
     outline::OutlineItem,
     syntax_map::{
-        SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot, ToTreeSitterPoint,
+        SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot,
+        ToTreeSitterPoint,
     },
     CodeLabel, LanguageScope, Outline,
 };
@@ -2116,12 +2117,20 @@ impl BufferSnapshot {
         }
     }
 
-    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
+    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
+        self.syntax.layers_for_range(0..self.len(), &self.text)
+    }
+
+    pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
         let offset = position.to_offset(self);
         self.syntax
             .layers_for_range(offset..offset, &self.text)
-            .filter(|l| l.node.end_byte() > offset)
+            .filter(|l| l.node().end_byte() > offset)
             .last()
+    }
+
+    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
+        self.syntax_layer_at(position)
             .map(|info| info.language)
             .or(self.language.as_ref())
     }
@@ -2140,7 +2149,7 @@ impl BufferSnapshot {
         if let Some(layer_info) = self
             .syntax
             .layers_for_range(offset..offset, &self.text)
-            .filter(|l| l.node.end_byte() > offset)
+            .filter(|l| l.node().end_byte() > offset)
             .last()
         {
             Some(LanguageScope {
@@ -2188,7 +2197,7 @@ impl BufferSnapshot {
         let range = range.start.to_offset(self)..range.end.to_offset(self);
         let mut result: Option<Range<usize>> = None;
         'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
-            let mut cursor = layer.node.walk();
+            let mut cursor = layer.node().walk();
 
             // Descend to the first leaf that touches the start of the range,
             // and if the range is non-empty, extends beyond the start.

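Note: callers now go through layer.node() because, as the syntax_map.rs hunk below shows, a layer stores its Tree plus a byte/point offset and re-anchors the root node on demand with root_node_with_offset. A standalone sketch of that pattern, assuming the pinned tree-sitter revision (which provides root_node_with_offset) and the tree-sitter-rust crate:

    use tree_sitter::{Node, Parser, Point, Tree};

    // Stand-in for a syntax layer: the parsed tree plus where its text starts
    // in the outer buffer, with the root node materialized lazily.
    struct Layer {
        tree: Tree,
        offset: (usize, Point),
    }

    impl Layer {
        fn node(&self) -> Node {
            // Same call the diff uses in SyntaxLayerInfo::node().
            self.tree.root_node_with_offset(self.offset.0, self.offset.1)
        }
    }

    fn main() {
        let mut parser = Parser::new();
        parser
            .set_language(tree_sitter_rust::language())
            .expect("load Rust grammar");
        let tree = parser.parse("fn a() {}", None).expect("parse");

        // Pretend this layer's text starts 10 bytes in, at row 2, column 0.
        let layer = Layer { tree, offset: (10, Point::new(2, 0)) };
        let root = layer.node();
        println!("root starts at {:?}", root.start_position()); // shifted by the offset
        println!("{}", root.to_sexp());
    }
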
crates/language/src/buffer_tests.rs

@@ -2242,7 +2242,7 @@ fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> Str
     buffer.read_with(cx, |buffer, _| {
         let snapshot = buffer.snapshot();
         let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
-        layers[0].node.to_sexp()
+        layers[0].node().to_sexp()
     })
 }
 

crates/language/src/language.rs

@@ -57,6 +57,7 @@ pub use buffer::*;
 pub use diagnostic_set::DiagnosticEntry;
 pub use lsp::LanguageServerId;
 pub use outline::{Outline, OutlineItem};
+pub use syntax_map::{OwnedSyntaxLayerInfo, SyntaxLayerInfo};
 pub use tree_sitter::{Parser, Tree};
 
 pub fn init(cx: &mut AppContext) {

crates/language/src/language_settings.rs

@@ -51,6 +51,7 @@ pub struct LanguageSettings {
     pub enable_language_server: bool,
     pub show_copilot_suggestions: bool,
     pub show_whitespaces: ShowWhitespaceSetting,
+    pub extend_comment_on_newline: bool,
 }
 
 #[derive(Clone, Debug, Default)]
@@ -95,6 +96,8 @@ pub struct LanguageSettingsContent {
     pub show_copilot_suggestions: Option<bool>,
     #[serde(default)]
     pub show_whitespaces: Option<ShowWhitespaceSetting>,
+    #[serde(default)]
+    pub extend_comment_on_newline: Option<bool>,
 }
 
 #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
@@ -340,7 +343,10 @@ fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent
         src.show_copilot_suggestions,
     );
     merge(&mut settings.show_whitespaces, src.show_whitespaces);
-
+    merge(
+        &mut settings.extend_comment_on_newline,
+        src.extend_comment_on_newline,
+    );
     fn merge<T>(target: &mut T, value: Option<T>) {
         if let Some(value) = value {
             *target = value;

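Note: the merge helper at the end of this hunk is the whole resolution story for the new setting: concrete defaults (seeded from default.json) are overwritten only by fields a user or folder settings file actually set. A self-contained sketch of that pattern with a hypothetical two-field pair of structs:

    // Concrete settings with defaults already applied.
    struct LanguageSettings {
        extend_comment_on_newline: bool,
        remove_trailing_whitespace_on_save: bool,
    }

    // What a user or folder settings file may provide: everything optional.
    #[derive(Default)]
    struct LanguageSettingsContent {
        extend_comment_on_newline: Option<bool>,
        remove_trailing_whitespace_on_save: Option<bool>,
    }

    fn merge_settings(settings: &mut LanguageSettings, src: &LanguageSettingsContent) {
        // Same shape as the `merge` helper in the diff: only Some values win.
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }
        merge(
            &mut settings.extend_comment_on_newline,
            src.extend_comment_on_newline,
        );
        merge(
            &mut settings.remove_trailing_whitespace_on_save,
            src.remove_trailing_whitespace_on_save,
        );
    }

    fn main() {
        let mut settings = LanguageSettings {
            extend_comment_on_newline: true,
            remove_trailing_whitespace_on_save: true,
        };
        // A user override that only disables comment continuation.
        let user = LanguageSettingsContent {
            extend_comment_on_newline: Some(false),
            ..Default::default()
        };
        merge_settings(&mut settings, &user);
        assert!(!settings.extend_comment_on_newline);
        assert!(settings.remove_trailing_whitespace_on_save);
    }
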
crates/language/src/syntax_map.rs

@@ -1,3 +1,6 @@
+#[cfg(test)]
+mod syntax_map_tests;
+
 use crate::{Grammar, InjectionConfig, Language, LanguageRegistry};
 use collections::HashMap;
 use futures::FutureExt;
@@ -125,8 +128,17 @@ impl SyntaxLayerContent {
 #[derive(Debug)]
 pub struct SyntaxLayerInfo<'a> {
     pub depth: usize,
-    pub node: Node<'a>,
     pub language: &'a Arc<Language>,
+    tree: &'a Tree,
+    offset: (usize, tree_sitter::Point),
+}
+
+#[derive(Clone)]
+pub struct OwnedSyntaxLayerInfo {
+    pub depth: usize,
+    pub language: Arc<Language>,
+    tree: tree_sitter::Tree,
+    offset: (usize, tree_sitter::Point),
 }
 
 #[derive(Debug, Clone)]
@@ -529,6 +541,15 @@ impl SyntaxSnapshot {
                             .to_ts_point();
                     }
 
+                    if included_ranges.is_empty() {
+                        included_ranges.push(tree_sitter::Range {
+                            start_byte: 0,
+                            end_byte: 0,
+                            start_point: Default::default(),
+                            end_point: Default::default(),
+                        });
+                    }
+
                     if let Some(SyntaxLayerContent::Parsed { tree: old_tree, .. }) =
                         old_layer.map(|layer| &layer.content)
                     {
@@ -664,8 +685,9 @@ impl SyntaxSnapshot {
             text,
             [SyntaxLayerInfo {
                 language,
+                tree,
                 depth: 0,
-                node: tree.root_node(),
+                offset: (0, tree_sitter::Point::new(0, 0)),
             }]
             .into_iter(),
             query,
@@ -728,9 +750,10 @@ impl SyntaxSnapshot {
             while let Some(layer) = cursor.item() {
                 if let SyntaxLayerContent::Parsed { tree, language } = &layer.content {
                     let info = SyntaxLayerInfo {
+                        tree,
                         language,
                         depth: layer.depth,
-                        node: tree.root_node_with_offset(
+                        offset: (
                             layer.range.start.to_offset(buffer),
                             layer.range.start.to_point(buffer).to_ts_point(),
                         ),
@@ -766,13 +789,8 @@ impl<'a> SyntaxMapCaptures<'a> {
             grammars: Vec::new(),
             active_layer_count: 0,
         };
-        for SyntaxLayerInfo {
-            language,
-            depth,
-            node,
-        } in layers
-        {
-            let grammar = match &language.grammar {
+        for layer in layers {
+            let grammar = match &layer.language.grammar {
                 Some(grammar) => grammar,
                 None => continue,
             };
@@ -789,7 +807,7 @@ impl<'a> SyntaxMapCaptures<'a> {
             };
 
             cursor.set_byte_range(range.clone());
-            let captures = cursor.captures(query, node, TextProvider(text));
+            let captures = cursor.captures(query, layer.node(), TextProvider(text));
             let grammar_index = result
                 .grammars
                 .iter()
@@ -799,7 +817,7 @@ impl<'a> SyntaxMapCaptures<'a> {
                     result.grammars.len() - 1
                 });
             let mut layer = SyntaxMapCapturesLayer {
-                depth,
+                depth: layer.depth,
                 grammar_index,
                 next_capture: None,
                 captures,
@@ -889,13 +907,8 @@ impl<'a> SyntaxMapMatches<'a> {
         query: fn(&Grammar) -> Option<&Query>,
     ) -> Self {
         let mut result = Self::default();
-        for SyntaxLayerInfo {
-            language,
-            depth,
-            node,
-        } in layers
-        {
-            let grammar = match &language.grammar {
+        for layer in layers {
+            let grammar = match &layer.language.grammar {
                 Some(grammar) => grammar,
                 None => continue,
             };
@@ -912,7 +925,7 @@ impl<'a> SyntaxMapMatches<'a> {
             };
 
             cursor.set_byte_range(range.clone());
-            let matches = cursor.matches(query, node, TextProvider(text));
+            let matches = cursor.matches(query, layer.node(), TextProvider(text));
             let grammar_index = result
                 .grammars
                 .iter()
@@ -922,7 +935,7 @@ impl<'a> SyntaxMapMatches<'a> {
                     result.grammars.len() - 1
                 });
             let mut layer = SyntaxMapMatchesLayer {
-                depth,
+                depth: layer.depth,
                 grammar_index,
                 matches,
                 next_pattern_index: 0,
@@ -1219,7 +1232,7 @@ fn get_injections(
     }
 }
 
-fn splice_included_ranges(
+pub(crate) fn splice_included_ranges(
     mut ranges: Vec<tree_sitter::Range>,
     changed_ranges: &[Range<usize>],
     new_ranges: &[tree_sitter::Range],
@@ -1290,7 +1303,28 @@ fn splice_included_ranges(
     ranges
 }
 
+impl OwnedSyntaxLayerInfo {
+    pub fn node(&self) -> Node {
+        self.tree
+            .root_node_with_offset(self.offset.0, self.offset.1)
+    }
+}
+
 impl<'a> SyntaxLayerInfo<'a> {
+    pub fn to_owned(&self) -> OwnedSyntaxLayerInfo {
+        OwnedSyntaxLayerInfo {
+            tree: self.tree.clone(),
+            offset: self.offset,
+            depth: self.depth,
+            language: self.language.clone(),
+        }
+    }
+
+    pub fn node(&self) -> Node<'a> {
+        self.tree
+            .root_node_with_offset(self.offset.0, self.offset.1)
+    }
+
     pub(crate) fn override_id(&self, offset: usize, text: &text::BufferSnapshot) -> Option<u32> {
         let text = TextProvider(text.as_rope());
         let config = self.language.grammar.as_ref()?.override_config.as_ref()?;
@@ -1299,7 +1333,7 @@ impl<'a> SyntaxLayerInfo<'a> {
         query_cursor.set_byte_range(offset..offset);
 
         let mut smallest_match: Option<(u32, Range<usize>)> = None;
-        for mat in query_cursor.matches(&config.query, self.node, text) {
+        for mat in query_cursor.matches(&config.query, self.node(), text) {
             for capture in mat.captures {
                 if !config.values.contains_key(&capture.index) {
                     continue;
@@ -1594,1171 +1628,3 @@ impl ToTreeSitterPoint for Point {
         Point::new(point.row as u32, point.column as u32)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::LanguageConfig;
-    use rand::rngs::StdRng;
-    use std::env;
-    use text::Buffer;
-    use unindent::Unindent as _;
-    use util::test::marked_text_ranges;
-
-    #[test]
-    fn test_splice_included_ranges() {
-        let ranges = vec![ts_range(20..30), ts_range(50..60), ts_range(80..90)];
-
-        let new_ranges = splice_included_ranges(
-            ranges.clone(),
-            &[54..56, 58..68],
-            &[ts_range(50..54), ts_range(59..67)],
-        );
-        assert_eq!(
-            new_ranges,
-            &[
-                ts_range(20..30),
-                ts_range(50..54),
-                ts_range(59..67),
-                ts_range(80..90),
-            ]
-        );
-
-        let new_ranges = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]);
-        assert_eq!(
-            new_ranges,
-            &[ts_range(20..30), ts_range(50..60), ts_range(80..90)]
-        );
-
-        let new_ranges =
-            splice_included_ranges(ranges.clone(), &[], &[ts_range(0..2), ts_range(70..75)]);
-        assert_eq!(
-            new_ranges,
-            &[
-                ts_range(0..2),
-                ts_range(20..30),
-                ts_range(50..60),
-                ts_range(70..75),
-                ts_range(80..90)
-            ]
-        );
-
-        let new_ranges = splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]);
-        assert_eq!(new_ranges, &[ts_range(25..55), ts_range(80..90)]);
-
-        fn ts_range(range: Range<usize>) -> tree_sitter::Range {
-            tree_sitter::Range {
-                start_byte: range.start,
-                start_point: tree_sitter::Point {
-                    row: 0,
-                    column: range.start,
-                },
-                end_byte: range.end,
-                end_point: tree_sitter::Point {
-                    row: 0,
-                    column: range.end,
-                },
-            }
-        }
-    }
-
-    #[gpui::test]
-    fn test_syntax_map_layers_for_range() {
-        let registry = Arc::new(LanguageRegistry::test());
-        let language = Arc::new(rust_lang());
-        registry.add(language.clone());
-
-        let mut buffer = Buffer::new(
-            0,
-            0,
-            r#"
-                fn a() {
-                    assert_eq!(
-                        b(vec![C {}]),
-                        vec![d.e],
-                    );
-                    println!("{}", f(|_| true));
-                }
-            "#
-            .unindent(),
-        );
-
-        let mut syntax_map = SyntaxMap::new();
-        syntax_map.set_language_registry(registry.clone());
-        syntax_map.reparse(language.clone(), &buffer);
-
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(2, 0)..Point::new(2, 0),
-            &[
-                "...(function_item ... (block (expression_statement (macro_invocation...",
-                "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
-            ],
-        );
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(2, 14)..Point::new(2, 16),
-            &[
-                "...(function_item ...",
-                "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
-                "...(array_expression (struct_expression ...",
-            ],
-        );
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(3, 14)..Point::new(3, 16),
-            &[
-                "...(function_item ...",
-                "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
-                "...(array_expression (field_expression ...",
-            ],
-        );
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(5, 12)..Point::new(5, 16),
-            &[
-                "...(function_item ...",
-                "...(call_expression ... (arguments (closure_expression ...",
-            ],
-        );
-
-        // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
-        let macro_name_range = range_for_text(&buffer, "vec!");
-        buffer.edit([(macro_name_range, "&")]);
-        syntax_map.interpolate(&buffer);
-        syntax_map.reparse(language.clone(), &buffer);
-
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(2, 14)..Point::new(2, 16),
-            &[
-                "...(function_item ...",
-                "...(tuple_expression (call_expression ... arguments: (arguments (reference_expression value: (array_expression...",
-            ],
-        );
-
-        // Put the vec! macro back, adding back the syntactic layer.
-        buffer.undo();
-        syntax_map.interpolate(&buffer);
-        syntax_map.reparse(language.clone(), &buffer);
-
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(2, 14)..Point::new(2, 16),
-            &[
-                "...(function_item ...",
-                "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
-                "...(array_expression (struct_expression ...",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_dynamic_language_injection() {
-        let registry = Arc::new(LanguageRegistry::test());
-        let markdown = Arc::new(markdown_lang());
-        registry.add(markdown.clone());
-        registry.add(Arc::new(rust_lang()));
-        registry.add(Arc::new(ruby_lang()));
-
-        let mut buffer = Buffer::new(
-            0,
-            0,
-            r#"
-                This is a code block:
-
-                ```rs
-                fn foo() {}
-                ```
-            "#
-            .unindent(),
-        );
-
-        let mut syntax_map = SyntaxMap::new();
-        syntax_map.set_language_registry(registry.clone());
-        syntax_map.reparse(markdown.clone(), &buffer);
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(3, 0)..Point::new(3, 0),
-            &[
-                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
-                "...(function_item name: (identifier) parameters: (parameters) body: (block)...",
-            ],
-        );
-
-        // Replace Rust with Ruby in code block.
-        let macro_name_range = range_for_text(&buffer, "rs");
-        buffer.edit([(macro_name_range, "ruby")]);
-        syntax_map.interpolate(&buffer);
-        syntax_map.reparse(markdown.clone(), &buffer);
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(3, 0)..Point::new(3, 0),
-            &[
-                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
-                "...(call method: (identifier) arguments: (argument_list (call method: (identifier) arguments: (argument_list) block: (block)...",
-            ],
-        );
-
-        // Replace Ruby with a language that hasn't been loaded yet.
-        let macro_name_range = range_for_text(&buffer, "ruby");
-        buffer.edit([(macro_name_range, "html")]);
-        syntax_map.interpolate(&buffer);
-        syntax_map.reparse(markdown.clone(), &buffer);
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(3, 0)..Point::new(3, 0),
-            &[
-                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter..."
-            ],
-        );
-        assert!(syntax_map.contains_unknown_injections());
-
-        registry.add(Arc::new(html_lang()));
-        syntax_map.reparse(markdown.clone(), &buffer);
-        assert_layers_for_range(
-            &syntax_map,
-            &buffer,
-            Point::new(3, 0)..Point::new(3, 0),
-            &[
-                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
-                "(fragment (text))",
-            ],
-        );
-        assert!(!syntax_map.contains_unknown_injections());
-    }
-
-    #[gpui::test]
-    fn test_typing_multiple_new_injections() {
-        let (buffer, syntax_map) = test_edit_sequence(
-            "Rust",
-            &[
-                "fn a() { dbg }",
-                "fn a() { dbg«!» }",
-                "fn a() { dbg!«()» }",
-                "fn a() { dbg!(«b») }",
-                "fn a() { dbg!(b«.») }",
-                "fn a() { dbg!(b.«c») }",
-                "fn a() { dbg!(b.c«()») }",
-                "fn a() { dbg!(b.c(«vec»)) }",
-                "fn a() { dbg!(b.c(vec«!»)) }",
-                "fn a() { dbg!(b.c(vec!«[]»)) }",
-                "fn a() { dbg!(b.c(vec![«d»])) }",
-                "fn a() { dbg!(b.c(vec![d«.»])) }",
-                "fn a() { dbg!(b.c(vec![d.«e»])) }",
-            ],
-        );
-
-        assert_capture_ranges(
-            &syntax_map,
-            &buffer,
-            &["field"],
-            "fn a() { dbg!(b.«c»(vec![d.«e»])) }",
-        );
-    }
-
-    #[gpui::test]
-    fn test_pasting_new_injection_line_between_others() {
-        let (buffer, syntax_map) = test_edit_sequence(
-            "Rust",
-            &[
-                "
-                    fn a() {
-                        b!(B {});
-                        c!(C {});
-                        d!(D {});
-                        e!(E {});
-                        f!(F {});
-                        g!(G {});
-                    }
-                ",
-                "
-                    fn a() {
-                        b!(B {});
-                        c!(C {});
-                        d!(D {});
-                    «    h!(H {});
-                    »    e!(E {});
-                        f!(F {});
-                        g!(G {});
-                    }
-                ",
-            ],
-        );
-
-        assert_capture_ranges(
-            &syntax_map,
-            &buffer,
-            &["struct"],
-            "
-            fn a() {
-                b!(«B {}»);
-                c!(«C {}»);
-                d!(«D {}»);
-                h!(«H {}»);
-                e!(«E {}»);
-                f!(«F {}»);
-                g!(«G {}»);
-            }
-            ",
-        );
-    }
-
-    #[gpui::test]
-    fn test_joining_injections_with_child_injections() {
-        let (buffer, syntax_map) = test_edit_sequence(
-            "Rust",
-            &[
-                "
-                    fn a() {
-                        b!(
-                            c![one.two.three],
-                            d![four.five.six],
-                        );
-                        e!(
-                            f![seven.eight],
-                        );
-                    }
-                ",
-                "
-                    fn a() {
-                        b!(
-                            c![one.two.three],
-                            d![four.five.six],
-                        ˇ    f![seven.eight],
-                        );
-                    }
-                ",
-            ],
-        );
-
-        assert_capture_ranges(
-            &syntax_map,
-            &buffer,
-            &["field"],
-            "
-            fn a() {
-                b!(
-                    c![one.«two».«three»],
-                    d![four.«five».«six»],
-                    f![seven.«eight»],
-                );
-            }
-            ",
-        );
-    }
-
-    #[gpui::test]
-    fn test_editing_edges_of_injection() {
-        test_edit_sequence(
-            "Rust",
-            &[
-                "
-                    fn a() {
-                        b!(c!())
-                    }
-                ",
-                "
-                    fn a() {
-                        «d»!(c!())
-                    }
-                ",
-                "
-                    fn a() {
-                        «e»d!(c!())
-                    }
-                ",
-                "
-                    fn a() {
-                        ed!«[»c!()«]»
-                    }
-            ",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_edits_preceding_and_intersecting_injection() {
-        test_edit_sequence(
-            "Rust",
-            &[
-                //
-                "const aaaaaaaaaaaa: B = c!(d(e.f));",
-                "const aˇa: B = c!(d(eˇ));",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_non_local_changes_create_injections() {
-        test_edit_sequence(
-            "Rust",
-            &[
-                "
-                    // a! {
-                        static B: C = d;
-                    // }
-                ",
-                "
-                    ˇa! {
-                        static B: C = d;
-                    ˇ}
-                ",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_creating_many_injections_in_one_edit() {
-        test_edit_sequence(
-            "Rust",
-            &[
-                "
-                    fn a() {
-                        one(Two::three(3));
-                        four(Five::six(6));
-                        seven(Eight::nine(9));
-                    }
-                ",
-                "
-                    fn a() {
-                        one«!»(Two::three(3));
-                        four«!»(Five::six(6));
-                        seven«!»(Eight::nine(9));
-                    }
-                ",
-                "
-                    fn a() {
-                        one!(Two::three«!»(3));
-                        four!(Five::six«!»(6));
-                        seven!(Eight::nine«!»(9));
-                    }
-                ",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_editing_across_injection_boundary() {
-        test_edit_sequence(
-            "Rust",
-            &[
-                "
-                    fn one() {
-                        two();
-                        three!(
-                            three.four,
-                            five.six,
-                        );
-                    }
-                ",
-                "
-                    fn one() {
-                        two();
-                        th«irty_five![»
-                            three.four,
-                            five.six,
-                        «   seven.eight,
-                        ];»
-                    }
-                ",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_removing_injection_by_replacing_across_boundary() {
-        test_edit_sequence(
-            "Rust",
-            &[
-                "
-                    fn one() {
-                        two!(
-                            three.four,
-                        );
-                    }
-                ",
-                "
-                    fn one() {
-                        t«en
-                            .eleven(
-                            twelve,
-                        »
-                            three.four,
-                        );
-                    }
-                ",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_combined_injections() {
-        let (buffer, syntax_map) = test_edit_sequence(
-            "ERB",
-            &[
-                "
-                    <body>
-                        <% if @one %>
-                            <div class=one>
-                        <% else %>
-                            <div class=two>
-                        <% end %>
-                        </div>
-                    </body>
-                ",
-                "
-                    <body>
-                        <% if @one %>
-                            <div class=one>
-                        ˇ else ˇ
-                            <div class=two>
-                        <% end %>
-                        </div>
-                    </body>
-                ",
-                "
-                    <body>
-                        <% if @one «;» end %>
-                        </div>
-                    </body>
-                ",
-            ],
-        );
-
-        assert_capture_ranges(
-            &syntax_map,
-            &buffer,
-            &["tag", "ivar"],
-            "
-                <«body»>
-                    <% if «@one» ; end %>
-                    </«div»>
-                </«body»>
-            ",
-        );
-    }
-
-    #[gpui::test]
-    fn test_combined_injections_empty_ranges() {
-        test_edit_sequence(
-            "ERB",
-            &[
-                "
-                    <% if @one %>
-                    <% else %>
-                    <% end %>
-                ",
-                "
-                    <% if @one %>
-                    ˇ<% end %>
-                ",
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_combined_injections_edit_edges_of_ranges() {
-        let (buffer, syntax_map) = test_edit_sequence(
-            "ERB",
-            &[
-                "
-                    <%= one @two %>
-                    <%= three @four %>
-                ",
-                "
-                    <%= one @two %ˇ
-                    <%= three @four %>
-                ",
-                "
-                    <%= one @two %«>»
-                    <%= three @four %>
-                ",
-            ],
-        );
-
-        assert_capture_ranges(
-            &syntax_map,
-            &buffer,
-            &["tag", "ivar"],
-            "
-                <%= one «@two» %>
-                <%= three «@four» %>
-            ",
-        );
-    }
-
-    #[gpui::test]
-    fn test_combined_injections_splitting_some_injections() {
-        let (_buffer, _syntax_map) = test_edit_sequence(
-            "ERB",
-            &[
-                r#"
-                      <%A if b(:c) %>
-                        d
-                      <% end %>
-                      eee
-                      <% f %>
-                "#,
-                r#"
-                      <%« AAAAAAA %>
-                        hhhhhhh
-                      <%=» if b(:c) %>
-                        d
-                      <% end %>
-                      eee
-                      <% f %>
-                "#,
-            ],
-        );
-    }
-
-    #[gpui::test]
-    fn test_combined_injections_inside_injections() {
-        let (_buffer, _syntax_map) = test_edit_sequence(
-            "Markdown",
-            &[
-                r#"
-                      here is some ERB code:
-
-                      ```erb
-                      <ul>
-                        <% people.each do |person| %>
-                          <li><%= person.name %></li>
-                        <% end %>
-                      </ul>
-                      ```
-                "#,
-                r#"
-                    here is some ERB code:
-
-                    ```erb
-                    <ul>
-                      <% people«2».each do |person| %>
-                        <li><%= person.name %></li>
-                      <% end %>
-                    </ul>
-                    ```
-                "#,
-            ],
-        );
-    }
-
-    #[gpui::test(iterations = 50)]
-    fn test_random_syntax_map_edits(mut rng: StdRng) {
-        let operations = env::var("OPERATIONS")
-            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
-            .unwrap_or(10);
-
-        let text = r#"
-            fn test_something() {
-                let vec = vec![5, 1, 3, 8];
-                assert_eq!(
-                    vec
-                        .into_iter()
-                        .map(|i| i * 2)
-                        .collect::<Vec<usize>>(),
-                    vec![
-                        5 * 2, 1 * 2, 3 * 2, 8 * 2
-                    ],
-                );
-            }
-        "#
-        .unindent()
-        .repeat(2);
-
-        let registry = Arc::new(LanguageRegistry::test());
-        let language = Arc::new(rust_lang());
-        registry.add(language.clone());
-        let mut buffer = Buffer::new(0, 0, text);
-
-        let mut syntax_map = SyntaxMap::new();
-        syntax_map.set_language_registry(registry.clone());
-        syntax_map.reparse(language.clone(), &buffer);
-
-        let mut reference_syntax_map = SyntaxMap::new();
-        reference_syntax_map.set_language_registry(registry.clone());
-
-        log::info!("initial text:\n{}", buffer.text());
-
-        for _ in 0..operations {
-            let prev_buffer = buffer.snapshot();
-            let prev_syntax_map = syntax_map.snapshot();
-
-            buffer.randomly_edit(&mut rng, 3);
-            log::info!("text:\n{}", buffer.text());
-
-            syntax_map.interpolate(&buffer);
-            check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
-
-            syntax_map.reparse(language.clone(), &buffer);
-
-            reference_syntax_map.clear();
-            reference_syntax_map.reparse(language.clone(), &buffer);
-        }
-
-        for i in 0..operations {
-            let i = operations - i - 1;
-            buffer.undo();
-            log::info!("undoing operation {}", i);
-            log::info!("text:\n{}", buffer.text());
-
-            syntax_map.interpolate(&buffer);
-            syntax_map.reparse(language.clone(), &buffer);
-
-            reference_syntax_map.clear();
-            reference_syntax_map.reparse(language.clone(), &buffer);
-            assert_eq!(
-                syntax_map.layers(&buffer).len(),
-                reference_syntax_map.layers(&buffer).len(),
-                "wrong number of layers after undoing edit {i}"
-            );
-        }
-
-        let layers = syntax_map.layers(&buffer);
-        let reference_layers = reference_syntax_map.layers(&buffer);
-        for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter())
-        {
-            assert_eq!(edited_layer.node.to_sexp(), reference_layer.node.to_sexp());
-            assert_eq!(edited_layer.node.range(), reference_layer.node.range());
-        }
-    }
-
-    #[gpui::test(iterations = 50)]
-    fn test_random_syntax_map_edits_with_combined_injections(mut rng: StdRng) {
-        let operations = env::var("OPERATIONS")
-            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
-            .unwrap_or(10);
-
-        let text = r#"
-          <div id="main">
-            <% if one?(:two) %>
-              <p class="three" four>
-                <%= yield :five %>
-              </p>
-            <% elsif Six.seven(8) %>
-              <p id="three" four>
-                <%= yield :five %>
-              </p>
-            <% else %>
-              <span>Ok</span>
-            <% end %>
-          </div>
-        "#
-        .unindent()
-        .repeat(8);
-
-        let registry = Arc::new(LanguageRegistry::test());
-        let language = Arc::new(erb_lang());
-        registry.add(language.clone());
-        registry.add(Arc::new(ruby_lang()));
-        registry.add(Arc::new(html_lang()));
-        let mut buffer = Buffer::new(0, 0, text);
-
-        let mut syntax_map = SyntaxMap::new();
-        syntax_map.set_language_registry(registry.clone());
-        syntax_map.reparse(language.clone(), &buffer);
-
-        let mut reference_syntax_map = SyntaxMap::new();
-        reference_syntax_map.set_language_registry(registry.clone());
-
-        log::info!("initial text:\n{}", buffer.text());
-
-        for _ in 0..operations {
-            let prev_buffer = buffer.snapshot();
-            let prev_syntax_map = syntax_map.snapshot();
-
-            buffer.randomly_edit(&mut rng, 3);
-            log::info!("text:\n{}", buffer.text());
-
-            syntax_map.interpolate(&buffer);
-            check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
-
-            syntax_map.reparse(language.clone(), &buffer);
-
-            reference_syntax_map.clear();
-            reference_syntax_map.reparse(language.clone(), &buffer);
-        }
-
-        for i in 0..operations {
-            let i = operations - i - 1;
-            buffer.undo();
-            log::info!("undoing operation {}", i);
-            log::info!("text:\n{}", buffer.text());
-
-            syntax_map.interpolate(&buffer);
-            syntax_map.reparse(language.clone(), &buffer);
-
-            reference_syntax_map.clear();
-            reference_syntax_map.reparse(language.clone(), &buffer);
-            assert_eq!(
-                syntax_map.layers(&buffer).len(),
-                reference_syntax_map.layers(&buffer).len(),
-                "wrong number of layers after undoing edit {i}"
-            );
-        }
-
-        let layers = syntax_map.layers(&buffer);
-        let reference_layers = reference_syntax_map.layers(&buffer);
-        for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter())
-        {
-            assert_eq!(edited_layer.node.to_sexp(), reference_layer.node.to_sexp());
-            assert_eq!(edited_layer.node.range(), reference_layer.node.range());
-        }
-    }
-
-    fn check_interpolation(
-        old_syntax_map: &SyntaxSnapshot,
-        new_syntax_map: &SyntaxSnapshot,
-        old_buffer: &BufferSnapshot,
-        new_buffer: &BufferSnapshot,
-    ) {
-        let edits = new_buffer
-            .edits_since::<usize>(&old_buffer.version())
-            .collect::<Vec<_>>();
-
-        for (old_layer, new_layer) in old_syntax_map
-            .layers
-            .iter()
-            .zip(new_syntax_map.layers.iter())
-        {
-            assert_eq!(old_layer.range, new_layer.range);
-            let Some(old_tree) = old_layer.content.tree() else { continue };
-            let Some(new_tree) = new_layer.content.tree() else { continue };
-            let old_start_byte = old_layer.range.start.to_offset(old_buffer);
-            let new_start_byte = new_layer.range.start.to_offset(new_buffer);
-            let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
-            let new_start_point = new_layer.range.start.to_point(new_buffer).to_ts_point();
-            let old_node = old_tree.root_node_with_offset(old_start_byte, old_start_point);
-            let new_node = new_tree.root_node_with_offset(new_start_byte, new_start_point);
-            check_node_edits(
-                old_layer.depth,
-                &old_layer.range,
-                old_node,
-                new_node,
-                old_buffer,
-                new_buffer,
-                &edits,
-            );
-        }
-
-        fn check_node_edits(
-            depth: usize,
-            range: &Range<Anchor>,
-            old_node: Node,
-            new_node: Node,
-            old_buffer: &BufferSnapshot,
-            new_buffer: &BufferSnapshot,
-            edits: &[text::Edit<usize>],
-        ) {
-            assert_eq!(old_node.kind(), new_node.kind());
-
-            let old_range = old_node.byte_range();
-            let new_range = new_node.byte_range();
-
-            let is_edited = edits
-                .iter()
-                .any(|edit| edit.new.start < new_range.end && edit.new.end > new_range.start);
-            if is_edited {
-                assert!(
-                    new_node.has_changes(),
-                    concat!(
-                        "failed to mark node as edited.\n",
-                        "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
-                        "node kind: {}, old node range: {:?}, new node range: {:?}",
-                    ),
-                    depth,
-                    range.to_offset(old_buffer),
-                    range.to_offset(new_buffer),
-                    new_node.kind(),
-                    old_range,
-                    new_range,
-                );
-            }
-
-            if !new_node.has_changes() {
-                assert_eq!(
-                    old_buffer
-                        .text_for_range(old_range.clone())
-                        .collect::<String>(),
-                    new_buffer
-                        .text_for_range(new_range.clone())
-                        .collect::<String>(),
-                    concat!(
-                        "mismatched text for node\n",
-                        "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
-                        "node kind: {}, old node range:{:?}, new node range:{:?}",
-                    ),
-                    depth,
-                    range.to_offset(old_buffer),
-                    range.to_offset(new_buffer),
-                    new_node.kind(),
-                    old_range,
-                    new_range,
-                );
-            }
-
-            for i in 0..new_node.child_count() {
-                check_node_edits(
-                    depth,
-                    range,
-                    old_node.child(i).unwrap(),
-                    new_node.child(i).unwrap(),
-                    old_buffer,
-                    new_buffer,
-                    edits,
-                )
-            }
-        }
-    }
-
-    fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap) {
-        let registry = Arc::new(LanguageRegistry::test());
-        registry.add(Arc::new(rust_lang()));
-        registry.add(Arc::new(ruby_lang()));
-        registry.add(Arc::new(html_lang()));
-        registry.add(Arc::new(erb_lang()));
-        registry.add(Arc::new(markdown_lang()));
-        let language = registry
-            .language_for_name(language_name)
-            .now_or_never()
-            .unwrap()
-            .unwrap();
-        let mut buffer = Buffer::new(0, 0, Default::default());
-
-        let mut mutated_syntax_map = SyntaxMap::new();
-        mutated_syntax_map.set_language_registry(registry.clone());
-        mutated_syntax_map.reparse(language.clone(), &buffer);
-
-        for (i, marked_string) in steps.into_iter().enumerate() {
-            buffer.edit_via_marked_text(&marked_string.unindent());
-
-            // Reparse the syntax map
-            mutated_syntax_map.interpolate(&buffer);
-            mutated_syntax_map.reparse(language.clone(), &buffer);
-
-            // Create a second syntax map from scratch
-            let mut reference_syntax_map = SyntaxMap::new();
-            reference_syntax_map.set_language_registry(registry.clone());
-            reference_syntax_map.reparse(language.clone(), &buffer);
-
-            // Compare the mutated syntax map to the new syntax map
-            let mutated_layers = mutated_syntax_map.layers(&buffer);
-            let reference_layers = reference_syntax_map.layers(&buffer);
-            assert_eq!(
-                mutated_layers.len(),
-                reference_layers.len(),
-                "wrong number of layers at step {i}"
-            );
-            for (edited_layer, reference_layer) in
-                mutated_layers.into_iter().zip(reference_layers.into_iter())
-            {
-                assert_eq!(
-                    edited_layer.node.to_sexp(),
-                    reference_layer.node.to_sexp(),
-                    "different layer at step {i}"
-                );
-                assert_eq!(
-                    edited_layer.node.range(),
-                    reference_layer.node.range(),
-                    "different layer at step {i}"
-                );
-            }
-        }
-
-        (buffer, mutated_syntax_map)
-    }
-
-    fn html_lang() -> Language {
-        Language::new(
-            LanguageConfig {
-                name: "HTML".into(),
-                path_suffixes: vec!["html".to_string()],
-                ..Default::default()
-            },
-            Some(tree_sitter_html::language()),
-        )
-        .with_highlights_query(
-            r#"
-                (tag_name) @tag
-                (erroneous_end_tag_name) @tag
-                (attribute_name) @property
-            "#,
-        )
-        .unwrap()
-    }
-
-    fn ruby_lang() -> Language {
-        Language::new(
-            LanguageConfig {
-                name: "Ruby".into(),
-                path_suffixes: vec!["rb".to_string()],
-                ..Default::default()
-            },
-            Some(tree_sitter_ruby::language()),
-        )
-        .with_highlights_query(
-            r#"
-                ["if" "do" "else" "end"] @keyword
-                (instance_variable) @ivar
-            "#,
-        )
-        .unwrap()
-    }
-
-    fn erb_lang() -> Language {
-        Language::new(
-            LanguageConfig {
-                name: "ERB".into(),
-                path_suffixes: vec!["erb".to_string()],
-                ..Default::default()
-            },
-            Some(tree_sitter_embedded_template::language()),
-        )
-        .with_highlights_query(
-            r#"
-                ["<%" "%>"] @keyword
-            "#,
-        )
-        .unwrap()
-        .with_injection_query(
-            r#"
-                ((code) @content
-                 (#set! "language" "ruby")
-                 (#set! "combined"))
-
-                 ((content) @content
-                 (#set! "language" "html")
-                 (#set! "combined"))
-            "#,
-        )
-        .unwrap()
-    }
-
-    fn rust_lang() -> Language {
-        Language::new(
-            LanguageConfig {
-                name: "Rust".into(),
-                path_suffixes: vec!["rs".to_string()],
-                ..Default::default()
-            },
-            Some(tree_sitter_rust::language()),
-        )
-        .with_highlights_query(
-            r#"
-                (field_identifier) @field
-                (struct_expression) @struct
-            "#,
-        )
-        .unwrap()
-        .with_injection_query(
-            r#"
-                (macro_invocation
-                    (token_tree) @content
-                    (#set! "language" "rust"))
-            "#,
-        )
-        .unwrap()
-    }
-
-    fn markdown_lang() -> Language {
-        Language::new(
-            LanguageConfig {
-                name: "Markdown".into(),
-                path_suffixes: vec!["md".into()],
-                ..Default::default()
-            },
-            Some(tree_sitter_markdown::language()),
-        )
-        .with_injection_query(
-            r#"
-                (fenced_code_block
-                    (info_string
-                        (language) @language)
-                    (code_fence_content) @content)
-            "#,
-        )
-        .unwrap()
-    }
-
-    fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
-        let start = buffer.as_rope().to_string().find(text).unwrap();
-        start..start + text.len()
-    }
-
-    fn assert_layers_for_range(
-        syntax_map: &SyntaxMap,
-        buffer: &BufferSnapshot,
-        range: Range<Point>,
-        expected_layers: &[&str],
-    ) {
-        let layers = syntax_map
-            .layers_for_range(range, &buffer)
-            .collect::<Vec<_>>();
-        assert_eq!(
-            layers.len(),
-            expected_layers.len(),
-            "wrong number of layers"
-        );
-        for (i, (SyntaxLayerInfo { node, .. }, expected_s_exp)) in
-            layers.iter().zip(expected_layers.iter()).enumerate()
-        {
-            let actual_s_exp = node.to_sexp();
-            assert!(
-                string_contains_sequence(
-                    &actual_s_exp,
-                    &expected_s_exp.split("...").collect::<Vec<_>>()
-                ),
-                "layer {i}:\n\nexpected: {expected_s_exp}\nactual:   {actual_s_exp}",
-            );
-        }
-    }
-
-    fn assert_capture_ranges(
-        syntax_map: &SyntaxMap,
-        buffer: &BufferSnapshot,
-        highlight_query_capture_names: &[&str],
-        marked_string: &str,
-    ) {
-        let mut actual_ranges = Vec::<Range<usize>>::new();
-        let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
-            grammar.highlights_query.as_ref()
-        });
-        let queries = captures
-            .grammars()
-            .iter()
-            .map(|grammar| grammar.highlights_query.as_ref().unwrap())
-            .collect::<Vec<_>>();
-        for capture in captures {
-            let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
-            if highlight_query_capture_names.contains(&name.as_str()) {
-                actual_ranges.push(capture.node.byte_range());
-            }
-        }
-
-        let (text, expected_ranges) = marked_text_ranges(&marked_string.unindent(), false);
-        assert_eq!(text, buffer.text());
-        assert_eq!(actual_ranges, expected_ranges);
-    }
-
-    pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
-        let mut last_part_end = 0;
-        for part in parts {
-            if let Some(start_ix) = text[last_part_end..].find(part) {
-                last_part_end = start_ix + part.len();
-            } else {
-                return false;
-            }
-        }
-        true
-    }
-}

crates/language/src/syntax_map/syntax_map_tests.rs 🔗

@@ -0,0 +1,1199 @@
+use super::*;
+use crate::LanguageConfig;
+use rand::rngs::StdRng;
+use std::{env, ops::Range, sync::Arc};
+use text::Buffer;
+use tree_sitter::Node;
+use unindent::Unindent as _;
+use util::test::marked_text_ranges;
+
+#[test]
+fn test_splice_included_ranges() {
+    let ranges = vec![ts_range(20..30), ts_range(50..60), ts_range(80..90)];
+
+    let new_ranges = splice_included_ranges(
+        ranges.clone(),
+        &[54..56, 58..68],
+        &[ts_range(50..54), ts_range(59..67)],
+    );
+    assert_eq!(
+        new_ranges,
+        &[
+            ts_range(20..30),
+            ts_range(50..54),
+            ts_range(59..67),
+            ts_range(80..90),
+        ]
+    );
+
+    let new_ranges = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]);
+    assert_eq!(
+        new_ranges,
+        &[ts_range(20..30), ts_range(50..60), ts_range(80..90)]
+    );
+
+    let new_ranges =
+        splice_included_ranges(ranges.clone(), &[], &[ts_range(0..2), ts_range(70..75)]);
+    assert_eq!(
+        new_ranges,
+        &[
+            ts_range(0..2),
+            ts_range(20..30),
+            ts_range(50..60),
+            ts_range(70..75),
+            ts_range(80..90)
+        ]
+    );
+
+    let new_ranges = splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]);
+    assert_eq!(new_ranges, &[ts_range(25..55), ts_range(80..90)]);
+
+    fn ts_range(range: Range<usize>) -> tree_sitter::Range {
+        tree_sitter::Range {
+            start_byte: range.start,
+            start_point: tree_sitter::Point {
+                row: 0,
+                column: range.start,
+            },
+            end_byte: range.end,
+            end_point: tree_sitter::Point {
+                row: 0,
+                column: range.end,
+            },
+        }
+    }
+}
+
+#[gpui::test]
+fn test_syntax_map_layers_for_range() {
+    let registry = Arc::new(LanguageRegistry::test());
+    let language = Arc::new(rust_lang());
+    registry.add(language.clone());
+
+    let mut buffer = Buffer::new(
+        0,
+        0,
+        r#"
+            fn a() {
+                assert_eq!(
+                    b(vec![C {}]),
+                    vec![d.e],
+                );
+                println!("{}", f(|_| true));
+            }
+        "#
+        .unindent(),
+    );
+
+    let mut syntax_map = SyntaxMap::new();
+    syntax_map.set_language_registry(registry.clone());
+    syntax_map.reparse(language.clone(), &buffer);
+
+    assert_layers_for_range(
+        &syntax_map,
+        &buffer,
+        Point::new(2, 0)..Point::new(2, 0),
+        &[
+            "...(function_item ... (block (expression_statement (macro_invocation...",
+            "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+        ],
+    );
+    assert_layers_for_range(
+        &syntax_map,
+        &buffer,
+        Point::new(2, 14)..Point::new(2, 16),
+        &[
+            "...(function_item ...",
+            "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+            "...(array_expression (struct_expression ...",
+        ],
+    );
+    assert_layers_for_range(
+        &syntax_map,
+        &buffer,
+        Point::new(3, 14)..Point::new(3, 16),
+        &[
+            "...(function_item ...",
+            "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+            "...(array_expression (field_expression ...",
+        ],
+    );
+    assert_layers_for_range(
+        &syntax_map,
+        &buffer,
+        Point::new(5, 12)..Point::new(5, 16),
+        &[
+            "...(function_item ...",
+            "...(call_expression ... (arguments (closure_expression ...",
+        ],
+    );
+
+    // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
+    let macro_name_range = range_for_text(&buffer, "vec!");
+    buffer.edit([(macro_name_range, "&")]);
+    syntax_map.interpolate(&buffer);
+    syntax_map.reparse(language.clone(), &buffer);
+
+    assert_layers_for_range(
+            &syntax_map,
+            &buffer,
+            Point::new(2, 14)..Point::new(2, 16),
+            &[
+                "...(function_item ...",
+                "...(tuple_expression (call_expression ... arguments: (arguments (reference_expression value: (array_expression...",
+            ],
+        );
+
+    // Put the vec! macro back, adding back the syntactic layer.
+    buffer.undo();
+    syntax_map.interpolate(&buffer);
+    syntax_map.reparse(language.clone(), &buffer);
+
+    assert_layers_for_range(
+        &syntax_map,
+        &buffer,
+        Point::new(2, 14)..Point::new(2, 16),
+        &[
+            "...(function_item ...",
+            "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
+            "...(array_expression (struct_expression ...",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_dynamic_language_injection() {
+    let registry = Arc::new(LanguageRegistry::test());
+    let markdown = Arc::new(markdown_lang());
+    registry.add(markdown.clone());
+    registry.add(Arc::new(rust_lang()));
+    registry.add(Arc::new(ruby_lang()));
+
+    let mut buffer = Buffer::new(
+        0,
+        0,
+        r#"
+            This is a code block:
+
+            ```rs
+            fn foo() {}
+            ```
+        "#
+        .unindent(),
+    );
+
+    let mut syntax_map = SyntaxMap::new();
+    syntax_map.set_language_registry(registry.clone());
+    syntax_map.reparse(markdown.clone(), &buffer);
+    assert_layers_for_range(
+            &syntax_map,
+            &buffer,
+            Point::new(3, 0)..Point::new(3, 0),
+            &[
+                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
+                "...(function_item name: (identifier) parameters: (parameters) body: (block)...",
+            ],
+        );
+
+    // Replace Rust with Ruby in code block.
+    let macro_name_range = range_for_text(&buffer, "rs");
+    buffer.edit([(macro_name_range, "ruby")]);
+    syntax_map.interpolate(&buffer);
+    syntax_map.reparse(markdown.clone(), &buffer);
+    assert_layers_for_range(
+            &syntax_map,
+            &buffer,
+            Point::new(3, 0)..Point::new(3, 0),
+            &[
+                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
+                "...(call method: (identifier) arguments: (argument_list (call method: (identifier) arguments: (argument_list) block: (block)...",
+            ],
+        );
+
+    // Replace Ruby with a language that hasn't been loaded yet.
+    let macro_name_range = range_for_text(&buffer, "ruby");
+    buffer.edit([(macro_name_range, "html")]);
+    syntax_map.interpolate(&buffer);
+    syntax_map.reparse(markdown.clone(), &buffer);
+    assert_layers_for_range(
+            &syntax_map,
+            &buffer,
+            Point::new(3, 0)..Point::new(3, 0),
+            &[
+                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter..."
+            ],
+        );
+    assert!(syntax_map.contains_unknown_injections());
+
+    registry.add(Arc::new(html_lang()));
+    syntax_map.reparse(markdown.clone(), &buffer);
+    assert_layers_for_range(
+            &syntax_map,
+            &buffer,
+            Point::new(3, 0)..Point::new(3, 0),
+            &[
+                "...(fenced_code_block (fenced_code_block_delimiter) (info_string (language)) (code_fence_content) (fenced_code_block_delimiter...",
+                "(fragment (text))",
+            ],
+        );
+    assert!(!syntax_map.contains_unknown_injections());
+}
+
+#[gpui::test]
+fn test_typing_multiple_new_injections() {
+    let (buffer, syntax_map) = test_edit_sequence(
+        "Rust",
+        &[
+            "fn a() { dbg }",
+            "fn a() { dbg«!» }",
+            "fn a() { dbg!«()» }",
+            "fn a() { dbg!(«b») }",
+            "fn a() { dbg!(b«.») }",
+            "fn a() { dbg!(b.«c») }",
+            "fn a() { dbg!(b.c«()») }",
+            "fn a() { dbg!(b.c(«vec»)) }",
+            "fn a() { dbg!(b.c(vec«!»)) }",
+            "fn a() { dbg!(b.c(vec!«[]»)) }",
+            "fn a() { dbg!(b.c(vec![«d»])) }",
+            "fn a() { dbg!(b.c(vec![d«.»])) }",
+            "fn a() { dbg!(b.c(vec![d.«e»])) }",
+        ],
+    );
+
+    assert_capture_ranges(
+        &syntax_map,
+        &buffer,
+        &["field"],
+        "fn a() { dbg!(b.«c»(vec![d.«e»])) }",
+    );
+}
+
+#[gpui::test]
+fn test_pasting_new_injection_line_between_others() {
+    let (buffer, syntax_map) = test_edit_sequence(
+        "Rust",
+        &[
+            "
+                fn a() {
+                    b!(B {});
+                    c!(C {});
+                    d!(D {});
+                    e!(E {});
+                    f!(F {});
+                    g!(G {});
+                }
+            ",
+            "
+                fn a() {
+                    b!(B {});
+                    c!(C {});
+                    d!(D {});
+                «    h!(H {});
+                »    e!(E {});
+                    f!(F {});
+                    g!(G {});
+                }
+            ",
+        ],
+    );
+
+    assert_capture_ranges(
+        &syntax_map,
+        &buffer,
+        &["struct"],
+        "
+        fn a() {
+            b!(«B {}»);
+            c!(«C {}»);
+            d!(«D {}»);
+            h!(«H {}»);
+            e!(«E {}»);
+            f!(«F {}»);
+            g!(«G {}»);
+        }
+        ",
+    );
+}
+
+#[gpui::test]
+fn test_joining_injections_with_child_injections() {
+    let (buffer, syntax_map) = test_edit_sequence(
+        "Rust",
+        &[
+            "
+                fn a() {
+                    b!(
+                        c![one.two.three],
+                        d![four.five.six],
+                    );
+                    e!(
+                        f![seven.eight],
+                    );
+                }
+            ",
+            "
+                fn a() {
+                    b!(
+                        c![one.two.three],
+                        d![four.five.six],
+                    ˇ    f![seven.eight],
+                    );
+                }
+            ",
+        ],
+    );
+
+    assert_capture_ranges(
+        &syntax_map,
+        &buffer,
+        &["field"],
+        "
+        fn a() {
+            b!(
+                c![one.«two».«three»],
+                d![four.«five».«six»],
+                f![seven.«eight»],
+            );
+        }
+        ",
+    );
+}
+
+#[gpui::test]
+fn test_editing_edges_of_injection() {
+    test_edit_sequence(
+        "Rust",
+        &[
+            "
+                fn a() {
+                    b!(c!())
+                }
+            ",
+            "
+                fn a() {
+                    «d»!(c!())
+                }
+            ",
+            "
+                fn a() {
+                    «e»d!(c!())
+                }
+            ",
+            "
+                fn a() {
+                    ed!«[»c!()«]»
+                }
+            ",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_edits_preceding_and_intersecting_injection() {
+    test_edit_sequence(
+        "Rust",
+        &[
+            //
+            "const aaaaaaaaaaaa: B = c!(d(e.f));",
+            "const aˇa: B = c!(d(eˇ));",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_non_local_changes_create_injections() {
+    test_edit_sequence(
+        "Rust",
+        &[
+            "
+                // a! {
+                    static B: C = d;
+                // }
+            ",
+            "
+                ˇa! {
+                    static B: C = d;
+                ˇ}
+            ",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_creating_many_injections_in_one_edit() {
+    test_edit_sequence(
+        "Rust",
+        &[
+            "
+                fn a() {
+                    one(Two::three(3));
+                    four(Five::six(6));
+                    seven(Eight::nine(9));
+                }
+            ",
+            "
+                fn a() {
+                    one«!»(Two::three(3));
+                    four«!»(Five::six(6));
+                    seven«!»(Eight::nine(9));
+                }
+            ",
+            "
+                fn a() {
+                    one!(Two::three«!»(3));
+                    four!(Five::six«!»(6));
+                    seven!(Eight::nine«!»(9));
+                }
+            ",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_editing_across_injection_boundary() {
+    test_edit_sequence(
+        "Rust",
+        &[
+            "
+                fn one() {
+                    two();
+                    three!(
+                        three.four,
+                        five.six,
+                    );
+                }
+            ",
+            "
+                fn one() {
+                    two();
+                        th«irty_five![»
+                        three.four,
+                        five.six,
+                    «   seven.eight,
+                    ];»
+                }
+            ",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_removing_injection_by_replacing_across_boundary() {
+    test_edit_sequence(
+        "Rust",
+        &[
+            "
+                fn one() {
+                    two!(
+                        three.four,
+                    );
+                }
+            ",
+            "
+                fn one() {
+                    t«en
+                        .eleven(
+                        twelve,
+                    »
+                        three.four,
+                    );
+                }
+            ",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_combined_injections() {
+    let (buffer, syntax_map) = test_edit_sequence(
+        "ERB",
+        &[
+            "
+                <body>
+                    <% if @one %>
+                        <div class=one>
+                    <% else %>
+                        <div class=two>
+                    <% end %>
+                    </div>
+                </body>
+            ",
+            "
+                <body>
+                    <% if @one %>
+                        <div class=one>
+                    ˇ else ˇ
+                        <div class=two>
+                    <% end %>
+                    </div>
+                </body>
+            ",
+            "
+                <body>
+                    <% if @one «;» end %>
+                    </div>
+                </body>
+            ",
+        ],
+    );
+
+    assert_capture_ranges(
+        &syntax_map,
+        &buffer,
+        &["tag", "ivar"],
+        "
+            <«body»>
+                <% if «@one» ; end %>
+                </«div»>
+            </«body»>
+        ",
+    );
+}
+
+#[gpui::test]
+fn test_combined_injections_empty_ranges() {
+    test_edit_sequence(
+        "ERB",
+        &[
+            "
+                <% if @one %>
+                <% else %>
+                <% end %>
+            ",
+            "
+                <% if @one %>
+                ˇ<% end %>
+            ",
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_combined_injections_edit_edges_of_ranges() {
+    let (buffer, syntax_map) = test_edit_sequence(
+        "ERB",
+        &[
+            "
+                <%= one @two %>
+                <%= three @four %>
+            ",
+            "
+                <%= one @two %ˇ
+                <%= three @four %>
+            ",
+            "
+                <%= one @two %«>»
+                <%= three @four %>
+            ",
+        ],
+    );
+
+    assert_capture_ranges(
+        &syntax_map,
+        &buffer,
+        &["tag", "ivar"],
+        "
+            <%= one «@two» %>
+            <%= three «@four» %>
+        ",
+    );
+}
+
+#[gpui::test]
+fn test_combined_injections_splitting_some_injections() {
+    let (_buffer, _syntax_map) = test_edit_sequence(
+        "ERB",
+        &[
+            r#"
+                <%A if b(:c) %>
+                d
+                <% end %>
+                eee
+                <% f %>
+            "#,
+            r#"
+                <%« AAAAAAA %>
+                hhhhhhh
+                <%=» if b(:c) %>
+                d
+                <% end %>
+                eee
+                <% f %>
+            "#,
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_combined_injections_inside_injections() {
+    let (_buffer, _syntax_map) = test_edit_sequence(
+        "Markdown",
+        &[
+            r#"
+                here is some ERB code:
+
+                ```erb
+                <ul>
+                <% people.each do |person| %>
+                    <li><%= person.name %></li>
+                <% end %>
+                </ul>
+                ```
+            "#,
+            r#"
+                here is some ERB code:
+
+                ```erb
+                <ul>
+                <% people«2».each do |person| %>
+                    <li><%= person.name %></li>
+                <% end %>
+                </ul>
+                ```
+            "#,
+        ],
+    );
+}
+
+#[gpui::test]
+fn test_empty_combined_injections_inside_injections() {
+    let (buffer, syntax_map) = test_edit_sequence(
+        "Markdown",
+        &[r#"
+            ```erb
+            hello
+            ```
+
+            goodbye
+        "#],
+    );
+
+    assert_layers_for_range(
+        &syntax_map,
+        &buffer,
+        Point::new(0, 0)..Point::new(5, 0),
+        &[
+            "...(paragraph)...",
+            "(template...",
+            "(fragment...",
+            // The ruby syntax tree should be empty, since there are
+            // no interpolations in the ERB template.
+            "(program)",
+        ],
+    );
+}
+
+#[gpui::test(iterations = 50)]
+fn test_random_syntax_map_edits(mut rng: StdRng) {
+    let operations = env::var("OPERATIONS")
+        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+        .unwrap_or(10);
+
+    let text = r#"
+        fn test_something() {
+            let vec = vec![5, 1, 3, 8];
+            assert_eq!(
+                vec
+                    .into_iter()
+                    .map(|i| i * 2)
+                    .collect::<Vec<usize>>(),
+                vec![
+                    5 * 2, 1 * 2, 3 * 2, 8 * 2
+                ],
+            );
+        }
+    "#
+    .unindent()
+    .repeat(2);
+
+    let registry = Arc::new(LanguageRegistry::test());
+    let language = Arc::new(rust_lang());
+    registry.add(language.clone());
+    let mut buffer = Buffer::new(0, 0, text);
+
+    let mut syntax_map = SyntaxMap::new();
+    syntax_map.set_language_registry(registry.clone());
+    syntax_map.reparse(language.clone(), &buffer);
+
+    let mut reference_syntax_map = SyntaxMap::new();
+    reference_syntax_map.set_language_registry(registry.clone());
+
+    log::info!("initial text:\n{}", buffer.text());
+
+    for _ in 0..operations {
+        let prev_buffer = buffer.snapshot();
+        let prev_syntax_map = syntax_map.snapshot();
+
+        buffer.randomly_edit(&mut rng, 3);
+        log::info!("text:\n{}", buffer.text());
+
+        syntax_map.interpolate(&buffer);
+        check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
+
+        syntax_map.reparse(language.clone(), &buffer);
+
+        reference_syntax_map.clear();
+        reference_syntax_map.reparse(language.clone(), &buffer);
+    }
+
+    for i in 0..operations {
+        let i = operations - i - 1;
+        buffer.undo();
+        log::info!("undoing operation {}", i);
+        log::info!("text:\n{}", buffer.text());
+
+        syntax_map.interpolate(&buffer);
+        syntax_map.reparse(language.clone(), &buffer);
+
+        reference_syntax_map.clear();
+        reference_syntax_map.reparse(language.clone(), &buffer);
+        assert_eq!(
+            syntax_map.layers(&buffer).len(),
+            reference_syntax_map.layers(&buffer).len(),
+            "wrong number of layers after undoing edit {i}"
+        );
+    }
+
+    let layers = syntax_map.layers(&buffer);
+    let reference_layers = reference_syntax_map.layers(&buffer);
+    for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) {
+        assert_eq!(
+            edited_layer.node().to_sexp(),
+            reference_layer.node().to_sexp()
+        );
+        assert_eq!(edited_layer.node().range(), reference_layer.node().range());
+    }
+}
+
+#[gpui::test(iterations = 50)]
+fn test_random_syntax_map_edits_with_combined_injections(mut rng: StdRng) {
+    let operations = env::var("OPERATIONS")
+        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+        .unwrap_or(10);
+
+    let text = r#"
+        <div id="main">
+        <% if one?(:two) %>
+            <p class="three" four>
+            <%= yield :five %>
+            </p>
+        <% elsif Six.seven(8) %>
+            <p id="three" four>
+            <%= yield :five %>
+            </p>
+        <% else %>
+            <span>Ok</span>
+        <% end %>
+        </div>
+    "#
+    .unindent()
+    .repeat(8);
+
+    let registry = Arc::new(LanguageRegistry::test());
+    let language = Arc::new(erb_lang());
+    registry.add(language.clone());
+    registry.add(Arc::new(ruby_lang()));
+    registry.add(Arc::new(html_lang()));
+    let mut buffer = Buffer::new(0, 0, text);
+
+    let mut syntax_map = SyntaxMap::new();
+    syntax_map.set_language_registry(registry.clone());
+    syntax_map.reparse(language.clone(), &buffer);
+
+    let mut reference_syntax_map = SyntaxMap::new();
+    reference_syntax_map.set_language_registry(registry.clone());
+
+    log::info!("initial text:\n{}", buffer.text());
+
+    for _ in 0..operations {
+        let prev_buffer = buffer.snapshot();
+        let prev_syntax_map = syntax_map.snapshot();
+
+        buffer.randomly_edit(&mut rng, 3);
+        log::info!("text:\n{}", buffer.text());
+
+        syntax_map.interpolate(&buffer);
+        check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
+
+        syntax_map.reparse(language.clone(), &buffer);
+
+        reference_syntax_map.clear();
+        reference_syntax_map.reparse(language.clone(), &buffer);
+    }
+
+    for i in 0..operations {
+        let i = operations - i - 1;
+        buffer.undo();
+        log::info!("undoing operation {}", i);
+        log::info!("text:\n{}", buffer.text());
+
+        syntax_map.interpolate(&buffer);
+        syntax_map.reparse(language.clone(), &buffer);
+
+        reference_syntax_map.clear();
+        reference_syntax_map.reparse(language.clone(), &buffer);
+        assert_eq!(
+            syntax_map.layers(&buffer).len(),
+            reference_syntax_map.layers(&buffer).len(),
+            "wrong number of layers after undoing edit {i}"
+        );
+    }
+
+    let layers = syntax_map.layers(&buffer);
+    let reference_layers = reference_syntax_map.layers(&buffer);
+    for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) {
+        assert_eq!(
+            edited_layer.node().to_sexp(),
+            reference_layer.node().to_sexp()
+        );
+        assert_eq!(edited_layer.node().range(), reference_layer.node().range());
+    }
+}
+
+fn check_interpolation(
+    old_syntax_map: &SyntaxSnapshot,
+    new_syntax_map: &SyntaxSnapshot,
+    old_buffer: &BufferSnapshot,
+    new_buffer: &BufferSnapshot,
+) {
+    let edits = new_buffer
+        .edits_since::<usize>(&old_buffer.version())
+        .collect::<Vec<_>>();
+
+    for (old_layer, new_layer) in old_syntax_map
+        .layers
+        .iter()
+        .zip(new_syntax_map.layers.iter())
+    {
+        assert_eq!(old_layer.range, new_layer.range);
+        let Some(old_tree) = old_layer.content.tree() else { continue };
+        let Some(new_tree) = new_layer.content.tree() else { continue };
+        let old_start_byte = old_layer.range.start.to_offset(old_buffer);
+        let new_start_byte = new_layer.range.start.to_offset(new_buffer);
+        let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
+        let new_start_point = new_layer.range.start.to_point(new_buffer).to_ts_point();
+        let old_node = old_tree.root_node_with_offset(old_start_byte, old_start_point);
+        let new_node = new_tree.root_node_with_offset(new_start_byte, new_start_point);
+        check_node_edits(
+            old_layer.depth,
+            &old_layer.range,
+            old_node,
+            new_node,
+            old_buffer,
+            new_buffer,
+            &edits,
+        );
+    }
+
+    fn check_node_edits(
+        depth: usize,
+        range: &Range<Anchor>,
+        old_node: Node,
+        new_node: Node,
+        old_buffer: &BufferSnapshot,
+        new_buffer: &BufferSnapshot,
+        edits: &[text::Edit<usize>],
+    ) {
+        assert_eq!(old_node.kind(), new_node.kind());
+
+        let old_range = old_node.byte_range();
+        let new_range = new_node.byte_range();
+
+        let is_edited = edits
+            .iter()
+            .any(|edit| edit.new.start < new_range.end && edit.new.end > new_range.start);
+        if is_edited {
+            assert!(
+                new_node.has_changes(),
+                concat!(
+                    "failed to mark node as edited.\n",
+                    "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
+                    "node kind: {}, old node range: {:?}, new node range: {:?}",
+                ),
+                depth,
+                range.to_offset(old_buffer),
+                range.to_offset(new_buffer),
+                new_node.kind(),
+                old_range,
+                new_range,
+            );
+        }
+
+        if !new_node.has_changes() {
+            assert_eq!(
+                old_buffer
+                    .text_for_range(old_range.clone())
+                    .collect::<String>(),
+                new_buffer
+                    .text_for_range(new_range.clone())
+                    .collect::<String>(),
+                concat!(
+                    "mismatched text for node\n",
+                    "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
+                    "node kind: {}, old node range:{:?}, new node range:{:?}",
+                ),
+                depth,
+                range.to_offset(old_buffer),
+                range.to_offset(new_buffer),
+                new_node.kind(),
+                old_range,
+                new_range,
+            );
+        }
+
+        for i in 0..new_node.child_count() {
+            check_node_edits(
+                depth,
+                range,
+                old_node.child(i).unwrap(),
+                new_node.child(i).unwrap(),
+                old_buffer,
+                new_buffer,
+                edits,
+            )
+        }
+    }
+}
+
+fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap) {
+    let registry = Arc::new(LanguageRegistry::test());
+    registry.add(Arc::new(rust_lang()));
+    registry.add(Arc::new(ruby_lang()));
+    registry.add(Arc::new(html_lang()));
+    registry.add(Arc::new(erb_lang()));
+    registry.add(Arc::new(markdown_lang()));
+    let language = registry
+        .language_for_name(language_name)
+        .now_or_never()
+        .unwrap()
+        .unwrap();
+    let mut buffer = Buffer::new(0, 0, Default::default());
+
+    let mut mutated_syntax_map = SyntaxMap::new();
+    mutated_syntax_map.set_language_registry(registry.clone());
+    mutated_syntax_map.reparse(language.clone(), &buffer);
+
+    for (i, marked_string) in steps.into_iter().enumerate() {
+        buffer.edit_via_marked_text(&marked_string.unindent());
+
+        // Reparse the syntax map
+        mutated_syntax_map.interpolate(&buffer);
+        mutated_syntax_map.reparse(language.clone(), &buffer);
+
+        // Create a second syntax map from scratch
+        let mut reference_syntax_map = SyntaxMap::new();
+        reference_syntax_map.set_language_registry(registry.clone());
+        reference_syntax_map.reparse(language.clone(), &buffer);
+
+        // Compare the mutated syntax map to the new syntax map
+        let mutated_layers = mutated_syntax_map.layers(&buffer);
+        let reference_layers = reference_syntax_map.layers(&buffer);
+        assert_eq!(
+            mutated_layers.len(),
+            reference_layers.len(),
+            "wrong number of layers at step {i}"
+        );
+        for (edited_layer, reference_layer) in
+            mutated_layers.into_iter().zip(reference_layers.into_iter())
+        {
+            assert_eq!(
+                edited_layer.node().to_sexp(),
+                reference_layer.node().to_sexp(),
+                "different layer at step {i}"
+            );
+            assert_eq!(
+                edited_layer.node().range(),
+                reference_layer.node().range(),
+                "different layer at step {i}"
+            );
+        }
+    }
+
+    (buffer, mutated_syntax_map)
+}
+
+fn html_lang() -> Language {
+    Language::new(
+        LanguageConfig {
+            name: "HTML".into(),
+            path_suffixes: vec!["html".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_html::language()),
+    )
+    .with_highlights_query(
+        r#"
+            (tag_name) @tag
+            (erroneous_end_tag_name) @tag
+            (attribute_name) @property
+        "#,
+    )
+    .unwrap()
+}
+
+fn ruby_lang() -> Language {
+    Language::new(
+        LanguageConfig {
+            name: "Ruby".into(),
+            path_suffixes: vec!["rb".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_ruby::language()),
+    )
+    .with_highlights_query(
+        r#"
+            ["if" "do" "else" "end"] @keyword
+            (instance_variable) @ivar
+        "#,
+    )
+    .unwrap()
+}
+
+fn erb_lang() -> Language {
+    Language::new(
+        LanguageConfig {
+            name: "ERB".into(),
+            path_suffixes: vec!["erb".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_embedded_template::language()),
+    )
+    .with_highlights_query(
+        r#"
+            ["<%" "%>"] @keyword
+        "#,
+    )
+    .unwrap()
+    .with_injection_query(
+        r#"
+            (
+                (code) @content
+                (#set! "language" "ruby")
+                (#set! "combined")
+            )
+
+            (
+                (content) @content
+                (#set! "language" "html")
+                (#set! "combined")
+            )
+        "#,
+    )
+    .unwrap()
+}
+
+fn rust_lang() -> Language {
+    Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    )
+    .with_highlights_query(
+        r#"
+            (field_identifier) @field
+            (struct_expression) @struct
+        "#,
+    )
+    .unwrap()
+    .with_injection_query(
+        r#"
+            (macro_invocation
+                (token_tree) @content
+                (#set! "language" "rust"))
+        "#,
+    )
+    .unwrap()
+}
+
+fn markdown_lang() -> Language {
+    Language::new(
+        LanguageConfig {
+            name: "Markdown".into(),
+            path_suffixes: vec!["md".into()],
+            ..Default::default()
+        },
+        Some(tree_sitter_markdown::language()),
+    )
+    .with_injection_query(
+        r#"
+            (fenced_code_block
+                (info_string
+                    (language) @language)
+                (code_fence_content) @content)
+        "#,
+    )
+    .unwrap()
+}
+
+fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
+    let start = buffer.as_rope().to_string().find(text).unwrap();
+    start..start + text.len()
+}
+
+fn assert_layers_for_range(
+    syntax_map: &SyntaxMap,
+    buffer: &BufferSnapshot,
+    range: Range<Point>,
+    expected_layers: &[&str],
+) {
+    let layers = syntax_map
+        .layers_for_range(range, &buffer)
+        .collect::<Vec<_>>();
+    assert_eq!(
+        layers.len(),
+        expected_layers.len(),
+        "wrong number of layers"
+    );
+    for (i, (layer, expected_s_exp)) in layers.iter().zip(expected_layers.iter()).enumerate() {
+        let actual_s_exp = layer.node().to_sexp();
+        assert!(
+            string_contains_sequence(
+                &actual_s_exp,
+                &expected_s_exp.split("...").collect::<Vec<_>>()
+            ),
+            "layer {i}:\n\nexpected: {expected_s_exp}\nactual:   {actual_s_exp}",
+        );
+    }
+}
+
+fn assert_capture_ranges(
+    syntax_map: &SyntaxMap,
+    buffer: &BufferSnapshot,
+    highlight_query_capture_names: &[&str],
+    marked_string: &str,
+) {
+    let mut actual_ranges = Vec::<Range<usize>>::new();
+    let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
+        grammar.highlights_query.as_ref()
+    });
+    let queries = captures
+        .grammars()
+        .iter()
+        .map(|grammar| grammar.highlights_query.as_ref().unwrap())
+        .collect::<Vec<_>>();
+    for capture in captures {
+        let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
+        if highlight_query_capture_names.contains(&name.as_str()) {
+            actual_ranges.push(capture.node.byte_range());
+        }
+    }
+
+    let (text, expected_ranges) = marked_text_ranges(&marked_string.unindent(), false);
+    assert_eq!(text, buffer.text());
+    assert_eq!(actual_ranges, expected_ranges);
+}
+
+pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
+    let mut last_part_end = 0;
+    for part in parts {
+        if let Some(start_ix) = text[last_part_end..].find(part) {
+            last_part_end = start_ix + part.len();
+        } else {
+            return false;
+        }
+    }
+    true
+}

crates/lsp_log/Cargo.toml → crates/language_tools/Cargo.toml 🔗

@@ -1,11 +1,11 @@
 [package]
-name = "lsp_log"
+name = "language_tools"
 version = "0.1.0"
 edition = "2021"
 publish = false
 
 [lib]
-path = "src/lsp_log.rs"
+path = "src/language_tools.rs"
 doctest = false
 
 [dependencies]
@@ -22,6 +22,7 @@ lsp = { path = "../lsp" }
 futures.workspace = true
 serde.workspace = true
 anyhow.workspace = true
+tree-sitter.workspace = true
 
 [dev-dependencies]
 client = { path = "../client", features = ["test-support"] }

crates/language_tools/src/language_tools.rs 🔗

@@ -0,0 +1,15 @@
+mod lsp_log;
+mod syntax_tree_view;
+
+#[cfg(test)]
+mod lsp_log_tests;
+
+use gpui::AppContext;
+
+pub use lsp_log::{LogStore, LspLogToolbarItemView, LspLogView};
+pub use syntax_tree_view::{SyntaxTreeToolbarItemView, SyntaxTreeView};
+
+pub fn init(cx: &mut AppContext) {
+    lsp_log::init(cx);
+    syntax_tree_view::init(cx);
+}

crates/lsp_log/src/lsp_log.rs → crates/language_tools/src/lsp_log.rs 🔗

@@ -1,6 +1,3 @@
-#[cfg(test)]
-mod lsp_log_tests;
-
 use collections::HashMap;
 use editor::Editor;
 use futures::{channel::mpsc, StreamExt};
@@ -27,7 +24,7 @@ use workspace::{
 const SEND_LINE: &str = "// Send:\n";
 const RECEIVE_LINE: &str = "// Receive:\n";
 
-struct LogStore {
+pub struct LogStore {
     projects: HashMap<WeakModelHandle<Project>, ProjectState>,
     io_tx: mpsc::UnboundedSender<(WeakModelHandle<Project>, LanguageServerId, bool, String)>,
 }
@@ -49,10 +46,10 @@ struct LanguageServerRpcState {
 }
 
 pub struct LspLogView {
+    pub(crate) editor: ViewHandle<Editor>,
     log_store: ModelHandle<LogStore>,
     current_server_id: Option<LanguageServerId>,
     is_showing_rpc_trace: bool,
-    editor: ViewHandle<Editor>,
     project: ModelHandle<Project>,
 }
 
@@ -68,16 +65,16 @@ enum MessageKind {
 }
 
 #[derive(Clone, Debug, PartialEq)]
-struct LogMenuItem {
-    server_id: LanguageServerId,
-    server_name: LanguageServerName,
-    worktree: ModelHandle<Worktree>,
-    rpc_trace_enabled: bool,
-    rpc_trace_selected: bool,
-    logs_selected: bool,
+pub(crate) struct LogMenuItem {
+    pub server_id: LanguageServerId,
+    pub server_name: LanguageServerName,
+    pub worktree: ModelHandle<Worktree>,
+    pub rpc_trace_enabled: bool,
+    pub rpc_trace_selected: bool,
+    pub logs_selected: bool,
 }
 
-actions!(log, [OpenLanguageServerLogs]);
+actions!(debug, [OpenLanguageServerLogs]);
 
 pub fn init(cx: &mut AppContext) {
     let log_store = cx.add_model(|cx| LogStore::new(cx));
@@ -114,7 +111,7 @@ pub fn init(cx: &mut AppContext) {
 }
 
 impl LogStore {
-    fn new(cx: &mut ModelContext<Self>) -> Self {
+    pub fn new(cx: &mut ModelContext<Self>) -> Self {
         let (io_tx, mut io_rx) = mpsc::unbounded();
         let this = Self {
             projects: HashMap::default(),
@@ -320,7 +317,7 @@ impl LogStore {
 }
 
 impl LspLogView {
-    fn new(
+    pub fn new(
         project: ModelHandle<Project>,
         log_store: ModelHandle<LogStore>,
         cx: &mut ViewContext<Self>,
@@ -360,7 +357,7 @@ impl LspLogView {
         editor
     }
 
-    fn menu_items<'a>(&'a self, cx: &'a AppContext) -> Option<Vec<LogMenuItem>> {
+    pub(crate) fn menu_items<'a>(&'a self, cx: &'a AppContext) -> Option<Vec<LogMenuItem>> {
         let log_store = self.log_store.read(cx);
         let state = log_store.projects.get(&self.project.downgrade())?;
         let mut rows = self
@@ -544,12 +541,7 @@ impl View for LspLogToolbarItemView {
         let theme = theme::current(cx).clone();
         let Some(log_view) = self.log_view.as_ref() else { return Empty::new().into_any() };
         let log_view = log_view.read(cx);
-
-        let menu_rows = self
-            .log_view
-            .as_ref()
-            .and_then(|view| view.read(cx).menu_items(cx))
-            .unwrap_or_default();
+        let menu_rows = log_view.menu_items(cx).unwrap_or_default();
 
         let current_server_id = log_view.current_server_id;
         let current_server = current_server_id.and_then(|current_server_id| {
@@ -586,7 +578,7 @@ impl View for LspLogToolbarItemView {
                                     )
                                 }))
                                 .contained()
-                                .with_style(theme.lsp_log_menu.container)
+                                .with_style(theme.toolbar_dropdown_menu.container)
                                 .constrained()
                                 .with_width(400.)
                                 .with_height(400.)
@@ -596,6 +588,7 @@ impl View for LspLogToolbarItemView {
                             cx.notify()
                         }),
                     )
+                    .with_hoverable(true)
                     .with_fit_mode(OverlayFitMode::SwitchAnchor)
                     .with_anchor_corner(AnchorCorner::TopLeft)
                     .with_z_index(999)
@@ -688,7 +681,7 @@ impl LspLogToolbarItemView {
                     )
                 })
                 .unwrap_or_else(|| "No server selected".into());
-            let style = theme.lsp_log_menu.header.style_for(state, false);
+            let style = theme.toolbar_dropdown_menu.header.style_for(state, false);
             Label::new(label, style.text.clone())
                 .contained()
                 .with_style(style.container)
@@ -714,7 +707,7 @@ impl LspLogToolbarItemView {
 
         Flex::column()
             .with_child({
-                let style = &theme.lsp_log_menu.server;
+                let style = &theme.toolbar_dropdown_menu.section_header;
                 Label::new(
                     format!("{} ({})", name.0, worktree.read(cx).root_name()),
                     style.text.clone(),
@@ -722,16 +715,19 @@ impl LspLogToolbarItemView {
                 .contained()
                 .with_style(style.container)
                 .constrained()
-                .with_height(theme.lsp_log_menu.row_height)
+                .with_height(theme.toolbar_dropdown_menu.row_height)
             })
             .with_child(
                 MouseEventHandler::<ActivateLog, _>::new(id.0, cx, move |state, _| {
-                    let style = theme.lsp_log_menu.item.style_for(state, logs_selected);
+                    let style = theme
+                        .toolbar_dropdown_menu
+                        .item
+                        .style_for(state, logs_selected);
                     Label::new(SERVER_LOGS, style.text.clone())
                         .contained()
                         .with_style(style.container)
                         .constrained()
-                        .with_height(theme.lsp_log_menu.row_height)
+                        .with_height(theme.toolbar_dropdown_menu.row_height)
                 })
                 .with_cursor_style(CursorStyle::PointingHand)
                 .on_click(MouseButton::Left, move |_, view, cx| {
@@ -740,12 +736,15 @@ impl LspLogToolbarItemView {
             )
             .with_child(
                 MouseEventHandler::<ActivateRpcTrace, _>::new(id.0, cx, move |state, cx| {
-                    let style = theme.lsp_log_menu.item.style_for(state, rpc_trace_selected);
+                    let style = theme
+                        .toolbar_dropdown_menu
+                        .item
+                        .style_for(state, rpc_trace_selected);
                     Flex::row()
                         .with_child(
                             Label::new(RPC_MESSAGES, style.text.clone())
                                 .constrained()
-                                .with_height(theme.lsp_log_menu.row_height),
+                                .with_height(theme.toolbar_dropdown_menu.row_height),
                         )
                         .with_child(
                             ui::checkbox_with_label::<Self, _, Self, _>(
@@ -764,7 +763,7 @@ impl LspLogToolbarItemView {
                         .contained()
                         .with_style(style.container)
                         .constrained()
-                        .with_height(theme.lsp_log_menu.row_height)
+                        .with_height(theme.toolbar_dropdown_menu.row_height)
                 })
                 .with_cursor_style(CursorStyle::PointingHand)
                 .on_click(MouseButton::Left, move |_, view, cx| {

crates/lsp_log/src/lsp_log_tests.rs → crates/language_tools/src/lsp_log_tests.rs 🔗

@@ -1,7 +1,12 @@
+use std::sync::Arc;
+
+use crate::lsp_log::LogMenuItem;
+
 use super::*;
+use futures::StreamExt;
 use gpui::{serde_json::json, TestAppContext};
-use language::{tree_sitter_rust, FakeLspAdapter, Language, LanguageConfig};
-use project::FakeFs;
+use language::{tree_sitter_rust, FakeLspAdapter, Language, LanguageConfig, LanguageServerName};
+use project::{FakeFs, Project};
 use settings::SettingsStore;
 
 #[gpui::test]

crates/language_tools/src/syntax_tree_view.rs 🔗

@@ -0,0 +1,675 @@
+use editor::{scroll::autoscroll::Autoscroll, Anchor, Editor, ExcerptId};
+use gpui::{
+    actions,
+    elements::{
+        AnchorCorner, Empty, Flex, Label, MouseEventHandler, Overlay, OverlayFitMode,
+        ParentElement, ScrollTarget, Stack, UniformList, UniformListState,
+    },
+    fonts::TextStyle,
+    platform::{CursorStyle, MouseButton},
+    AppContext, Element, Entity, ModelHandle, View, ViewContext, ViewHandle, WeakViewHandle,
+};
+use language::{Buffer, OwnedSyntaxLayerInfo, SyntaxLayerInfo};
+use std::{mem, ops::Range, sync::Arc};
+use theme::{Theme, ThemeSettings};
+use tree_sitter::{Node, TreeCursor};
+use workspace::{
+    item::{Item, ItemHandle},
+    ToolbarItemLocation, ToolbarItemView, Workspace,
+};
+
+actions!(debug, [OpenSyntaxTreeView]);
+
+pub fn init(cx: &mut AppContext) {
+    cx.add_action(
+        move |workspace: &mut Workspace, _: &OpenSyntaxTreeView, cx: _| {
+            let active_item = workspace.active_item(cx);
+            let workspace_handle = workspace.weak_handle();
+            let syntax_tree_view =
+                cx.add_view(|cx| SyntaxTreeView::new(workspace_handle, active_item, cx));
+            workspace.add_item(Box::new(syntax_tree_view), cx);
+        },
+    );
+}
+
+pub struct SyntaxTreeView {
+    workspace_handle: WeakViewHandle<Workspace>,
+    editor: Option<EditorState>,
+    mouse_y: Option<f32>,
+    line_height: Option<f32>,
+    list_state: UniformListState,
+    selected_descendant_ix: Option<usize>,
+    hovered_descendant_ix: Option<usize>,
+}
+
+pub struct SyntaxTreeToolbarItemView {
+    tree_view: Option<ViewHandle<SyntaxTreeView>>,
+    subscription: Option<gpui::Subscription>,
+    menu_open: bool,
+}
+
+struct EditorState {
+    editor: ViewHandle<Editor>,
+    active_buffer: Option<BufferState>,
+    _subscription: gpui::Subscription,
+}
+
+#[derive(Clone)]
+struct BufferState {
+    buffer: ModelHandle<Buffer>,
+    excerpt_id: ExcerptId,
+    active_layer: Option<OwnedSyntaxLayerInfo>,
+}
+
+impl SyntaxTreeView {
+    pub fn new(
+        workspace_handle: WeakViewHandle<Workspace>,
+        active_item: Option<Box<dyn ItemHandle>>,
+        cx: &mut ViewContext<Self>,
+    ) -> Self {
+        let mut this = Self {
+            workspace_handle: workspace_handle.clone(),
+            list_state: UniformListState::default(),
+            editor: None,
+            mouse_y: None,
+            line_height: None,
+            hovered_descendant_ix: None,
+            selected_descendant_ix: None,
+        };
+
+        this.workspace_updated(active_item, cx);
+        cx.observe(
+            &workspace_handle.upgrade(cx).unwrap(),
+            |this, workspace, cx| {
+                this.workspace_updated(workspace.read(cx).active_item(cx), cx);
+            },
+        )
+        .detach();
+
+        this
+    }
+
+    fn workspace_updated(
+        &mut self,
+        active_item: Option<Box<dyn ItemHandle>>,
+        cx: &mut ViewContext<Self>,
+    ) {
+        if let Some(item) = active_item {
+            if item.id() != cx.view_id() {
+                if let Some(editor) = item.act_as::<Editor>(cx) {
+                    self.set_editor(editor, cx);
+                }
+            }
+        }
+    }
+
+    fn set_editor(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
+        if let Some(state) = &self.editor {
+            if state.editor == editor {
+                return;
+            }
+            editor.update(cx, |editor, cx| {
+                editor.clear_background_highlights::<Self>(cx)
+            });
+        }
+
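+        // Refresh the view when the buffer reparses or the selection moves; only a
+        // reparse invalidates the cached syntax layer.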
+        let subscription = cx.subscribe(&editor, |this, _, event, cx| {
+            let did_reparse = match event {
+                editor::Event::Reparsed => true,
+                editor::Event::SelectionsChanged { .. } => false,
+                _ => return,
+            };
+            this.editor_updated(did_reparse, cx);
+        });
+
+        self.editor = Some(EditorState {
+            editor,
+            _subscription: subscription,
+            active_buffer: None,
+        });
+        self.editor_updated(true, cx);
+    }
+
+    fn editor_updated(&mut self, did_reparse: bool, cx: &mut ViewContext<Self>) -> Option<()> {
+        // Find which excerpt the cursor is in, and the position within that excerpted buffer.
+        let editor_state = self.editor.as_mut()?;
+        let editor = &editor_state.editor.read(cx);
+        let selection_range = editor.selections.last::<usize>(cx).range();
+        let multibuffer = editor.buffer().read(cx);
+        let (buffer, range, excerpt_id) = multibuffer
+            .range_to_buffer_ranges(selection_range, cx)
+            .pop()?;
+
+        // If the cursor has moved into a different excerpt, retrieve a new syntax layer
+        // from that buffer.
+        let buffer_state = editor_state
+            .active_buffer
+            .get_or_insert_with(|| BufferState {
+                buffer: buffer.clone(),
+                excerpt_id,
+                active_layer: None,
+            });
+        let mut prev_layer = None;
+        if did_reparse {
+            prev_layer = buffer_state.active_layer.take();
+        }
+        if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id {
+            buffer_state.buffer = buffer.clone();
+            buffer_state.excerpt_id = excerpt_id;
+            buffer_state.active_layer = None;
+        }
+
+        let layer = match &mut buffer_state.active_layer {
+            Some(layer) => layer,
+            None => {
+                let snapshot = buffer.read(cx).snapshot();
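+                // After a reparse, prefer a layer of the same language whose byte range
+                // is closest to the previously selected layer's range.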
+                let layer = if let Some(prev_layer) = prev_layer {
+                    let prev_range = prev_layer.node().byte_range();
+                    snapshot
+                        .syntax_layers()
+                        .filter(|layer| layer.language == &prev_layer.language)
+                        .min_by_key(|layer| {
+                            let range = layer.node().byte_range();
+                            ((range.start as i64) - (prev_range.start as i64)).abs()
+                                + ((range.end as i64) - (prev_range.end as i64)).abs()
+                        })?
+                } else {
+                    snapshot.syntax_layers().next()?
+                };
+                buffer_state.active_layer.insert(layer.to_owned())
+            }
+        };
+
+        // Within the active layer, find the syntax node under the cursor,
+        // and scroll to it.
+        let mut cursor = layer.node().walk();
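+        // Descend to the deepest node containing the selection start, stepping past
+        // nodes that end exactly at the selection start when the selection is non-empty.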
+        while cursor.goto_first_child_for_byte(range.start).is_some() {
+            if !range.is_empty() && cursor.node().end_byte() == range.start {
+                cursor.goto_next_sibling();
+            }
+        }
+
+        // Ascend to the smallest ancestor that contains the range.
+        loop {
+            let node_range = cursor.node().byte_range();
+            if node_range.start <= range.start && node_range.end >= range.end {
+                break;
+            }
+            if !cursor.goto_parent() {
+                break;
+            }
+        }
+
+        let descendant_ix = cursor.descendant_index();
+        self.selected_descendant_ix = Some(descendant_ix);
+        self.list_state.scroll_to(ScrollTarget::Show(descendant_ix));
+
+        cx.notify();
+        Some(())
+    }
+
+    fn handle_click(&mut self, y: f32, cx: &mut ViewContext<SyntaxTreeView>) -> Option<()> {
+        let line_height = self.line_height?;
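+        // Convert the click's y offset into an index into the flattened list of
+        // tree descendants.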
+        let ix = ((self.list_state.scroll_top() + y) / line_height) as usize;
+
+        self.update_editor_with_range_for_descendant_ix(ix, cx, |editor, mut range, cx| {
+            // Put the cursor at the beginning of the node.
+            mem::swap(&mut range.start, &mut range.end);
+
+            editor.change_selections(Some(Autoscroll::newest()), cx, |selections| {
+                selections.select_ranges(vec![range]);
+            });
+        });
+        Some(())
+    }
+
+    fn hover_state_changed(&mut self, cx: &mut ViewContext<SyntaxTreeView>) {
+        if let Some((y, line_height)) = self.mouse_y.zip(self.line_height) {
+            let ix = ((self.list_state.scroll_top() + y) / line_height) as usize;
+            if self.hovered_descendant_ix != Some(ix) {
+                self.hovered_descendant_ix = Some(ix);
+                self.update_editor_with_range_for_descendant_ix(ix, cx, |editor, range, cx| {
+                    editor.clear_background_highlights::<Self>(cx);
+                    editor.highlight_background::<Self>(
+                        vec![range],
+                        |theme| theme.editor.document_highlight_write_background,
+                        cx,
+                    );
+                });
+                cx.notify();
+            }
+        }
+    }
+
+    fn update_editor_with_range_for_descendant_ix(
+        &self,
+        descendant_ix: usize,
+        cx: &mut ViewContext<Self>,
+        mut f: impl FnMut(&mut Editor, Range<Anchor>, &mut ViewContext<Editor>),
+    ) -> Option<()> {
+        let editor_state = self.editor.as_ref()?;
+        let buffer_state = editor_state.active_buffer.as_ref()?;
+        let layer = buffer_state.active_layer.as_ref()?;
+
+        // Find the node.
+        let mut cursor = layer.node().walk();
+        cursor.goto_descendant(descendant_ix);
+        let node = cursor.node();
+        let range = node.byte_range();
+
+        // Build a text anchor range.
+        let buffer = buffer_state.buffer.read(cx);
+        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
+
+        // Build a multibuffer anchor range.
+        let multibuffer = editor_state.editor.read(cx).buffer();
+        let multibuffer = multibuffer.read(cx).snapshot(cx);
+        let excerpt_id = buffer_state.excerpt_id;
+        let range = multibuffer.anchor_in_excerpt(excerpt_id, range.start)
+            ..multibuffer.anchor_in_excerpt(excerpt_id, range.end);
+
+        // Update the editor with the anchor range.
+        editor_state.editor.update(cx, |editor, cx| {
+            f(editor, range, cx);
+        });
+        Some(())
+    }
+
+    fn render_node(
+        cursor: &TreeCursor,
+        depth: u32,
+        selected: bool,
+        hovered: bool,
+        list_hovered: bool,
+        style: &TextStyle,
+        editor_theme: &theme::Editor,
+        cx: &AppContext,
+    ) -> gpui::AnyElement<SyntaxTreeView> {
+        let node = cursor.node();
+        let mut range_style = style.clone();
+        let em_width = style.em_width(cx.font_cache());
+        let gutter_padding = (em_width * editor_theme.gutter_padding_factor).round();
+
+        range_style.color = editor_theme.line_number;
+
+        let mut anonymous_node_style = style.clone();
+        let string_color = editor_theme
+            .syntax
+            .highlights
+            .iter()
+            .find_map(|(name, style)| (name == "string").then(|| style.color)?);
+        let property_color = editor_theme
+            .syntax
+            .highlights
+            .iter()
+            .find_map(|(name, style)| (name == "property").then(|| style.color)?);
+        if let Some(color) = string_color {
+            anonymous_node_style.color = color;
+        }
+
+        let mut row = Flex::row();
+        if let Some(field_name) = cursor.field_name() {
+            let mut field_style = style.clone();
+            if let Some(color) = property_color {
+                field_style.color = color;
+            }
+
+            row.add_children([
+                Label::new(field_name, field_style),
+                Label::new(": ", style.clone()),
+            ]);
+        }
+
+        return row
+            .with_child(
+                if node.is_named() {
+                    Label::new(node.kind(), style.clone())
+                } else {
+                    Label::new(format!("\"{}\"", node.kind()), anonymous_node_style)
+                }
+                .contained()
+                .with_margin_right(em_width),
+            )
+            .with_child(Label::new(format_node_range(node), range_style))
+            .contained()
+            .with_background_color(if selected {
+                editor_theme.selection.selection
+            } else if hovered && list_hovered {
+                editor_theme.active_line_background
+            } else {
+                Default::default()
+            })
+            .with_padding_left(gutter_padding + depth as f32 * 18.0)
+            .into_any();
+    }
+}
+
+impl Entity for SyntaxTreeView {
+    type Event = ();
+}
+
+impl View for SyntaxTreeView {
+    fn ui_name() -> &'static str {
+        "SyntaxTreeView"
+    }
+
+    fn render(&mut self, cx: &mut gpui::ViewContext<'_, '_, Self>) -> gpui::AnyElement<Self> {
+        let settings = settings::get::<ThemeSettings>(cx);
+        let font_family_id = settings.buffer_font_family;
+        let font_family_name = cx.font_cache().family_name(font_family_id).unwrap();
+        let font_properties = Default::default();
+        let font_id = cx
+            .font_cache()
+            .select_font(font_family_id, &font_properties)
+            .unwrap();
+        let font_size = settings.buffer_font_size(cx);
+
+        let editor_theme = settings.theme.editor.clone();
+        let style = TextStyle {
+            color: editor_theme.text_color,
+            font_family_name,
+            font_family_id,
+            font_id,
+            font_size,
+            font_properties: Default::default(),
+            underline: Default::default(),
+        };
+
+        let line_height = cx.font_cache().line_height(font_size);
+        if Some(line_height) != self.line_height {
+            self.line_height = Some(line_height);
+            self.hover_state_changed(cx);
+        }
+
+        if let Some(layer) = self
+            .editor
+            .as_ref()
+            .and_then(|editor| editor.active_buffer.as_ref())
+            .and_then(|buffer| buffer.active_layer.as_ref())
+        {
+            let layer = layer.clone();
+            let theme = editor_theme.clone();
+            return MouseEventHandler::<Self, Self>::new(0, cx, move |state, cx| {
+                let list_hovered = state.hovered();
+                UniformList::new(
+                    self.list_state.clone(),
+                    layer.node().descendant_count(),
+                    cx,
+                    move |this, range, items, cx| {
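+                        // Render the visible rows by walking the syntax tree in
+                        // pre-order from the first visible descendant, tracking depth
+                        // as the cursor moves between children, siblings, and parents.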
+                        let mut cursor = layer.node().walk();
+                        let mut descendant_ix = range.start as usize;
+                        cursor.goto_descendant(descendant_ix);
+                        let mut depth = cursor.depth();
+                        let mut visited_children = false;
+                        while descendant_ix < range.end {
+                            if visited_children {
+                                if cursor.goto_next_sibling() {
+                                    visited_children = false;
+                                } else if cursor.goto_parent() {
+                                    depth -= 1;
+                                } else {
+                                    break;
+                                }
+                            } else {
+                                items.push(Self::render_node(
+                                    &cursor,
+                                    depth,
+                                    Some(descendant_ix) == this.selected_descendant_ix,
+                                    Some(descendant_ix) == this.hovered_descendant_ix,
+                                    list_hovered,
+                                    &style,
+                                    &theme,
+                                    cx,
+                                ));
+                                descendant_ix += 1;
+                                if cursor.goto_first_child() {
+                                    depth += 1;
+                                } else {
+                                    visited_children = true;
+                                }
+                            }
+                        }
+                    },
+                )
+            })
+            .on_move(move |event, this, cx| {
+                let y = event.position.y() - event.region.origin_y();
+                this.mouse_y = Some(y);
+                this.hover_state_changed(cx);
+            })
+            .on_click(MouseButton::Left, move |event, this, cx| {
+                let y = event.position.y() - event.region.origin_y();
+                this.handle_click(y, cx);
+            })
+            .contained()
+            .with_background_color(editor_theme.background)
+            .into_any();
+        }
+
+        Empty::new().into_any()
+    }
+}
+
+impl Item for SyntaxTreeView {
+    fn tab_content<V: View>(
+        &self,
+        _: Option<usize>,
+        style: &theme::Tab,
+        _: &AppContext,
+    ) -> gpui::AnyElement<V> {
+        Label::new("Syntax Tree", style.label.clone()).into_any()
+    }
+
+    fn clone_on_split(
+        &self,
+        _workspace_id: workspace::WorkspaceId,
+        cx: &mut ViewContext<Self>,
+    ) -> Option<Self>
+    where
+        Self: Sized,
+    {
+        let mut clone = Self::new(self.workspace_handle.clone(), None, cx);
+        if let Some(editor) = &self.editor {
+            clone.set_editor(editor.editor.clone(), cx)
+        }
+        Some(clone)
+    }
+}
+
+impl SyntaxTreeToolbarItemView {
+    pub fn new() -> Self {
+        Self {
+            menu_open: false,
+            tree_view: None,
+            subscription: None,
+        }
+    }
+
+    fn render_menu(
+        &mut self,
+        cx: &mut ViewContext<'_, '_, Self>,
+    ) -> Option<gpui::AnyElement<Self>> {
+        let theme = theme::current(cx).clone();
+        let tree_view = self.tree_view.as_ref()?;
+        let tree_view = tree_view.read(cx);
+
+        let editor_state = tree_view.editor.as_ref()?;
+        let buffer_state = editor_state.active_buffer.as_ref()?;
+        let active_layer = buffer_state.active_layer.clone()?;
+        let active_buffer = buffer_state.buffer.read(cx).snapshot();
+
+        enum Menu {}
+
+        Some(
+            Stack::new()
+                .with_child(Self::render_header(&theme, &active_layer, cx))
+                .with_children(self.menu_open.then(|| {
+                    Overlay::new(
+                        MouseEventHandler::<Menu, _>::new(0, cx, move |_, cx| {
+                            Flex::column()
+                                .with_children(active_buffer.syntax_layers().enumerate().map(
+                                    |(ix, layer)| {
+                                        Self::render_menu_item(&theme, &active_layer, layer, ix, cx)
+                                    },
+                                ))
+                                .contained()
+                                .with_style(theme.toolbar_dropdown_menu.container)
+                                .constrained()
+                                .with_width(400.)
+                                .with_height(400.)
+                        })
+                        .on_down_out(MouseButton::Left, |_, this, cx| {
+                            this.menu_open = false;
+                            cx.notify()
+                        }),
+                    )
+                    .with_hoverable(true)
+                    .with_fit_mode(OverlayFitMode::SwitchAnchor)
+                    .with_anchor_corner(AnchorCorner::TopLeft)
+                    .with_z_index(999)
+                    .aligned()
+                    .bottom()
+                    .left()
+                }))
+                .aligned()
+                .left()
+                .clipped()
+                .into_any(),
+        )
+    }
+
+    fn toggle_menu(&mut self, cx: &mut ViewContext<Self>) {
+        self.menu_open = !self.menu_open;
+        cx.notify();
+    }
+
+    fn select_layer(&mut self, layer_ix: usize, cx: &mut ViewContext<Self>) -> Option<()> {
+        let tree_view = self.tree_view.as_ref()?;
+        tree_view.update(cx, |view, cx| {
+            let editor_state = view.editor.as_mut()?;
+            let buffer_state = editor_state.active_buffer.as_mut()?;
+            let snapshot = buffer_state.buffer.read(cx).snapshot();
+            let layer = snapshot.syntax_layers().nth(layer_ix)?;
+            buffer_state.active_layer = Some(layer.to_owned());
+            view.selected_descendant_ix = None;
+            self.menu_open = false;
+            cx.notify();
+            Some(())
+        })
+    }
+
+    fn render_header(
+        theme: &Arc<Theme>,
+        active_layer: &OwnedSyntaxLayerInfo,
+        cx: &mut ViewContext<Self>,
+    ) -> impl Element<Self> {
+        enum ToggleMenu {}
+        MouseEventHandler::<ToggleMenu, Self>::new(0, cx, move |state, _| {
+            let style = theme.toolbar_dropdown_menu.header.style_for(state, false);
+            Flex::row()
+                .with_child(
+                    Label::new(active_layer.language.name().to_string(), style.text.clone())
+                        .contained()
+                        .with_margin_right(style.secondary_text_spacing),
+                )
+                .with_child(Label::new(
+                    format_node_range(active_layer.node()),
+                    style
+                        .secondary_text
+                        .clone()
+                        .unwrap_or_else(|| style.text.clone()),
+                ))
+                .contained()
+                .with_style(style.container)
+        })
+        .with_cursor_style(CursorStyle::PointingHand)
+        .on_click(MouseButton::Left, move |_, view, cx| {
+            view.toggle_menu(cx);
+        })
+    }
+
+    fn render_menu_item(
+        theme: &Arc<Theme>,
+        active_layer: &OwnedSyntaxLayerInfo,
+        layer: SyntaxLayerInfo,
+        layer_ix: usize,
+        cx: &mut ViewContext<Self>,
+    ) -> impl Element<Self> {
+        enum ActivateLayer {}
+        MouseEventHandler::<ActivateLayer, _>::new(layer_ix, cx, move |state, _| {
+            let is_selected = layer.node() == active_layer.node();
+            let style = theme
+                .toolbar_dropdown_menu
+                .item
+                .style_for(state, is_selected);
+            Flex::row()
+                .with_child(
+                    Label::new(layer.language.name().to_string(), style.text.clone())
+                        .contained()
+                        .with_margin_right(style.secondary_text_spacing),
+                )
+                .with_child(Label::new(
+                    format_node_range(layer.node()),
+                    style
+                        .secondary_text
+                        .clone()
+                        .unwrap_or_else(|| style.text.clone()),
+                ))
+                .contained()
+                .with_style(style.container)
+        })
+        .with_cursor_style(CursorStyle::PointingHand)
+        .on_click(MouseButton::Left, move |_, view, cx| {
+            view.select_layer(layer_ix, cx);
+        })
+    }
+}
+
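+// Tree-sitter points are zero-based; display them one-based to match editor rows and columns.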
+fn format_node_range(node: Node) -> String {
+    let start = node.start_position();
+    let end = node.end_position();
+    format!(
+        "[{}:{} - {}:{}]",
+        start.row + 1,
+        start.column + 1,
+        end.row + 1,
+        end.column + 1,
+    )
+}
+
+impl Entity for SyntaxTreeToolbarItemView {
+    type Event = ();
+}
+
+impl View for SyntaxTreeToolbarItemView {
+    fn ui_name() -> &'static str {
+        "SyntaxTreeToolbarItemView"
+    }
+
+    fn render(&mut self, cx: &mut ViewContext<'_, '_, Self>) -> gpui::AnyElement<Self> {
+        self.render_menu(cx)
+            .unwrap_or_else(|| Empty::new().into_any())
+    }
+}
+
+impl ToolbarItemView for SyntaxTreeToolbarItemView {
+    fn set_active_pane_item(
+        &mut self,
+        active_pane_item: Option<&dyn ItemHandle>,
+        cx: &mut ViewContext<Self>,
+    ) -> workspace::ToolbarItemLocation {
+        self.menu_open = false;
+        if let Some(item) = active_pane_item {
+            if let Some(view) = item.downcast::<SyntaxTreeView>() {
+                self.tree_view = Some(view.clone());
+                self.subscription = Some(cx.observe(&view, |_, _, cx| cx.notify()));
+                return ToolbarItemLocation::PrimaryLeft {
+                    flex: Some((1., false)),
+                };
+            }
+        }
+        self.tree_view = None;
+        self.subscription = None;
+        ToolbarItemLocation::Hidden
+    }
+}

crates/project/src/project.rs 🔗

@@ -7,6 +7,8 @@ pub mod worktree;
 
 #[cfg(test)]
 mod project_tests;
+#[cfg(test)]
+mod worktree_tests;
 
 use anyhow::{anyhow, Context, Result};
 use client::{proto, Client, TypedEnvelope, UserStore};

crates/project/src/worktree.rs 🔗

@@ -160,7 +160,7 @@ impl From<&RepositoryEntry> for proto::RepositoryEntry {
 
 /// This path corresponds to the 'content path' (the folder that contains the .git)
 #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
-pub struct RepositoryWorkDirectory(Arc<Path>);
+pub struct RepositoryWorkDirectory(pub(crate) Arc<Path>);
 
 impl Default for RepositoryWorkDirectory {
     fn default() -> Self {
@@ -212,7 +212,7 @@ pub struct LocalSnapshot {
     git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
 }
 
-pub struct BackgroundScannerState {
+struct BackgroundScannerState {
     snapshot: LocalSnapshot,
     /// The ids of all of the entries that were removed from the snapshot
     /// as part of the current update. These entry ids may be re-used
@@ -1755,7 +1755,7 @@ impl Snapshot {
         }
     }
 
-    fn descendent_entries<'a>(
+    pub fn descendent_entries<'a>(
         &'a self,
         include_dirs: bool,
         include_ignored: bool,
@@ -2100,6 +2100,82 @@ impl LocalSnapshot {
     }
 }
 
+impl LocalSnapshot {
+    #[cfg(test)]
+    pub fn check_invariants(&self) {
+        assert_eq!(
+            self.entries_by_path
+                .cursor::<()>()
+                .map(|e| (&e.path, e.id))
+                .collect::<Vec<_>>(),
+            self.entries_by_id
+                .cursor::<()>()
+                .map(|e| (&e.path, e.id))
+                .collect::<collections::BTreeSet<_>>()
+                .into_iter()
+                .collect::<Vec<_>>(),
+            "entries_by_path and entries_by_id are inconsistent"
+        );
+
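+        // Every file entry must appear, in order, in the `files` traversal, and every
+        // non-ignored file in the `visible_files` traversal.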
+        let mut files = self.files(true, 0);
+        let mut visible_files = self.files(false, 0);
+        for entry in self.entries_by_path.cursor::<()>() {
+            if entry.is_file() {
+                assert_eq!(files.next().unwrap().inode, entry.inode);
+                if !entry.is_ignored {
+                    assert_eq!(visible_files.next().unwrap().inode, entry.inode);
+                }
+            }
+        }
+
+        assert!(files.next().is_none());
+        assert!(visible_files.next().is_none());
+
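+        // Re-walk the tree with an explicit stack and verify that the resulting
+        // depth-first ordering matches both the `entries_by_path` cursor and the
+        // `entries(true)` traversal.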
+        let mut bfs_paths = Vec::new();
+        let mut stack = vec![Path::new("")];
+        while let Some(path) = stack.pop() {
+            bfs_paths.push(path);
+            let ix = stack.len();
+            for child_entry in self.child_entries(path) {
+                stack.insert(ix, &child_entry.path);
+            }
+        }
+
+        let dfs_paths_via_iter = self
+            .entries_by_path
+            .cursor::<()>()
+            .map(|e| e.path.as_ref())
+            .collect::<Vec<_>>();
+        assert_eq!(bfs_paths, dfs_paths_via_iter);
+
+        let dfs_paths_via_traversal = self
+            .entries(true)
+            .map(|e| e.path.as_ref())
+            .collect::<Vec<_>>();
+        assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
+
+        for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
+            let ignore_parent_path = ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
+            assert!(self.entry_for_path(&ignore_parent_path).is_some());
+            assert!(self
+                .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
+                .is_some());
+        }
+    }
+
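+    /// Collects `(path, inode, is_ignored)` tuples so that snapshots built by separate
+    /// scans (which may assign different entry ids) can be compared.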
+    #[cfg(test)]
+    pub fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
+        let mut paths = Vec::new();
+        for entry in self.entries_by_path.cursor::<()>() {
+            if include_ignored || !entry.is_ignored {
+                paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
+            }
+        }
+        paths.sort_by(|a, b| a.0.cmp(b.0));
+        paths
+    }
+}
+
 impl BackgroundScannerState {
     fn reuse_entry_id(&mut self, entry: &mut Entry) {
         if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
@@ -3877,7 +3953,7 @@ impl<'a> Iterator for ChildEntriesIter<'a> {
     }
 }
 
-struct DescendentEntriesIter<'a> {
+pub struct DescendentEntriesIter<'a> {
     parent_path: &'a Path,
     traversal: Traversal<'a>,
 }
@@ -3942,1601 +4018,3 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use fs::{FakeFs, RealFs};
-    use gpui::{executor::Deterministic, TestAppContext};
-    use pretty_assertions::assert_eq;
-    use rand::prelude::*;
-    use serde_json::json;
-    use std::{env, fmt::Write};
-    use util::{http::FakeHttpClient, test::temp_tree};
-
-    #[gpui::test]
-    async fn test_traversal(cx: &mut TestAppContext) {
-        let fs = FakeFs::new(cx.background());
-        fs.insert_tree(
-            "/root",
-            json!({
-               ".gitignore": "a/b\n",
-               "a": {
-                   "b": "",
-                   "c": "",
-               }
-            }),
-        )
-        .await;
-
-        let http_client = FakeHttpClient::with_404_response();
-        let client = cx.read(|cx| Client::new(http_client, cx));
-
-        let tree = Worktree::local(
-            client,
-            Path::new("/root"),
-            true,
-            fs,
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-            .await;
-
-        tree.read_with(cx, |tree, _| {
-            assert_eq!(
-                tree.entries(false)
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![
-                    Path::new(""),
-                    Path::new(".gitignore"),
-                    Path::new("a"),
-                    Path::new("a/c"),
-                ]
-            );
-            assert_eq!(
-                tree.entries(true)
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![
-                    Path::new(""),
-                    Path::new(".gitignore"),
-                    Path::new("a"),
-                    Path::new("a/b"),
-                    Path::new("a/c"),
-                ]
-            );
-        })
-    }
-
-    #[gpui::test]
-    async fn test_descendent_entries(cx: &mut TestAppContext) {
-        let fs = FakeFs::new(cx.background());
-        fs.insert_tree(
-            "/root",
-            json!({
-                "a": "",
-                "b": {
-                   "c": {
-                       "d": ""
-                   },
-                   "e": {}
-                },
-                "f": "",
-                "g": {
-                    "h": {}
-                },
-                "i": {
-                    "j": {
-                        "k": ""
-                    },
-                    "l": {
-
-                    }
-                },
-                ".gitignore": "i/j\n",
-            }),
-        )
-        .await;
-
-        let http_client = FakeHttpClient::with_404_response();
-        let client = cx.read(|cx| Client::new(http_client, cx));
-
-        let tree = Worktree::local(
-            client,
-            Path::new("/root"),
-            true,
-            fs,
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-            .await;
-
-        tree.read_with(cx, |tree, _| {
-            assert_eq!(
-                tree.descendent_entries(false, false, Path::new("b"))
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![Path::new("b/c/d"),]
-            );
-            assert_eq!(
-                tree.descendent_entries(true, false, Path::new("b"))
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![
-                    Path::new("b"),
-                    Path::new("b/c"),
-                    Path::new("b/c/d"),
-                    Path::new("b/e"),
-                ]
-            );
-
-            assert_eq!(
-                tree.descendent_entries(false, false, Path::new("g"))
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                Vec::<PathBuf>::new()
-            );
-            assert_eq!(
-                tree.descendent_entries(true, false, Path::new("g"))
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![Path::new("g"), Path::new("g/h"),]
-            );
-
-            assert_eq!(
-                tree.descendent_entries(false, false, Path::new("i"))
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                Vec::<PathBuf>::new()
-            );
-            assert_eq!(
-                tree.descendent_entries(false, true, Path::new("i"))
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![Path::new("i/j/k")]
-            );
-            assert_eq!(
-                tree.descendent_entries(true, false, Path::new("i"))
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![Path::new("i"), Path::new("i/l"),]
-            );
-        })
-    }
-
-    #[gpui::test(iterations = 10)]
-    async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
-        let fs = FakeFs::new(cx.background());
-        fs.insert_tree(
-            "/root",
-            json!({
-                "lib": {
-                    "a": {
-                        "a.txt": ""
-                    },
-                    "b": {
-                        "b.txt": ""
-                    }
-                }
-            }),
-        )
-        .await;
-        fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
-        fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
-
-        let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let tree = Worktree::local(
-            client,
-            Path::new("/root"),
-            true,
-            fs.clone(),
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-
-        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-            .await;
-
-        tree.read_with(cx, |tree, _| {
-            assert_eq!(
-                tree.entries(false)
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![
-                    Path::new(""),
-                    Path::new("lib"),
-                    Path::new("lib/a"),
-                    Path::new("lib/a/a.txt"),
-                    Path::new("lib/a/lib"),
-                    Path::new("lib/b"),
-                    Path::new("lib/b/b.txt"),
-                    Path::new("lib/b/lib"),
-                ]
-            );
-        });
-
-        fs.rename(
-            Path::new("/root/lib/a/lib"),
-            Path::new("/root/lib/a/lib-2"),
-            Default::default(),
-        )
-        .await
-        .unwrap();
-        executor.run_until_parked();
-        tree.read_with(cx, |tree, _| {
-            assert_eq!(
-                tree.entries(false)
-                    .map(|entry| entry.path.as_ref())
-                    .collect::<Vec<_>>(),
-                vec![
-                    Path::new(""),
-                    Path::new("lib"),
-                    Path::new("lib/a"),
-                    Path::new("lib/a/a.txt"),
-                    Path::new("lib/a/lib-2"),
-                    Path::new("lib/b"),
-                    Path::new("lib/b/b.txt"),
-                    Path::new("lib/b/lib"),
-                ]
-            );
-        });
-    }
-
-    #[gpui::test]
-    async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
-        // .gitignores are handled explicitly by Zed and do not use the git
-        // machinery that the git_tests module checks
-        let parent_dir = temp_tree(json!({
-            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
-            "tree": {
-                ".git": {},
-                ".gitignore": "ignored-dir\n",
-                "tracked-dir": {
-                    "tracked-file1": "",
-                    "ancestor-ignored-file1": "",
-                },
-                "ignored-dir": {
-                    "ignored-file1": ""
-                }
-            }
-        }));
-        let dir = parent_dir.path().join("tree");
-
-        let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-        let tree = Worktree::local(
-            client,
-            dir.as_path(),
-            true,
-            Arc::new(RealFs),
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-            .await;
-        tree.flush_fs_events(cx).await;
-        cx.read(|cx| {
-            let tree = tree.read(cx);
-            assert!(
-                !tree
-                    .entry_for_path("tracked-dir/tracked-file1")
-                    .unwrap()
-                    .is_ignored
-            );
-            assert!(
-                tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
-                    .unwrap()
-                    .is_ignored
-            );
-            assert!(
-                tree.entry_for_path("ignored-dir/ignored-file1")
-                    .unwrap()
-                    .is_ignored
-            );
-        });
-
-        std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
-        std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
-        std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
-        tree.flush_fs_events(cx).await;
-        cx.read(|cx| {
-            let tree = tree.read(cx);
-            assert!(
-                !tree
-                    .entry_for_path("tracked-dir/tracked-file2")
-                    .unwrap()
-                    .is_ignored
-            );
-            assert!(
-                tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
-                    .unwrap()
-                    .is_ignored
-            );
-            assert!(
-                tree.entry_for_path("ignored-dir/ignored-file2")
-                    .unwrap()
-                    .is_ignored
-            );
-            assert!(tree.entry_for_path(".git").unwrap().is_ignored);
-        });
-    }
-
-    #[gpui::test]
-    async fn test_write_file(cx: &mut TestAppContext) {
-        let dir = temp_tree(json!({
-            ".git": {},
-            ".gitignore": "ignored-dir\n",
-            "tracked-dir": {},
-            "ignored-dir": {}
-        }));
-
-        let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-        let tree = Worktree::local(
-            client,
-            dir.path(),
-            true,
-            Arc::new(RealFs),
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-            .await;
-        tree.flush_fs_events(cx).await;
-
-        tree.update(cx, |tree, cx| {
-            tree.as_local().unwrap().write_file(
-                Path::new("tracked-dir/file.txt"),
-                "hello".into(),
-                Default::default(),
-                cx,
-            )
-        })
-        .await
-        .unwrap();
-        tree.update(cx, |tree, cx| {
-            tree.as_local().unwrap().write_file(
-                Path::new("ignored-dir/file.txt"),
-                "world".into(),
-                Default::default(),
-                cx,
-            )
-        })
-        .await
-        .unwrap();
-
-        tree.read_with(cx, |tree, _| {
-            let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
-            let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
-            assert!(!tracked.is_ignored);
-            assert!(ignored.is_ignored);
-        });
-    }
-
-    #[gpui::test(iterations = 30)]
-    async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
-        let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-        let fs = FakeFs::new(cx.background());
-        fs.insert_tree(
-            "/root",
-            json!({
-                "b": {},
-                "c": {},
-                "d": {},
-            }),
-        )
-        .await;
-
-        let tree = Worktree::local(
-            client,
-            "/root".as_ref(),
-            true,
-            fs,
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-
-        let snapshot1 = tree.update(cx, |tree, cx| {
-            let tree = tree.as_local_mut().unwrap();
-            let snapshot = Arc::new(Mutex::new(tree.snapshot()));
-            let _ = tree.observe_updates(0, cx, {
-                let snapshot = snapshot.clone();
-                move |update| {
-                    snapshot.lock().apply_remote_update(update).unwrap();
-                    async { true }
-                }
-            });
-            snapshot
-        });
-
-        let entry = tree
-            .update(cx, |tree, cx| {
-                tree.as_local_mut()
-                    .unwrap()
-                    .create_entry("a/e".as_ref(), true, cx)
-            })
-            .await
-            .unwrap();
-        assert!(entry.is_dir());
-
-        cx.foreground().run_until_parked();
-        tree.read_with(cx, |tree, _| {
-            assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
-        });
-
-        let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
-        assert_eq!(
-            snapshot1.lock().entries(true).collect::<Vec<_>>(),
-            snapshot2.entries(true).collect::<Vec<_>>()
-        );
-    }
-
-    #[gpui::test(iterations = 100)]
-    async fn test_random_worktree_operations_during_initial_scan(
-        cx: &mut TestAppContext,
-        mut rng: StdRng,
-    ) {
-        let operations = env::var("OPERATIONS")
-            .map(|o| o.parse().unwrap())
-            .unwrap_or(5);
-        let initial_entries = env::var("INITIAL_ENTRIES")
-            .map(|o| o.parse().unwrap())
-            .unwrap_or(20);
-
-        let root_dir = Path::new("/test");
-        let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-        fs.as_fake().insert_tree(root_dir, json!({})).await;
-        for _ in 0..initial_entries {
-            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-        }
-        log::info!("generated initial tree");
-
-        let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let worktree = Worktree::local(
-            client.clone(),
-            root_dir,
-            true,
-            fs.clone(),
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-
-        let mut snapshots =
-            vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
-        let updates = Arc::new(Mutex::new(Vec::new()));
-        worktree.update(cx, |tree, cx| {
-            check_worktree_change_events(tree, cx);
-
-            let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-                let updates = updates.clone();
-                move |update| {
-                    updates.lock().push(update);
-                    async { true }
-                }
-            });
-        });
-
-        for _ in 0..operations {
-            worktree
-                .update(cx, |worktree, cx| {
-                    randomly_mutate_worktree(worktree, &mut rng, cx)
-                })
-                .await
-                .log_err();
-            worktree.read_with(cx, |tree, _| {
-                tree.as_local().unwrap().snapshot.check_invariants()
-            });
-
-            if rng.gen_bool(0.6) {
-                snapshots
-                    .push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
-            }
-        }
-
-        worktree
-            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-            .await;
-
-        cx.foreground().run_until_parked();
-
-        let final_snapshot = worktree.read_with(cx, |tree, _| {
-            let tree = tree.as_local().unwrap();
-            tree.snapshot.check_invariants();
-            tree.snapshot()
-        });
-
-        for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
-            let mut updated_snapshot = snapshot.clone();
-            for update in updates.lock().iter() {
-                if update.scan_id >= updated_snapshot.scan_id() as u64 {
-                    updated_snapshot
-                        .apply_remote_update(update.clone())
-                        .unwrap();
-                }
-            }
-
-            assert_eq!(
-                updated_snapshot.entries(true).collect::<Vec<_>>(),
-                final_snapshot.entries(true).collect::<Vec<_>>(),
-                "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
-            );
-        }
-    }
-
-    #[gpui::test(iterations = 100)]
-    async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
-        let operations = env::var("OPERATIONS")
-            .map(|o| o.parse().unwrap())
-            .unwrap_or(40);
-        let initial_entries = env::var("INITIAL_ENTRIES")
-            .map(|o| o.parse().unwrap())
-            .unwrap_or(20);
-
-        let root_dir = Path::new("/test");
-        let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-        fs.as_fake().insert_tree(root_dir, json!({})).await;
-        for _ in 0..initial_entries {
-            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-        }
-        log::info!("generated initial tree");
-
-        let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let worktree = Worktree::local(
-            client.clone(),
-            root_dir,
-            true,
-            fs.clone(),
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-
-        let updates = Arc::new(Mutex::new(Vec::new()));
-        worktree.update(cx, |tree, cx| {
-            check_worktree_change_events(tree, cx);
-
-            let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-                let updates = updates.clone();
-                move |update| {
-                    updates.lock().push(update);
-                    async { true }
-                }
-            });
-        });
-
-        worktree
-            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-            .await;
-
-        fs.as_fake().pause_events();
-        let mut snapshots = Vec::new();
-        let mut mutations_len = operations;
-        while mutations_len > 1 {
-            if rng.gen_bool(0.2) {
-                worktree
-                    .update(cx, |worktree, cx| {
-                        randomly_mutate_worktree(worktree, &mut rng, cx)
-                    })
-                    .await
-                    .log_err();
-            } else {
-                randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-            }
-
-            let buffered_event_count = fs.as_fake().buffered_event_count();
-            if buffered_event_count > 0 && rng.gen_bool(0.3) {
-                let len = rng.gen_range(0..=buffered_event_count);
-                log::info!("flushing {} events", len);
-                fs.as_fake().flush_events(len);
-            } else {
-                randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
-                mutations_len -= 1;
-            }
-
-            cx.foreground().run_until_parked();
-            if rng.gen_bool(0.2) {
-                log::info!("storing snapshot {}", snapshots.len());
-                let snapshot =
-                    worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-                snapshots.push(snapshot);
-            }
-        }
-
-        log::info!("quiescing");
-        fs.as_fake().flush_events(usize::MAX);
-        cx.foreground().run_until_parked();
-        let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-        snapshot.check_invariants();
-
-        {
-            let new_worktree = Worktree::local(
-                client.clone(),
-                root_dir,
-                true,
-                fs.clone(),
-                Default::default(),
-                &mut cx.to_async(),
-            )
-            .await
-            .unwrap();
-            new_worktree
-                .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-                .await;
-            let new_snapshot =
-                new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-            assert_eq!(
-                snapshot.entries_without_ids(true),
-                new_snapshot.entries_without_ids(true)
-            );
-        }
-
-        for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
-            for update in updates.lock().iter() {
-                if update.scan_id >= prev_snapshot.scan_id() as u64 {
-                    prev_snapshot.apply_remote_update(update.clone()).unwrap();
-                }
-            }
-
-            assert_eq!(
-                prev_snapshot.entries(true).collect::<Vec<_>>(),
-                snapshot.entries(true).collect::<Vec<_>>(),
-                "wrong updates after snapshot {i}: {updates:#?}",
-            );
-        }
-    }
-
-    // The worktree's `UpdatedEntries` event can be used to follow along with
-    // all changes to the worktree's snapshot.
-    fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
-        let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
-        cx.subscribe(&cx.handle(), move |tree, _, event, _| {
-            if let Event::UpdatedEntries(changes) = event {
-                for (path, _, change_type) in changes.iter() {
-                    let entry = tree.entry_for_path(&path).cloned();
-                    let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
-                        Ok(ix) | Err(ix) => ix,
-                    };
-                    match change_type {
-                        PathChange::Loaded => entries.insert(ix, entry.unwrap()),
-                        PathChange::Added => entries.insert(ix, entry.unwrap()),
-                        PathChange::Removed => drop(entries.remove(ix)),
-                        PathChange::Updated => {
-                            let entry = entry.unwrap();
-                            let existing_entry = entries.get_mut(ix).unwrap();
-                            assert_eq!(existing_entry.path, entry.path);
-                            *existing_entry = entry;
-                        }
-                        PathChange::AddedOrUpdated => {
-                            let entry = entry.unwrap();
-                            if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
-                                *entries.get_mut(ix).unwrap() = entry;
-                            } else {
-                                entries.insert(ix, entry);
-                            }
-                        }
-                    }
-                }
-
-                let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
-                assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
-            }
-        })
-        .detach();
-    }
-
-    fn randomly_mutate_worktree(
-        worktree: &mut Worktree,
-        rng: &mut impl Rng,
-        cx: &mut ModelContext<Worktree>,
-    ) -> Task<Result<()>> {
-        log::info!("mutating worktree");
-        let worktree = worktree.as_local_mut().unwrap();
-        let snapshot = worktree.snapshot();
-        let entry = snapshot.entries(false).choose(rng).unwrap();
-
-        match rng.gen_range(0_u32..100) {
-            0..=33 if entry.path.as_ref() != Path::new("") => {
-                log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
-                worktree.delete_entry(entry.id, cx).unwrap()
-            }
-            ..=66 if entry.path.as_ref() != Path::new("") => {
-                let other_entry = snapshot.entries(false).choose(rng).unwrap();
-                let new_parent_path = if other_entry.is_dir() {
-                    other_entry.path.clone()
-                } else {
-                    other_entry.path.parent().unwrap().into()
-                };
-                let mut new_path = new_parent_path.join(gen_name(rng));
-                if new_path.starts_with(&entry.path) {
-                    new_path = gen_name(rng).into();
-                }
-
-                log::info!(
-                    "renaming entry {:?} ({}) to {:?}",
-                    entry.path,
-                    entry.id.0,
-                    new_path
-                );
-                let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
-                cx.foreground().spawn(async move {
-                    task.await?;
-                    Ok(())
-                })
-            }
-            _ => {
-                let task = if entry.is_dir() {
-                    let child_path = entry.path.join(gen_name(rng));
-                    let is_dir = rng.gen_bool(0.3);
-                    log::info!(
-                        "creating {} at {:?}",
-                        if is_dir { "dir" } else { "file" },
-                        child_path,
-                    );
-                    worktree.create_entry(child_path, is_dir, cx)
-                } else {
-                    log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
-                    worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
-                };
-                cx.foreground().spawn(async move {
-                    task.await?;
-                    Ok(())
-                })
-            }
-        }
-    }
-
-    async fn randomly_mutate_fs(
-        fs: &Arc<dyn Fs>,
-        root_path: &Path,
-        insertion_probability: f64,
-        rng: &mut impl Rng,
-    ) {
-        log::info!("mutating fs");
-        let mut files = Vec::new();
-        let mut dirs = Vec::new();
-        for path in fs.as_fake().paths(false) {
-            if path.starts_with(root_path) {
-                if fs.is_file(&path).await {
-                    files.push(path);
-                } else {
-                    dirs.push(path);
-                }
-            }
-        }
-
-        if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
-            let path = dirs.choose(rng).unwrap();
-            let new_path = path.join(gen_name(rng));
-
-            if rng.gen() {
-                log::info!(
-                    "creating dir {:?}",
-                    new_path.strip_prefix(root_path).unwrap()
-                );
-                fs.create_dir(&new_path).await.unwrap();
-            } else {
-                log::info!(
-                    "creating file {:?}",
-                    new_path.strip_prefix(root_path).unwrap()
-                );
-                fs.create_file(&new_path, Default::default()).await.unwrap();
-            }
-        } else if rng.gen_bool(0.05) {
-            let ignore_dir_path = dirs.choose(rng).unwrap();
-            let ignore_path = ignore_dir_path.join(&*GITIGNORE);
-
-            let subdirs = dirs
-                .iter()
-                .filter(|d| d.starts_with(&ignore_dir_path))
-                .cloned()
-                .collect::<Vec<_>>();
-            let subfiles = files
-                .iter()
-                .filter(|d| d.starts_with(&ignore_dir_path))
-                .cloned()
-                .collect::<Vec<_>>();
-            let files_to_ignore = {
-                let len = rng.gen_range(0..=subfiles.len());
-                subfiles.choose_multiple(rng, len)
-            };
-            let dirs_to_ignore = {
-                let len = rng.gen_range(0..subdirs.len());
-                subdirs.choose_multiple(rng, len)
-            };
-
-            let mut ignore_contents = String::new();
-            for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
-                writeln!(
-                    ignore_contents,
-                    "{}",
-                    path_to_ignore
-                        .strip_prefix(&ignore_dir_path)
-                        .unwrap()
-                        .to_str()
-                        .unwrap()
-                )
-                .unwrap();
-            }
-            log::info!(
-                "creating gitignore {:?} with contents:\n{}",
-                ignore_path.strip_prefix(&root_path).unwrap(),
-                ignore_contents
-            );
-            fs.save(
-                &ignore_path,
-                &ignore_contents.as_str().into(),
-                Default::default(),
-            )
-            .await
-            .unwrap();
-        } else {
-            let old_path = {
-                let file_path = files.choose(rng);
-                let dir_path = dirs[1..].choose(rng);
-                file_path.into_iter().chain(dir_path).choose(rng).unwrap()
-            };
-
-            let is_rename = rng.gen();
-            if is_rename {
-                let new_path_parent = dirs
-                    .iter()
-                    .filter(|d| !d.starts_with(old_path))
-                    .choose(rng)
-                    .unwrap();
-
-                let overwrite_existing_dir =
-                    !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
-                let new_path = if overwrite_existing_dir {
-                    fs.remove_dir(
-                        &new_path_parent,
-                        RemoveOptions {
-                            recursive: true,
-                            ignore_if_not_exists: true,
-                        },
-                    )
-                    .await
-                    .unwrap();
-                    new_path_parent.to_path_buf()
-                } else {
-                    new_path_parent.join(gen_name(rng))
-                };
-
-                log::info!(
-                    "renaming {:?} to {}{:?}",
-                    old_path.strip_prefix(&root_path).unwrap(),
-                    if overwrite_existing_dir {
-                        "overwrite "
-                    } else {
-                        ""
-                    },
-                    new_path.strip_prefix(&root_path).unwrap()
-                );
-                fs.rename(
-                    &old_path,
-                    &new_path,
-                    fs::RenameOptions {
-                        overwrite: true,
-                        ignore_if_exists: true,
-                    },
-                )
-                .await
-                .unwrap();
-            } else if fs.is_file(&old_path).await {
-                log::info!(
-                    "deleting file {:?}",
-                    old_path.strip_prefix(&root_path).unwrap()
-                );
-                fs.remove_file(old_path, Default::default()).await.unwrap();
-            } else {
-                log::info!(
-                    "deleting dir {:?}",
-                    old_path.strip_prefix(&root_path).unwrap()
-                );
-                fs.remove_dir(
-                    &old_path,
-                    RemoveOptions {
-                        recursive: true,
-                        ignore_if_not_exists: true,
-                    },
-                )
-                .await
-                .unwrap();
-            }
-        }
-    }
-
-    fn gen_name(rng: &mut impl Rng) -> String {
-        (0..6)
-            .map(|_| rng.sample(rand::distributions::Alphanumeric))
-            .map(char::from)
-            .collect()
-    }
-
-    impl LocalSnapshot {
-        fn check_invariants(&self) {
-            assert_eq!(
-                self.entries_by_path
-                    .cursor::<()>()
-                    .map(|e| (&e.path, e.id))
-                    .collect::<Vec<_>>(),
-                self.entries_by_id
-                    .cursor::<()>()
-                    .map(|e| (&e.path, e.id))
-                    .collect::<collections::BTreeSet<_>>()
-                    .into_iter()
-                    .collect::<Vec<_>>(),
-                "entries_by_path and entries_by_id are inconsistent"
-            );
-
-            let mut files = self.files(true, 0);
-            let mut visible_files = self.files(false, 0);
-            for entry in self.entries_by_path.cursor::<()>() {
-                if entry.is_file() {
-                    assert_eq!(files.next().unwrap().inode, entry.inode);
-                    if !entry.is_ignored {
-                        assert_eq!(visible_files.next().unwrap().inode, entry.inode);
-                    }
-                }
-            }
-
-            assert!(files.next().is_none());
-            assert!(visible_files.next().is_none());
-
-            let mut bfs_paths = Vec::new();
-            let mut stack = vec![Path::new("")];
-            while let Some(path) = stack.pop() {
-                bfs_paths.push(path);
-                let ix = stack.len();
-                for child_entry in self.child_entries(path) {
-                    stack.insert(ix, &child_entry.path);
-                }
-            }
-
-            let dfs_paths_via_iter = self
-                .entries_by_path
-                .cursor::<()>()
-                .map(|e| e.path.as_ref())
-                .collect::<Vec<_>>();
-            assert_eq!(bfs_paths, dfs_paths_via_iter);
-
-            let dfs_paths_via_traversal = self
-                .entries(true)
-                .map(|e| e.path.as_ref())
-                .collect::<Vec<_>>();
-            assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
-
-            for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
-                let ignore_parent_path =
-                    ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
-                assert!(self.entry_for_path(&ignore_parent_path).is_some());
-                assert!(self
-                    .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
-                    .is_some());
-            }
-        }
-
-        fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
-            let mut paths = Vec::new();
-            for entry in self.entries_by_path.cursor::<()>() {
-                if include_ignored || !entry.is_ignored {
-                    paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
-                }
-            }
-            paths.sort_by(|a, b| a.0.cmp(b.0));
-            paths
-        }
-    }
-
-    mod git_tests {
-        use super::*;
-        use pretty_assertions::assert_eq;
-
-        #[gpui::test]
-        async fn test_rename_work_directory(cx: &mut TestAppContext) {
-            let root = temp_tree(json!({
-                "projects": {
-                    "project1": {
-                        "a": "",
-                        "b": "",
-                    }
-                },
-
-            }));
-            let root_path = root.path();
-
-            let http_client = FakeHttpClient::with_404_response();
-            let client = cx.read(|cx| Client::new(http_client, cx));
-            let tree = Worktree::local(
-                client,
-                root_path,
-                true,
-                Arc::new(RealFs),
-                Default::default(),
-                &mut cx.to_async(),
-            )
-            .await
-            .unwrap();
-
-            let repo = git_init(&root_path.join("projects/project1"));
-            git_add("a", &repo);
-            git_commit("init", &repo);
-            std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
-
-            cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-                .await;
-
-            tree.flush_fs_events(cx).await;
-
-            cx.read(|cx| {
-                let tree = tree.read(cx);
-                let (work_dir, _) = tree.repositories().next().unwrap();
-                assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
-                assert_eq!(
-                    tree.status_for_file(Path::new("projects/project1/a")),
-                    Some(GitFileStatus::Modified)
-                );
-                assert_eq!(
-                    tree.status_for_file(Path::new("projects/project1/b")),
-                    Some(GitFileStatus::Added)
-                );
-            });
-
-            std::fs::rename(
-                root_path.join("projects/project1"),
-                root_path.join("projects/project2"),
-            )
-            .ok();
-            tree.flush_fs_events(cx).await;
-
-            cx.read(|cx| {
-                let tree = tree.read(cx);
-                let (work_dir, _) = tree.repositories().next().unwrap();
-                assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
-                assert_eq!(
-                    tree.status_for_file(Path::new("projects/project2/a")),
-                    Some(GitFileStatus::Modified)
-                );
-                assert_eq!(
-                    tree.status_for_file(Path::new("projects/project2/b")),
-                    Some(GitFileStatus::Added)
-                );
-            });
-        }
-
-        #[gpui::test]
-        async fn test_git_repository_for_path(cx: &mut TestAppContext) {
-            let root = temp_tree(json!({
-                "c.txt": "",
-                "dir1": {
-                    ".git": {},
-                    "deps": {
-                        "dep1": {
-                            ".git": {},
-                            "src": {
-                                "a.txt": ""
-                            }
-                        }
-                    },
-                    "src": {
-                        "b.txt": ""
-                    }
-                },
-            }));
-
-            let http_client = FakeHttpClient::with_404_response();
-            let client = cx.read(|cx| Client::new(http_client, cx));
-            let tree = Worktree::local(
-                client,
-                root.path(),
-                true,
-                Arc::new(RealFs),
-                Default::default(),
-                &mut cx.to_async(),
-            )
-            .await
-            .unwrap();
-
-            cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-                .await;
-            tree.flush_fs_events(cx).await;
-
-            tree.read_with(cx, |tree, _cx| {
-                let tree = tree.as_local().unwrap();
-
-                assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
-
-                let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
-                assert_eq!(
-                    entry
-                        .work_directory(tree)
-                        .map(|directory| directory.as_ref().to_owned()),
-                    Some(Path::new("dir1").to_owned())
-                );
-
-                let entry = tree
-                    .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
-                    .unwrap();
-                assert_eq!(
-                    entry
-                        .work_directory(tree)
-                        .map(|directory| directory.as_ref().to_owned()),
-                    Some(Path::new("dir1/deps/dep1").to_owned())
-                );
-
-                let entries = tree.files(false, 0);
-
-                let paths_with_repos = tree
-                    .entries_with_repositories(entries)
-                    .map(|(entry, repo)| {
-                        (
-                            entry.path.as_ref(),
-                            repo.and_then(|repo| {
-                                repo.work_directory(&tree)
-                                    .map(|work_directory| work_directory.0.to_path_buf())
-                            }),
-                        )
-                    })
-                    .collect::<Vec<_>>();
-
-                assert_eq!(
-                    paths_with_repos,
-                    &[
-                        (Path::new("c.txt"), None),
-                        (
-                            Path::new("dir1/deps/dep1/src/a.txt"),
-                            Some(Path::new("dir1/deps/dep1").into())
-                        ),
-                        (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
-                    ]
-                );
-            });
-
-            let repo_update_events = Arc::new(Mutex::new(vec![]));
-            tree.update(cx, |_, cx| {
-                let repo_update_events = repo_update_events.clone();
-                cx.subscribe(&tree, move |_, _, event, _| {
-                    if let Event::UpdatedGitRepositories(update) = event {
-                        repo_update_events.lock().push(update.clone());
-                    }
-                })
-                .detach();
-            });
-
-            std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
-            tree.flush_fs_events(cx).await;
-
-            assert_eq!(
-                repo_update_events.lock()[0]
-                    .iter()
-                    .map(|e| e.0.clone())
-                    .collect::<Vec<Arc<Path>>>(),
-                vec![Path::new("dir1").into()]
-            );
-
-            std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
-            tree.flush_fs_events(cx).await;
-
-            tree.read_with(cx, |tree, _cx| {
-                let tree = tree.as_local().unwrap();
-
-                assert!(tree
-                    .repository_for_path("dir1/src/b.txt".as_ref())
-                    .is_none());
-            });
-        }
-
-        #[gpui::test]
-        async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
-            const IGNORE_RULE: &'static str = "**/target";
-
-            let root = temp_tree(json!({
-                "project": {
-                    "a.txt": "a",
-                    "b.txt": "bb",
-                    "c": {
-                        "d": {
-                            "e.txt": "eee"
-                        }
-                    },
-                    "f.txt": "ffff",
-                    "target": {
-                        "build_file": "???"
-                    },
-                    ".gitignore": IGNORE_RULE
-                },
-
-            }));
-
-            let http_client = FakeHttpClient::with_404_response();
-            let client = cx.read(|cx| Client::new(http_client, cx));
-            let tree = Worktree::local(
-                client,
-                root.path(),
-                true,
-                Arc::new(RealFs),
-                Default::default(),
-                &mut cx.to_async(),
-            )
-            .await
-            .unwrap();
-
-            cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-                .await;
-
-            const A_TXT: &'static str = "a.txt";
-            const B_TXT: &'static str = "b.txt";
-            const E_TXT: &'static str = "c/d/e.txt";
-            const F_TXT: &'static str = "f.txt";
-            const DOTGITIGNORE: &'static str = ".gitignore";
-            const BUILD_FILE: &'static str = "target/build_file";
-            let project_path: &Path = &Path::new("project");
-
-            let work_dir = root.path().join("project");
-            let mut repo = git_init(work_dir.as_path());
-            repo.add_ignore_rule(IGNORE_RULE).unwrap();
-            git_add(Path::new(A_TXT), &repo);
-            git_add(Path::new(E_TXT), &repo);
-            git_add(Path::new(DOTGITIGNORE), &repo);
-            git_commit("Initial commit", &repo);
-
-            tree.flush_fs_events(cx).await;
-            deterministic.run_until_parked();
-
-            // Check that the right git state is observed on startup
-            tree.read_with(cx, |tree, _cx| {
-                let snapshot = tree.snapshot();
-                assert_eq!(snapshot.repository_entries.iter().count(), 1);
-                let (dir, _) = snapshot.repository_entries.iter().next().unwrap();
-                assert_eq!(dir.0.as_ref(), Path::new("project"));
-
-                assert_eq!(
-                    snapshot.status_for_file(project_path.join(B_TXT)),
-                    Some(GitFileStatus::Added)
-                );
-                assert_eq!(
-                    snapshot.status_for_file(project_path.join(F_TXT)),
-                    Some(GitFileStatus::Added)
-                );
-            });
-
-            std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
-
-            tree.flush_fs_events(cx).await;
-            deterministic.run_until_parked();
-
-            tree.read_with(cx, |tree, _cx| {
-                let snapshot = tree.snapshot();
-
-                assert_eq!(
-                    snapshot.status_for_file(project_path.join(A_TXT)),
-                    Some(GitFileStatus::Modified)
-                );
-            });
-
-            git_add(Path::new(A_TXT), &repo);
-            git_add(Path::new(B_TXT), &repo);
-            git_commit("Committing modified and added", &repo);
-            tree.flush_fs_events(cx).await;
-            deterministic.run_until_parked();
-
-            // Check that repo only changes are tracked
-            tree.read_with(cx, |tree, _cx| {
-                let snapshot = tree.snapshot();
-
-                assert_eq!(
-                    snapshot.status_for_file(project_path.join(F_TXT)),
-                    Some(GitFileStatus::Added)
-                );
-
-                assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
-                assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-            });
-
-            git_reset(0, &repo);
-            git_remove_index(Path::new(B_TXT), &repo);
-            git_stash(&mut repo);
-            std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
-            std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
-            tree.flush_fs_events(cx).await;
-            deterministic.run_until_parked();
-
-            // Check that more complex repo changes are tracked
-            tree.read_with(cx, |tree, _cx| {
-                let snapshot = tree.snapshot();
-
-                assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-                assert_eq!(
-                    snapshot.status_for_file(project_path.join(B_TXT)),
-                    Some(GitFileStatus::Added)
-                );
-                assert_eq!(
-                    snapshot.status_for_file(project_path.join(E_TXT)),
-                    Some(GitFileStatus::Modified)
-                );
-            });
-
-            std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
-            std::fs::remove_dir_all(work_dir.join("c")).unwrap();
-            std::fs::write(
-                work_dir.join(DOTGITIGNORE),
-                [IGNORE_RULE, "f.txt"].join("\n"),
-            )
-            .unwrap();
-
-            git_add(Path::new(DOTGITIGNORE), &repo);
-            git_commit("Committing modified git ignore", &repo);
-
-            tree.flush_fs_events(cx).await;
-            deterministic.run_until_parked();
-
-            let mut renamed_dir_name = "first_directory/second_directory";
-            const RENAMED_FILE: &'static str = "rf.txt";
-
-            std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
-            std::fs::write(
-                work_dir.join(renamed_dir_name).join(RENAMED_FILE),
-                "new-contents",
-            )
-            .unwrap();
-
-            tree.flush_fs_events(cx).await;
-            deterministic.run_until_parked();
-
-            tree.read_with(cx, |tree, _cx| {
-                let snapshot = tree.snapshot();
-                assert_eq!(
-                    snapshot
-                        .status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
-                    Some(GitFileStatus::Added)
-                );
-            });
-
-            renamed_dir_name = "new_first_directory/second_directory";
-
-            std::fs::rename(
-                work_dir.join("first_directory"),
-                work_dir.join("new_first_directory"),
-            )
-            .unwrap();
-
-            tree.flush_fs_events(cx).await;
-            deterministic.run_until_parked();
-
-            tree.read_with(cx, |tree, _cx| {
-                let snapshot = tree.snapshot();
-
-                assert_eq!(
-                    snapshot.status_for_file(
-                        project_path
-                            .join(Path::new(renamed_dir_name))
-                            .join(RENAMED_FILE)
-                    ),
-                    Some(GitFileStatus::Added)
-                );
-            });
-        }
-
-        #[gpui::test]
-        async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
-            let fs = FakeFs::new(cx.background());
-            fs.insert_tree(
-                "/root",
-                json!({
-                    ".git": {},
-                    "a": {
-                        "b": {
-                            "c1.txt": "",
-                            "c2.txt": "",
-                        },
-                        "d": {
-                            "e1.txt": "",
-                            "e2.txt": "",
-                            "e3.txt": "",
-                        }
-                    },
-                    "f": {
-                        "no-status.txt": ""
-                    },
-                    "g": {
-                        "h1.txt": "",
-                        "h2.txt": ""
-                    },
-
-                }),
-            )
-            .await;
-
-            fs.set_status_for_repo_via_git_operation(
-                &Path::new("/root/.git"),
-                &[
-                    (Path::new("a/b/c1.txt"), GitFileStatus::Added),
-                    (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
-                    (Path::new("g/h2.txt"), GitFileStatus::Conflict),
-                ],
-            );
-
-            let http_client = FakeHttpClient::with_404_response();
-            let client = cx.read(|cx| Client::new(http_client, cx));
-            let tree = Worktree::local(
-                client,
-                Path::new("/root"),
-                true,
-                fs.clone(),
-                Default::default(),
-                &mut cx.to_async(),
-            )
-            .await
-            .unwrap();
-
-            cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-                .await;
-
-            cx.foreground().run_until_parked();
-            let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
-
-            check_propagated_statuses(
-                &snapshot,
-                &[
-                    (Path::new(""), Some(GitFileStatus::Conflict)),
-                    (Path::new("a"), Some(GitFileStatus::Modified)),
-                    (Path::new("a/b"), Some(GitFileStatus::Added)),
-                    (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-                    (Path::new("a/b/c2.txt"), None),
-                    (Path::new("a/d"), Some(GitFileStatus::Modified)),
-                    (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-                    (Path::new("f"), None),
-                    (Path::new("f/no-status.txt"), None),
-                    (Path::new("g"), Some(GitFileStatus::Conflict)),
-                    (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
-                ],
-            );
-
-            check_propagated_statuses(
-                &snapshot,
-                &[
-                    (Path::new("a/b"), Some(GitFileStatus::Added)),
-                    (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-                    (Path::new("a/b/c2.txt"), None),
-                    (Path::new("a/d"), Some(GitFileStatus::Modified)),
-                    (Path::new("a/d/e1.txt"), None),
-                    (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-                    (Path::new("f"), None),
-                    (Path::new("f/no-status.txt"), None),
-                    (Path::new("g"), Some(GitFileStatus::Conflict)),
-                ],
-            );
-
-            check_propagated_statuses(
-                &snapshot,
-                &[
-                    (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-                    (Path::new("a/b/c2.txt"), None),
-                    (Path::new("a/d/e1.txt"), None),
-                    (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-                    (Path::new("f/no-status.txt"), None),
-                ],
-            );
-
-            #[track_caller]
-            fn check_propagated_statuses(
-                snapshot: &Snapshot,
-                expected_statuses: &[(&Path, Option<GitFileStatus>)],
-            ) {
-                let mut entries = expected_statuses
-                    .iter()
-                    .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
-                    .collect::<Vec<_>>();
-                snapshot.propagate_git_statuses(&mut entries);
-                assert_eq!(
-                    entries
-                        .iter()
-                        .map(|e| (e.path.as_ref(), e.git_status))
-                        .collect::<Vec<_>>(),
-                    expected_statuses
-                );
-            }
-        }
-
-        #[track_caller]
-        fn git_init(path: &Path) -> git2::Repository {
-            git2::Repository::init(path).expect("Failed to initialize git repository")
-        }
-
-        #[track_caller]
-        fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
-            let path = path.as_ref();
-            let mut index = repo.index().expect("Failed to get index");
-            index.add_path(path).expect("Failed to add path");
-            index.write().expect("Failed to write index");
-        }
-
-        #[track_caller]
-        fn git_remove_index(path: &Path, repo: &git2::Repository) {
-            let mut index = repo.index().expect("Failed to get index");
-            index.remove_path(path).expect("Failed to remove path");
-            index.write().expect("Failed to write index");
-        }
-
-        #[track_caller]
-        fn git_commit(msg: &'static str, repo: &git2::Repository) {
-            use git2::Signature;
-
-            let signature = Signature::now("test", "test@zed.dev").unwrap();
-            let oid = repo.index().unwrap().write_tree().unwrap();
-            let tree = repo.find_tree(oid).unwrap();
-            if let Ok(head) = repo.head() {
-                let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
-
-                let parent_commit = parent_obj.as_commit().unwrap();
-
-                repo.commit(
-                    Some("HEAD"),
-                    &signature,
-                    &signature,
-                    msg,
-                    &tree,
-                    &[parent_commit],
-                )
-                .expect("Failed to commit with parent");
-            } else {
-                repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
-                    .expect("Failed to commit");
-            }
-        }
-
-        #[track_caller]
-        fn git_stash(repo: &mut git2::Repository) {
-            use git2::Signature;
-
-            let signature = Signature::now("test", "test@zed.dev").unwrap();
-            repo.stash_save(&signature, "N/A", None)
-                .expect("Failed to stash");
-        }
-
-        #[track_caller]
-        fn git_reset(offset: usize, repo: &git2::Repository) {
-            let head = repo.head().expect("Couldn't get repo head");
-            let object = head.peel(git2::ObjectType::Commit).unwrap();
-            let commit = object.as_commit().unwrap();
-            let new_head = commit
-                .parents()
-                .inspect(|parent| {
-                    parent.message();
-                })
-                .skip(offset)
-                .next()
-                .expect("Not enough history");
-            repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
-                .expect("Could not reset");
-        }
-
-        #[allow(dead_code)]
-        #[track_caller]
-        fn git_status(repo: &git2::Repository) -> HashMap<String, git2::Status> {
-            repo.statuses(None)
-                .unwrap()
-                .iter()
-                .map(|status| (status.path().unwrap().to_string(), status.status()))
-                .collect()
-        }
-    }
-}

crates/project/src/worktree_tests.rs ๐Ÿ”—

@@ -0,0 +1,1523 @@
+use crate::{
+    worktree::{Event, Snapshot, WorktreeHandle},
+    EntryKind, PathChange, Worktree,
+};
+use anyhow::Result;
+use client::Client;
+use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
+use git::GITIGNORE;
+use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
+use parking_lot::Mutex;
+use pretty_assertions::assert_eq;
+use rand::prelude::*;
+use serde_json::json;
+use std::{
+    env,
+    fmt::Write,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
+use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
+
+#[gpui::test]
+async fn test_traversal(cx: &mut TestAppContext) {
+    let fs = FakeFs::new(cx.background());
+    fs.insert_tree(
+        "/root",
+        json!({
+           ".gitignore": "a/b\n",
+           "a": {
+               "b": "",
+               "c": "",
+           }
+        }),
+    )
+    .await;
+
+    let http_client = FakeHttpClient::with_404_response();
+    let client = cx.read(|cx| Client::new(http_client, cx));
+
+    let tree = Worktree::local(
+        client,
+        Path::new("/root"),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new(".gitignore"),
+                Path::new("a"),
+                Path::new("a/c"),
+            ]
+        );
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new(".gitignore"),
+                Path::new("a"),
+                Path::new("a/b"),
+                Path::new("a/c"),
+            ]
+        );
+    })
+}
+
+#[gpui::test]
+async fn test_descendent_entries(cx: &mut TestAppContext) {
+    let fs = FakeFs::new(cx.background());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "a": "",
+            "b": {
+               "c": {
+                   "d": ""
+               },
+               "e": {}
+            },
+            "f": "",
+            "g": {
+                "h": {}
+            },
+            "i": {
+                "j": {
+                    "k": ""
+                },
+                "l": {
+
+                }
+            },
+            ".gitignore": "i/j\n",
+        }),
+    )
+    .await;
+
+    let http_client = FakeHttpClient::with_404_response();
+    let client = cx.read(|cx| Client::new(http_client, cx));
+
+    let tree = Worktree::local(
+        client,
+        Path::new("/root"),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("b"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("b/c/d"),]
+        );
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("b"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new("b"),
+                Path::new("b/c"),
+                Path::new("b/c/d"),
+                Path::new("b/e"),
+            ]
+        );
+
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("g"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            Vec::<PathBuf>::new()
+        );
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("g"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("g"), Path::new("g/h"),]
+        );
+
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            Vec::<PathBuf>::new()
+        );
+        assert_eq!(
+            tree.descendent_entries(false, true, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("i/j/k")]
+        );
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("i"), Path::new("i/l"),]
+        );
+    })
+}
+
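+// Symlinks that point back up the tree ("/root/lib/a/lib" -> "..") should
+// appear as entries but must not be traversed into, so the scan terminates.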
+#[gpui::test(iterations = 10)]
+async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
+    let fs = FakeFs::new(cx.background());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "lib": {
+                "a": {
+                    "a.txt": ""
+                },
+                "b": {
+                    "b.txt": ""
+                }
+            }
+        }),
+    )
+    .await;
+    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
+    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
+
+    let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+    let tree = Worktree::local(
+        client,
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new("lib"),
+                Path::new("lib/a"),
+                Path::new("lib/a/a.txt"),
+                Path::new("lib/a/lib"),
+                Path::new("lib/b"),
+                Path::new("lib/b/b.txt"),
+                Path::new("lib/b/lib"),
+            ]
+        );
+    });
+
+    fs.rename(
+        Path::new("/root/lib/a/lib"),
+        Path::new("/root/lib/a/lib-2"),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    executor.run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new("lib"),
+                Path::new("lib/a"),
+                Path::new("lib/a/a.txt"),
+                Path::new("lib/a/lib-2"),
+                Path::new("lib/b"),
+                Path::new("lib/b/b.txt"),
+                Path::new("lib/b/lib"),
+            ]
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+    // .gitignores are handled explicitly by Zed and do not use the git
+    // machinery that the git_tests module checks
+    let parent_dir = temp_tree(json!({
+        ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
+        "tree": {
+            ".git": {},
+            ".gitignore": "ignored-dir\n",
+            "tracked-dir": {
+                "tracked-file1": "",
+                "ancestor-ignored-file1": "",
+            },
+            "ignored-dir": {
+                "ignored-file1": ""
+            }
+        }
+    }));
+    let dir = parent_dir.path().join("tree");
+
+    let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+    let tree = Worktree::local(
+        client,
+        dir.as_path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        assert!(
+            !tree
+                .entry_for_path("tracked-dir/tracked-file1")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("ignored-dir/ignored-file1")
+                .unwrap()
+                .is_ignored
+        );
+    });
+
+    std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
+    std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
+    std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
+    tree.flush_fs_events(cx).await;
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        assert!(
+            !tree
+                .entry_for_path("tracked-dir/tracked-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("ignored-dir/ignored-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
+    });
+}
+
+#[gpui::test]
+async fn test_write_file(cx: &mut TestAppContext) {
+    let dir = temp_tree(json!({
+        ".git": {},
+        ".gitignore": "ignored-dir\n",
+        "tracked-dir": {},
+        "ignored-dir": {}
+    }));
+
+    let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+    let tree = Worktree::local(
+        client,
+        dir.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+
+    tree.update(cx, |tree, cx| {
+        tree.as_local().unwrap().write_file(
+            Path::new("tracked-dir/file.txt"),
+            "hello".into(),
+            Default::default(),
+            cx,
+        )
+    })
+    .await
+    .unwrap();
+    tree.update(cx, |tree, cx| {
+        tree.as_local().unwrap().write_file(
+            Path::new("ignored-dir/file.txt"),
+            "world".into(),
+            Default::default(),
+            cx,
+        )
+    })
+    .await
+    .unwrap();
+
+    tree.read_with(cx, |tree, _| {
+        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
+        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
+        assert!(!tracked.is_ignored);
+        assert!(ignored.is_ignored);
+    });
+}
+
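+// Creates a directory through the worktree API before the initial scan has
+// finished, then checks that a snapshot kept up to date via `observe_updates`
+// converges with the worktree's own snapshot.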
+#[gpui::test(iterations = 30)]
+async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+    let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+    let fs = FakeFs::new(cx.background());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "b": {},
+            "c": {},
+            "d": {},
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        client,
+        "/root".as_ref(),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let snapshot1 = tree.update(cx, |tree, cx| {
+        let tree = tree.as_local_mut().unwrap();
+        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
+        let _ = tree.observe_updates(0, cx, {
+            let snapshot = snapshot.clone();
+            move |update| {
+                snapshot.lock().apply_remote_update(update).unwrap();
+                async { true }
+            }
+        });
+        snapshot
+    });
+
+    let entry = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/e".as_ref(), true, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_dir());
+
+    cx.foreground().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
+    });
+
+    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
+    assert_eq!(
+        snapshot1.lock().entries(true).collect::<Vec<_>>(),
+        snapshot2.entries(true).collect::<Vec<_>>()
+    );
+}
+
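+// Applies random worktree operations while the initial scan is still in
+// progress, checking snapshot invariants after each step and verifying that
+// the updates emitted by `observe_updates` replay every stored snapshot into
+// the final one.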
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_operations_during_initial_scan(
+    cx: &mut TestAppContext,
+    mut rng: StdRng,
+) {
+    let operations = env::var("OPERATIONS")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(5);
+    let initial_entries = env::var("INITIAL_ENTRIES")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(20);
+
+    let root_dir = Path::new("/test");
+    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
+    fs.as_fake().insert_tree(root_dir, json!({})).await;
+    for _ in 0..initial_entries {
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+    }
+    log::info!("generated initial tree");
+
+    let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+    let worktree = Worktree::local(
+        client.clone(),
+        root_dir,
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    worktree.update(cx, |tree, cx| {
+        check_worktree_change_events(tree, cx);
+
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    for _ in 0..operations {
+        worktree
+            .update(cx, |worktree, cx| {
+                randomly_mutate_worktree(worktree, &mut rng, cx)
+            })
+            .await
+            .log_err();
+        worktree.read_with(cx, |tree, _| {
+            tree.as_local().unwrap().snapshot().check_invariants()
+        });
+
+        if rng.gen_bool(0.6) {
+            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
+        }
+    }
+
+    worktree
+        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+        .await;
+
+    cx.foreground().run_until_parked();
+
+    let final_snapshot = worktree.read_with(cx, |tree, _| {
+        let tree = tree.as_local().unwrap();
+        let snapshot = tree.snapshot();
+        snapshot.check_invariants();
+        snapshot
+    });
+
+    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
+        let mut updated_snapshot = snapshot.clone();
+        for update in updates.lock().iter() {
+            if update.scan_id >= updated_snapshot.scan_id() as u64 {
+                updated_snapshot
+                    .apply_remote_update(update.clone())
+                    .unwrap();
+            }
+        }
+
+        assert_eq!(
+            updated_snapshot.entries(true).collect::<Vec<_>>(),
+            final_snapshot.entries(true).collect::<Vec<_>>(),
+            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
+        );
+    }
+}
+
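+// Fuzzes a fully scanned worktree: fake fs events are paused and flushed in
+// random batches while both the tree and the file system are mutated, then
+// the test verifies snapshot invariants, agreement with a freshly scanned
+// worktree, and that the recorded updates bring every stored snapshot up to
+// date.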
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+    let operations = env::var("OPERATIONS")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(40);
+    let initial_entries = env::var("INITIAL_ENTRIES")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(20);
+
+    let root_dir = Path::new("/test");
+    let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
+    fs.as_fake().insert_tree(root_dir, json!({})).await;
+    for _ in 0..initial_entries {
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+    }
+    log::info!("generated initial tree");
+
+    let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+    let worktree = Worktree::local(
+        client.clone(),
+        root_dir,
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    worktree.update(cx, |tree, cx| {
+        check_worktree_change_events(tree, cx);
+
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    worktree
+        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+        .await;
+
+    fs.as_fake().pause_events();
+    let mut snapshots = Vec::new();
+    let mut mutations_len = operations;
+    while mutations_len > 1 {
+        if rng.gen_bool(0.2) {
+            worktree
+                .update(cx, |worktree, cx| {
+                    randomly_mutate_worktree(worktree, &mut rng, cx)
+                })
+                .await
+                .log_err();
+        } else {
+            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+        }
+
+        let buffered_event_count = fs.as_fake().buffered_event_count();
+        if buffered_event_count > 0 && rng.gen_bool(0.3) {
+            let len = rng.gen_range(0..=buffered_event_count);
+            log::info!("flushing {} events", len);
+            fs.as_fake().flush_events(len);
+        } else {
+            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
+            mutations_len -= 1;
+        }
+
+        cx.foreground().run_until_parked();
+        if rng.gen_bool(0.2) {
+            log::info!("storing snapshot {}", snapshots.len());
+            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+            snapshots.push(snapshot);
+        }
+    }
+
+    log::info!("quiescing");
+    fs.as_fake().flush_events(usize::MAX);
+    cx.foreground().run_until_parked();
+    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+    snapshot.check_invariants();
+
+    {
+        let new_worktree = Worktree::local(
+            client.clone(),
+            root_dir,
+            true,
+            fs.clone(),
+            Default::default(),
+            &mut cx.to_async(),
+        )
+        .await
+        .unwrap();
+        new_worktree
+            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+            .await;
+        let new_snapshot =
+            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+        assert_eq!(
+            snapshot.entries_without_ids(true),
+            new_snapshot.entries_without_ids(true)
+        );
+    }
+
+    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
+        for update in updates.lock().iter() {
+            if update.scan_id >= prev_snapshot.scan_id() as u64 {
+                prev_snapshot.apply_remote_update(update.clone()).unwrap();
+            }
+        }
+
+        assert_eq!(
+            prev_snapshot.entries(true).collect::<Vec<_>>(),
+            snapshot.entries(true).collect::<Vec<_>>(),
+            "wrong updates after snapshot {i}: {updates:#?}",
+        );
+    }
+}
+
+// The worktree's `UpdatedEntries` event can be used to follow along with
+// all changes to the worktree's snapshot.
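+// This subscriber keeps its own copy of the entries, applies each reported
+// `PathChange`, and asserts after every event that the copy matches
+// `tree.entries(true)`.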
+fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
+    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
+    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
+        if let Event::UpdatedEntries(changes) = event {
+            for (path, _, change_type) in changes.iter() {
+                let entry = tree.entry_for_path(&path).cloned();
+                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
+                    Ok(ix) | Err(ix) => ix,
+                };
+                match change_type {
+                    PathChange::Loaded => entries.insert(ix, entry.unwrap()),
+                    PathChange::Added => entries.insert(ix, entry.unwrap()),
+                    PathChange::Removed => drop(entries.remove(ix)),
+                    PathChange::Updated => {
+                        let entry = entry.unwrap();
+                        let existing_entry = entries.get_mut(ix).unwrap();
+                        assert_eq!(existing_entry.path, entry.path);
+                        *existing_entry = entry;
+                    }
+                    PathChange::AddedOrUpdated => {
+                        let entry = entry.unwrap();
+                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
+                            *entries.get_mut(ix).unwrap() = entry;
+                        } else {
+                            entries.insert(ix, entry);
+                        }
+                    }
+                }
+            }
+
+            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
+            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
+        }
+    })
+    .detach();
+}
+
+fn randomly_mutate_worktree(
+    worktree: &mut Worktree,
+    rng: &mut impl Rng,
+    cx: &mut ModelContext<Worktree>,
+) -> Task<Result<()>> {
+    log::info!("mutating worktree");
+    let worktree = worktree.as_local_mut().unwrap();
+    let snapshot = worktree.snapshot();
+    let entry = snapshot.entries(false).choose(rng).unwrap();
+
+    match rng.gen_range(0_u32..100) {
+        0..=33 if entry.path.as_ref() != Path::new("") => {
+            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
+            worktree.delete_entry(entry.id, cx).unwrap()
+        }
+        ..=66 if entry.path.as_ref() != Path::new("") => {
+            let other_entry = snapshot.entries(false).choose(rng).unwrap();
+            let new_parent_path = if other_entry.is_dir() {
+                other_entry.path.clone()
+            } else {
+                other_entry.path.parent().unwrap().into()
+            };
+            let mut new_path = new_parent_path.join(random_filename(rng));
+            if new_path.starts_with(&entry.path) {
+                new_path = random_filename(rng).into();
+            }
+
+            log::info!(
+                "renaming entry {:?} ({}) to {:?}",
+                entry.path,
+                entry.id.0,
+                new_path
+            );
+            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
+            cx.foreground().spawn(async move {
+                task.await?;
+                Ok(())
+            })
+        }
+        _ => {
+            let task = if entry.is_dir() {
+                let child_path = entry.path.join(random_filename(rng));
+                let is_dir = rng.gen_bool(0.3);
+                log::info!(
+                    "creating {} at {:?}",
+                    if is_dir { "dir" } else { "file" },
+                    child_path,
+                );
+                worktree.create_entry(child_path, is_dir, cx)
+            } else {
+                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
+                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
+            };
+            cx.foreground().spawn(async move {
+                task.await?;
+                Ok(())
+            })
+        }
+    }
+}
+
+async fn randomly_mutate_fs(
+    fs: &Arc<dyn Fs>,
+    root_path: &Path,
+    insertion_probability: f64,
+    rng: &mut impl Rng,
+) {
+    log::info!("mutating fs");
+    let mut files = Vec::new();
+    let mut dirs = Vec::new();
+    for path in fs.as_fake().paths(false) {
+        if path.starts_with(root_path) {
+            if fs.is_file(&path).await {
+                files.push(path);
+            } else {
+                dirs.push(path);
+            }
+        }
+    }
+
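+    // Randomly either create a new file or directory, write a .gitignore covering some existing paths, or rename/delete an existing path.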
+    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
+        let path = dirs.choose(rng).unwrap();
+        let new_path = path.join(random_filename(rng));
+
+        if rng.gen() {
+            log::info!(
+                "creating dir {:?}",
+                new_path.strip_prefix(root_path).unwrap()
+            );
+            fs.create_dir(&new_path).await.unwrap();
+        } else {
+            log::info!(
+                "creating file {:?}",
+                new_path.strip_prefix(root_path).unwrap()
+            );
+            fs.create_file(&new_path, Default::default()).await.unwrap();
+        }
+    } else if rng.gen_bool(0.05) {
+        let ignore_dir_path = dirs.choose(rng).unwrap();
+        let ignore_path = ignore_dir_path.join(&*GITIGNORE);
+
+        let subdirs = dirs
+            .iter()
+            .filter(|d| d.starts_with(&ignore_dir_path))
+            .cloned()
+            .collect::<Vec<_>>();
+        let subfiles = files
+            .iter()
+            .filter(|d| d.starts_with(&ignore_dir_path))
+            .cloned()
+            .collect::<Vec<_>>();
+        let files_to_ignore = {
+            let len = rng.gen_range(0..=subfiles.len());
+            subfiles.choose_multiple(rng, len)
+        };
+        let dirs_to_ignore = {
+            let len = rng.gen_range(0..subdirs.len());
+            subdirs.choose_multiple(rng, len)
+        };
+
+        let mut ignore_contents = String::new();
+        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
+            writeln!(
+                ignore_contents,
+                "{}",
+                path_to_ignore
+                    .strip_prefix(&ignore_dir_path)
+                    .unwrap()
+                    .to_str()
+                    .unwrap()
+            )
+            .unwrap();
+        }
+        log::info!(
+            "creating gitignore {:?} with contents:\n{}",
+            ignore_path.strip_prefix(&root_path).unwrap(),
+            ignore_contents
+        );
+        fs.save(
+            &ignore_path,
+            &ignore_contents.as_str().into(),
+            Default::default(),
+        )
+        .await
+        .unwrap();
+    } else {
+        let old_path = {
+            let file_path = files.choose(rng);
+            let dir_path = dirs[1..].choose(rng);
+            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
+        };
+
+        let is_rename = rng.gen();
+        if is_rename {
+            let new_path_parent = dirs
+                .iter()
+                .filter(|d| !d.starts_with(old_path))
+                .choose(rng)
+                .unwrap();
+
+            let overwrite_existing_dir =
+                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
+            let new_path = if overwrite_existing_dir {
+                fs.remove_dir(
+                    &new_path_parent,
+                    RemoveOptions {
+                        recursive: true,
+                        ignore_if_not_exists: true,
+                    },
+                )
+                .await
+                .unwrap();
+                new_path_parent.to_path_buf()
+            } else {
+                new_path_parent.join(random_filename(rng))
+            };
+
+            log::info!(
+                "renaming {:?} to {}{:?}",
+                old_path.strip_prefix(&root_path).unwrap(),
+                if overwrite_existing_dir {
+                    "overwrite "
+                } else {
+                    ""
+                },
+                new_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.rename(
+                &old_path,
+                &new_path,
+                fs::RenameOptions {
+                    overwrite: true,
+                    ignore_if_exists: true,
+                },
+            )
+            .await
+            .unwrap();
+        } else if fs.is_file(&old_path).await {
+            log::info!(
+                "deleting file {:?}",
+                old_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.remove_file(old_path, Default::default()).await.unwrap();
+        } else {
+            log::info!(
+                "deleting dir {:?}",
+                old_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.remove_dir(
+                &old_path,
+                RemoveOptions {
+                    recursive: true,
+                    ignore_if_not_exists: true,
+                },
+            )
+            .await
+            .unwrap();
+        }
+    }
+}
+
+fn random_filename(rng: &mut impl Rng) -> String {
+    (0..6)
+        .map(|_| rng.sample(rand::distributions::Alphanumeric))
+        .map(char::from)
+        .collect()
+}
+
+#[gpui::test]
+async fn test_rename_work_directory(cx: &mut TestAppContext) {
+    let root = temp_tree(json!({
+        "projects": {
+            "project1": {
+                "a": "",
+                "b": "",
+            }
+        },
+
+    }));
+    let root_path = root.path();
+
+    let http_client = FakeHttpClient::with_404_response();
+    let client = cx.read(|cx| Client::new(http_client, cx));
+    let tree = Worktree::local(
+        client,
+        root_path,
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let repo = git_init(&root_path.join("projects/project1"));
+    git_add("a", &repo);
+    git_commit("init", &repo);
+    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.flush_fs_events(cx).await;
+
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        let (work_dir, _) = tree.repositories().next().unwrap();
+        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project1/a")),
+            Some(GitFileStatus::Modified)
+        );
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project1/b")),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    std::fs::rename(
+        root_path.join("projects/project1"),
+        root_path.join("projects/project2"),
+    )
+    .ok();
+    tree.flush_fs_events(cx).await;
+
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        let (work_dir, _) = tree.repositories().next().unwrap();
+        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project2/a")),
+            Some(GitFileStatus::Modified)
+        );
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project2/b")),
+            Some(GitFileStatus::Added)
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+    let root = temp_tree(json!({
+        "c.txt": "",
+        "dir1": {
+            ".git": {},
+            "deps": {
+                "dep1": {
+                    ".git": {},
+                    "src": {
+                        "a.txt": ""
+                    }
+                }
+            },
+            "src": {
+                "b.txt": ""
+            }
+        },
+    }));
+
+    let http_client = FakeHttpClient::with_404_response();
+    let client = cx.read(|cx| Client::new(http_client, cx));
+    let tree = Worktree::local(
+        client,
+        root.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+
+    tree.read_with(cx, |tree, _cx| {
+        let tree = tree.as_local().unwrap();
+
+        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
+
+        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
+        assert_eq!(
+            entry
+                .work_directory(tree)
+                .map(|directory| directory.as_ref().to_owned()),
+            Some(Path::new("dir1").to_owned())
+        );
+
+        let entry = tree
+            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
+            .unwrap();
+        assert_eq!(
+            entry
+                .work_directory(tree)
+                .map(|directory| directory.as_ref().to_owned()),
+            Some(Path::new("dir1/deps/dep1").to_owned())
+        );
+
+        let entries = tree.files(false, 0);
+
+        let paths_with_repos = tree
+            .entries_with_repositories(entries)
+            .map(|(entry, repo)| {
+                (
+                    entry.path.as_ref(),
+                    repo.and_then(|repo| {
+                        repo.work_directory(&tree)
+                            .map(|work_directory| work_directory.0.to_path_buf())
+                    }),
+                )
+            })
+            .collect::<Vec<_>>();
+
+        assert_eq!(
+            paths_with_repos,
+            &[
+                (Path::new("c.txt"), None),
+                (
+                    Path::new("dir1/deps/dep1/src/a.txt"),
+                    Some(Path::new("dir1/deps/dep1").into())
+                ),
+                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
+            ]
+        );
+    });
+
+    let repo_update_events = Arc::new(Mutex::new(vec![]));
+    tree.update(cx, |_, cx| {
+        let repo_update_events = repo_update_events.clone();
+        cx.subscribe(&tree, move |_, _, event, _| {
+            if let Event::UpdatedGitRepositories(update) = event {
+                repo_update_events.lock().push(update.clone());
+            }
+        })
+        .detach();
+    });
+
+    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
+    tree.flush_fs_events(cx).await;
+
+    assert_eq!(
+        repo_update_events.lock()[0]
+            .iter()
+            .map(|e| e.0.clone())
+            .collect::<Vec<Arc<Path>>>(),
+        vec![Path::new("dir1").into()]
+    );
+
+    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
+    tree.flush_fs_events(cx).await;
+
+    tree.read_with(cx, |tree, _cx| {
+        let tree = tree.as_local().unwrap();
+
+        assert!(tree
+            .repository_for_path("dir1/src/b.txt".as_ref())
+            .is_none());
+    });
+}
+
+#[gpui::test]
+async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
+    const IGNORE_RULE: &str = "**/target";
+
+    let root = temp_tree(json!({
+        "project": {
+            "a.txt": "a",
+            "b.txt": "bb",
+            "c": {
+                "d": {
+                    "e.txt": "eee"
+                }
+            },
+            "f.txt": "ffff",
+            "target": {
+                "build_file": "???"
+            },
+            ".gitignore": IGNORE_RULE
+        },
+
+    }));
+
+    let http_client = FakeHttpClient::with_404_response();
+    let client = cx.read(|cx| Client::new(http_client, cx));
+    let tree = Worktree::local(
+        client,
+        root.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    const A_TXT: &str = "a.txt";
+    const B_TXT: &str = "b.txt";
+    const E_TXT: &str = "c/d/e.txt";
+    const F_TXT: &str = "f.txt";
+    const DOTGITIGNORE: &str = ".gitignore";
+    const BUILD_FILE: &str = "target/build_file";
+    let project_path = Path::new("project");
+
+    let work_dir = root.path().join("project");
+    let mut repo = git_init(work_dir.as_path());
+    repo.add_ignore_rule(IGNORE_RULE).unwrap();
+    git_add(Path::new(A_TXT), &repo);
+    git_add(Path::new(E_TXT), &repo);
+    git_add(Path::new(DOTGITIGNORE), &repo);
+    git_commit("Initial commit", &repo);
+
+    tree.flush_fs_events(cx).await;
+    deterministic.run_until_parked();
+
+    // Check that the right git state is observed on startup
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(snapshot.repositories().count(), 1);
+        let (dir, _) = snapshot.repositories().next().unwrap();
+        assert_eq!(dir.as_ref(), Path::new("project"));
+
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(B_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(F_TXT)),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
+
+    tree.flush_fs_events(cx).await;
+    deterministic.run_until_parked();
+
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(A_TXT)),
+            Some(GitFileStatus::Modified)
+        );
+    });
+
+    git_add(Path::new(A_TXT), &repo);
+    git_add(Path::new(B_TXT), &repo);
+    git_commit("Committing modified and added", &repo);
+    tree.flush_fs_events(cx).await;
+    deterministic.run_until_parked();
+
+    // Check that repository-only changes (no file-system changes) are tracked
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(F_TXT)),
+            Some(GitFileStatus::Added)
+        );
+
+        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
+        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+    });
+
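+    // Rewind the last commit, unstage b.txt, stash, and then modify files on disk to produce a more complex mix of statuses.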
+    git_reset(0, &repo);
+    git_remove_index(Path::new(B_TXT), &repo);
+    git_stash(&mut repo);
+    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
+    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
+    tree.flush_fs_events(cx).await;
+    deterministic.run_until_parked();
+
+    // Check that more complex repo changes are tracked
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(B_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(E_TXT)),
+            Some(GitFileStatus::Modified)
+        );
+    });
+
+    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
+    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
+    std::fs::write(
+        work_dir.join(DOTGITIGNORE),
+        [IGNORE_RULE, "f.txt"].join("\n"),
+    )
+    .unwrap();
+
+    git_add(Path::new(DOTGITIGNORE), &repo);
+    git_commit("Committing modified git ignore", &repo);
+
+    tree.flush_fs_events(cx).await;
+    deterministic.run_until_parked();
+
+    let mut renamed_dir_name = "first_directory/second_directory";
+    const RENAMED_FILE: &str = "rf.txt";
+
+    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
+    std::fs::write(
+        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
+        "new-contents",
+    )
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    deterministic.run_until_parked();
+
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(
+            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    renamed_dir_name = "new_first_directory/second_directory";
+
+    std::fs::rename(
+        work_dir.join("first_directory"),
+        work_dir.join("new_first_directory"),
+    )
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    deterministic.run_until_parked();
+
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(
+            snapshot.status_for_file(
+                project_path
+                    .join(Path::new(renamed_dir_name))
+                    .join(RENAMED_FILE)
+            ),
+            Some(GitFileStatus::Added)
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+    let fs = FakeFs::new(cx.background());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".git": {},
+            "a": {
+                "b": {
+                    "c1.txt": "",
+                    "c2.txt": "",
+                },
+                "d": {
+                    "e1.txt": "",
+                    "e2.txt": "",
+                    "e3.txt": "",
+                }
+            },
+            "f": {
+                "no-status.txt": ""
+            },
+            "g": {
+                "h1.txt": "",
+                "h2.txt": ""
+            },
+
+        }),
+    )
+    .await;
+
+    fs.set_status_for_repo_via_git_operation(
+        &Path::new("/root/.git"),
+        &[
+            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
+            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
+            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
+        ],
+    );
+
+    let http_client = FakeHttpClient::with_404_response();
+    let client = cx.read(|cx| Client::new(http_client, cx));
+    let tree = Worktree::local(
+        client,
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    cx.foreground().run_until_parked();
+    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
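+    // Directory entries should summarize their descendants' statuses, with conflict taking precedence over modified, and modified over added.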
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new(""), Some(GitFileStatus::Conflict)),
+            (Path::new("a"), Some(GitFileStatus::Modified)),
+            (Path::new("a/b"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d"), Some(GitFileStatus::Modified)),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f"), None),
+            (Path::new("f/no-status.txt"), None),
+            (Path::new("g"), Some(GitFileStatus::Conflict)),
+            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
+        ],
+    );
+
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new("a/b"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d"), Some(GitFileStatus::Modified)),
+            (Path::new("a/d/e1.txt"), None),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f"), None),
+            (Path::new("f/no-status.txt"), None),
+            (Path::new("g"), Some(GitFileStatus::Conflict)),
+        ],
+    );
+
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d/e1.txt"), None),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f/no-status.txt"), None),
+        ],
+    );
+
+    #[track_caller]
+    fn check_propagated_statuses(
+        snapshot: &Snapshot,
+        expected_statuses: &[(&Path, Option<GitFileStatus>)],
+    ) {
+        let mut entries = expected_statuses
+            .iter()
+            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
+            .collect::<Vec<_>>();
+        snapshot.propagate_git_statuses(&mut entries);
+        assert_eq!(
+            entries
+                .iter()
+                .map(|e| (e.path.as_ref(), e.git_status))
+                .collect::<Vec<_>>(),
+            expected_statuses
+        );
+    }
+}
+
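+// Test helpers that drive real git repositories via the git2 crate.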
+#[track_caller]
+fn git_init(path: &Path) -> git2::Repository {
+    git2::Repository::init(path).expect("Failed to initialize git repository")
+}
+
+#[track_caller]
+fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
+    let path = path.as_ref();
+    let mut index = repo.index().expect("Failed to get index");
+    index.add_path(path).expect("Failed to add file to index");
+    index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_remove_index(path: &Path, repo: &git2::Repository) {
+    let mut index = repo.index().expect("Failed to get index");
+    index.remove_path(path).expect("Failed to remove file from index");
+    index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_commit(msg: &'static str, repo: &git2::Repository) {
+    use git2::Signature;
+
+    let signature = Signature::now("test", "test@zed.dev").unwrap();
+    let oid = repo.index().unwrap().write_tree().unwrap();
+    let tree = repo.find_tree(oid).unwrap();
+    if let Ok(head) = repo.head() {
+        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
+
+        let parent_commit = parent_obj.as_commit().unwrap();
+
+        repo.commit(
+            Some("HEAD"),
+            &signature,
+            &signature,
+            msg,
+            &tree,
+            &[parent_commit],
+        )
+        .expect("Failed to commit with parent");
+    } else {
+        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
+            .expect("Failed to commit");
+    }
+}
+
+#[track_caller]
+fn git_stash(repo: &mut git2::Repository) {
+    use git2::Signature;
+
+    let signature = Signature::now("test", "test@zed.dev").unwrap();
+    repo.stash_save(&signature, "N/A", None)
+        .expect("Failed to stash");
+}
+
+#[track_caller]
+fn git_reset(offset: usize, repo: &git2::Repository) {
+    let head = repo.head().expect("Couldn't get repo head");
+    let object = head.peel(git2::ObjectType::Commit).unwrap();
+    let commit = object.as_commit().unwrap();
+    let new_head = commit
+        .parents()
+        .inspect(|parent| {
+            parent.message();
+        })
+        .nth(offset)
+        .expect("Not enough history");
+    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
+        .expect("Could not reset");
+}
+
+#[allow(dead_code)]
+#[track_caller]
+fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
+    repo.statuses(None)
+        .unwrap()
+        .iter()
+        .map(|status| (status.path().unwrap().to_string(), status.status()))
+        .collect()
+}

crates/settings/Cargo.toml ๐Ÿ”—

@@ -31,7 +31,7 @@ serde_derive.workspace = true
 serde_json.workspace = true
 smallvec.workspace = true
 toml.workspace = true
-tree-sitter = "*"
+tree-sitter.workspace = true
 tree-sitter-json = "*"
 
 [dev-dependencies]

crates/settings/src/settings_store.rs ๐Ÿ”—

@@ -86,9 +86,9 @@ pub struct SettingsJsonSchemaParams<'a> {
 /// A set of strongly-typed setting values defined via multiple JSON files.
 pub struct SettingsStore {
     setting_values: HashMap<TypeId, Box<dyn AnySettingValue>>,
-    default_deserialized_settings: serde_json::Value,
-    user_deserialized_settings: serde_json::Value,
-    local_deserialized_settings: BTreeMap<(usize, Arc<Path>), serde_json::Value>,
+    raw_default_settings: serde_json::Value,
+    raw_user_settings: serde_json::Value,
+    raw_local_settings: BTreeMap<(usize, Arc<Path>), serde_json::Value>,
     tab_size_callback: Option<(TypeId, Box<dyn Fn(&dyn Any) -> Option<usize>>)>,
 }
 
@@ -96,9 +96,9 @@ impl Default for SettingsStore {
     fn default() -> Self {
         SettingsStore {
             setting_values: Default::default(),
-            default_deserialized_settings: serde_json::json!({}),
-            user_deserialized_settings: serde_json::json!({}),
-            local_deserialized_settings: Default::default(),
+            raw_default_settings: serde_json::json!({}),
+            raw_user_settings: serde_json::json!({}),
+            raw_local_settings: Default::default(),
             tab_size_callback: Default::default(),
         }
     }
@@ -148,13 +148,13 @@ impl SettingsStore {
         }));
 
         if let Some(default_settings) = setting_value
-            .deserialize_setting(&self.default_deserialized_settings)
+            .deserialize_setting(&self.raw_default_settings)
             .log_err()
         {
             let mut user_values_stack = Vec::new();
 
             if let Some(user_settings) = setting_value
-                .deserialize_setting(&self.user_deserialized_settings)
+                .deserialize_setting(&self.raw_user_settings)
                 .log_err()
             {
                 user_values_stack = vec![user_settings];
@@ -196,8 +196,8 @@ impl SettingsStore {
     ///
     /// This is only for debugging and reporting. For user-facing functionality,
     /// use the typed setting interface.
-    pub fn untyped_user_settings(&self) -> &serde_json::Value {
-        &self.user_deserialized_settings
+    pub fn raw_user_settings(&self) -> &serde_json::Value {
+        &self.raw_user_settings
     }
 
     #[cfg(any(test, feature = "test-support"))]
@@ -219,7 +219,7 @@ impl SettingsStore {
         cx: &AppContext,
         update: impl FnOnce(&mut T::FileContent),
     ) {
-        let old_text = serde_json::to_string(&self.user_deserialized_settings).unwrap();
+        let old_text = serde_json::to_string(&self.raw_user_settings).unwrap();
         let new_text = self.new_text_for_update::<T>(old_text, update);
         self.set_user_settings(&new_text, cx).unwrap();
     }
@@ -248,25 +248,19 @@ impl SettingsStore {
     ) -> Vec<(Range<usize>, String)> {
         let setting_type_id = TypeId::of::<T>();
 
-        let old_content = self
+        let setting = self
             .setting_values
             .get(&setting_type_id)
-            .unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()))
-            .deserialize_setting(&self.user_deserialized_settings)
-            .unwrap_or_else(|e| {
-                panic!(
-                    "could not deserialize setting type {} from user settings: {}",
-                    type_name::<T>(),
-                    e
-                )
-            })
-            .0
-            .downcast::<T::FileContent>()
-            .unwrap();
+            .unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()));
+        let raw_settings = parse_json_with_comments::<serde_json::Value>(text).unwrap_or_default();
+        let old_content = match setting.deserialize_setting(&raw_settings) {
+            Ok(content) => content.0.downcast::<T::FileContent>().unwrap(),
+            Err(_) => Box::new(T::FileContent::default()),
+        };
         let mut new_content = old_content.clone();
         update(&mut new_content);
 
-        let old_value = &serde_json::to_value(&old_content).unwrap();
+        let old_value = serde_json::to_value(&old_content).unwrap();
         let new_value = serde_json::to_value(new_content).unwrap();
 
         let mut key_path = Vec::new();
@@ -323,7 +317,7 @@ impl SettingsStore {
     ) -> Result<()> {
         let settings: serde_json::Value = parse_json_with_comments(default_settings_content)?;
         if settings.is_object() {
-            self.default_deserialized_settings = settings;
+            self.raw_default_settings = settings;
             self.recompute_values(None, cx)?;
             Ok(())
         } else {
@@ -339,7 +333,7 @@ impl SettingsStore {
     ) -> Result<()> {
         let settings: serde_json::Value = parse_json_with_comments(user_settings_content)?;
         if settings.is_object() {
-            self.user_deserialized_settings = settings;
+            self.raw_user_settings = settings;
             self.recompute_values(None, cx)?;
             Ok(())
         } else {
@@ -356,11 +350,10 @@ impl SettingsStore {
         cx: &AppContext,
     ) -> Result<()> {
         if let Some(content) = settings_content {
-            self.local_deserialized_settings
+            self.raw_local_settings
                 .insert((root_id, path.clone()), parse_json_with_comments(content)?);
         } else {
-            self.local_deserialized_settings
-                .remove(&(root_id, path.clone()));
+            self.raw_local_settings.remove(&(root_id, path.clone()));
         }
         self.recompute_values(Some((root_id, &path)), cx)?;
         Ok(())
@@ -368,14 +361,13 @@ impl SettingsStore {
 
     /// Add or remove a set of local settings via a JSON string.
     pub fn clear_local_settings(&mut self, root_id: usize, cx: &AppContext) -> Result<()> {
-        self.local_deserialized_settings
-            .retain(|k, _| k.0 != root_id);
+        self.raw_local_settings.retain(|k, _| k.0 != root_id);
         self.recompute_values(Some((root_id, "".as_ref())), cx)?;
         Ok(())
     }
 
     pub fn local_settings(&self, root_id: usize) -> impl '_ + Iterator<Item = (Arc<Path>, String)> {
-        self.local_deserialized_settings
+        self.raw_local_settings
             .range((root_id, Path::new("").into())..(root_id + 1, Path::new("").into()))
             .map(|((_, path), content)| (path.clone(), serde_json::to_string(content).unwrap()))
     }
@@ -466,14 +458,13 @@ impl SettingsStore {
         let mut user_settings_stack = Vec::<DeserializedSetting>::new();
         let mut paths_stack = Vec::<Option<(usize, &Path)>>::new();
         for setting_value in self.setting_values.values_mut() {
-            let default_settings =
-                setting_value.deserialize_setting(&self.default_deserialized_settings)?;
+            let default_settings = setting_value.deserialize_setting(&self.raw_default_settings)?;
 
             user_settings_stack.clear();
             paths_stack.clear();
 
             if let Some(user_settings) = setting_value
-                .deserialize_setting(&self.user_deserialized_settings)
+                .deserialize_setting(&self.raw_user_settings)
                 .log_err()
             {
                 user_settings_stack.push(user_settings);
@@ -491,7 +482,7 @@ impl SettingsStore {
             }
 
             // Reload the local values for the setting.
-            for ((root_id, path), local_settings) in &self.local_deserialized_settings {
+            for ((root_id, path), local_settings) in &self.raw_local_settings {
                 // Build a stack of all of the local values for that setting.
                 while let Some(prev_entry) = paths_stack.last() {
                     if let Some((prev_root_id, prev_path)) = prev_entry {
@@ -542,9 +533,9 @@ impl Debug for SettingsStore {
                     .map(|value| value.setting_type_name())
                     .collect::<Vec<_>>(),
             )
-            .field("default_settings", &self.default_deserialized_settings)
-            .field("user_settings", &self.user_deserialized_settings)
-            .field("local_settings", &self.local_deserialized_settings)
+            .field("default_settings", &self.raw_default_settings)
+            .field("user_settings", &self.raw_user_settings)
+            .field("local_settings", &self.raw_local_settings)
             .finish_non_exhaustive()
     }
 }

crates/theme/src/theme.rs ๐Ÿ”—

@@ -44,7 +44,7 @@ pub struct Theme {
     pub context_menu: ContextMenu,
     pub contacts_popover: ContactsPopover,
     pub contact_list: ContactList,
-    pub lsp_log_menu: LspLogMenu,
+    pub toolbar_dropdown_menu: DropdownMenu,
     pub copilot: Copilot,
     pub contact_finder: ContactFinder,
     pub project_panel: ProjectPanel,
@@ -246,15 +246,26 @@ pub struct ContactFinder {
 }
 
 #[derive(Deserialize, Default)]
-pub struct LspLogMenu {
+pub struct DropdownMenu {
     #[serde(flatten)]
     pub container: ContainerStyle,
-    pub header: Interactive<ContainedText>,
-    pub server: ContainedText,
-    pub item: Interactive<ContainedText>,
+    pub header: Interactive<DropdownMenuItem>,
+    pub section_header: ContainedText,
+    pub item: Interactive<DropdownMenuItem>,
     pub row_height: f32,
 }
 
+#[derive(Deserialize, Default)]
+pub struct DropdownMenuItem {
+    #[serde(flatten)]
+    pub container: ContainerStyle,
+    #[serde(flatten)]
+    pub text: TextStyle,
+    pub secondary_text: Option<TextStyle>,
+    #[serde(default)]
+    pub secondary_text_spacing: f32,
+}
+
 #[derive(Clone, Deserialize, Default)]
 pub struct TabBar {
     #[serde(flatten)]

crates/zed/Cargo.toml ๐Ÿ”—

@@ -45,11 +45,11 @@ journal = { path = "../journal" }
 language = { path = "../language" }
 language_selector = { path = "../language_selector" }
 lsp = { path = "../lsp" }
-lsp_log = { path = "../lsp_log" }
+language_tools = { path = "../language_tools" }
 node_runtime = { path = "../node_runtime" }
 ai = { path = "../ai" }
 outline = { path = "../outline" }
-plugin_runtime = { path = "../plugin_runtime" }
+plugin_runtime = { path = "../plugin_runtime", optional = true }
 project = { path = "../project" }
 project_panel = { path = "../project_panel" }
 project_symbols = { path = "../project_symbols" }
@@ -102,13 +102,14 @@ tempdir.workspace = true
 thiserror.workspace = true
 tiny_http = "0.8"
 toml.workspace = true
-tree-sitter = "0.20"
+tree-sitter.workspace = true
 tree-sitter-c = "0.20.1"
 tree-sitter-cpp = "0.20.0"
 tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
 tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
 tree-sitter-embedded-template = "0.20.0"
 tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
+tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
 tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
 tree-sitter-rust = "0.20.3"
 tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }

crates/zed/src/languages.rs ๐Ÿ”—

@@ -10,6 +10,7 @@ mod elixir;
 mod go;
 mod html;
 mod json;
+#[cfg(feature = "plugin_runtime")]
 mod language_plugin;
 mod lua;
 mod python;
@@ -33,110 +34,109 @@ mod yaml;
 struct LanguageDir;
 
 pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
-    fn adapter_arc(adapter: impl LspAdapter) -> Arc<dyn LspAdapter> {
-        Arc::new(adapter)
-    }
-
-    let languages_list = [
-        (
-            "c",
-            tree_sitter_c::language(),
-            vec![adapter_arc(c::CLspAdapter)],
-        ),
-        (
-            "cpp",
-            tree_sitter_cpp::language(),
-            vec![adapter_arc(c::CLspAdapter)],
-        ),
-        ("css", tree_sitter_css::language(), vec![]),
-        (
-            "elixir",
-            tree_sitter_elixir::language(),
-            vec![adapter_arc(elixir::ElixirLspAdapter)],
-        ),
-        (
-            "go",
-            tree_sitter_go::language(),
-            vec![adapter_arc(go::GoLspAdapter)],
-        ),
-        (
-            "json",
-            tree_sitter_json::language(),
-            vec![adapter_arc(json::JsonLspAdapter::new(
-                node_runtime.clone(),
-                languages.clone(),
-            ))],
-        ),
-        ("markdown", tree_sitter_markdown::language(), vec![]),
-        (
-            "python",
-            tree_sitter_python::language(),
-            vec![adapter_arc(python::PythonLspAdapter::new(
-                node_runtime.clone(),
-            ))],
-        ),
-        (
-            "rust",
-            tree_sitter_rust::language(),
-            vec![adapter_arc(rust::RustLspAdapter)],
-        ),
-        ("toml", tree_sitter_toml::language(), vec![]),
-        (
-            "tsx",
-            tree_sitter_typescript::language_tsx(),
-            vec![
-                adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
-                adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())),
-            ],
-        ),
-        (
-            "typescript",
-            tree_sitter_typescript::language_typescript(),
-            vec![
-                adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
-                adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())),
-            ],
-        ),
-        (
-            "javascript",
-            tree_sitter_typescript::language_tsx(),
-            vec![
-                adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
-                adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())),
-            ],
-        ),
-        (
-            "html",
-            tree_sitter_html::language(),
-            vec![adapter_arc(html::HtmlLspAdapter::new(node_runtime.clone()))],
-        ),
-        (
-            "ruby",
-            tree_sitter_ruby::language(),
-            vec![adapter_arc(ruby::RubyLanguageServer)],
-        ),
-        (
-            "erb",
-            tree_sitter_embedded_template::language(),
-            vec![adapter_arc(ruby::RubyLanguageServer)],
-        ),
-        ("scheme", tree_sitter_scheme::language(), vec![]),
-        ("racket", tree_sitter_racket::language(), vec![]),
-        (
-            "lua",
-            tree_sitter_lua::language(),
-            vec![adapter_arc(lua::LuaLspAdapter)],
-        ),
-        (
-            "yaml",
-            tree_sitter_yaml::language(),
-            vec![adapter_arc(yaml::YamlLspAdapter::new(node_runtime))],
-        ),
-    ];
+    let language = |name, grammar, adapters| {
+        languages.register(name, load_config(name), grammar, adapters, load_queries)
+    };
 
-    for (name, grammar, lsp_adapters) in languages_list {
-        languages.register(name, load_config(name), grammar, lsp_adapters, load_queries);
-    }
+    language(
+        "c",
+        tree_sitter_c::language(),
+        vec![Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>],
+    );
+    language(
+        "cpp",
+        tree_sitter_cpp::language(),
+        vec![Arc::new(c::CLspAdapter)],
+    );
+    language("css", tree_sitter_css::language(), vec![]);
+    language(
+        "elixir",
+        tree_sitter_elixir::language(),
+        vec![Arc::new(elixir::ElixirLspAdapter)],
+    );
+    language(
+        "go",
+        tree_sitter_go::language(),
+        vec![Arc::new(go::GoLspAdapter)],
+    );
+    language(
+        "heex",
+        tree_sitter_heex::language(),
+        vec![Arc::new(elixir::ElixirLspAdapter)],
+    );
+    language(
+        "json",
+        tree_sitter_json::language(),
+        vec![Arc::new(json::JsonLspAdapter::new(
+            node_runtime.clone(),
+            languages.clone(),
+        ))],
+    );
+    language("markdown", tree_sitter_markdown::language(), vec![]);
+    language(
+        "python",
+        tree_sitter_python::language(),
+        vec![Arc::new(python::PythonLspAdapter::new(
+            node_runtime.clone(),
+        ))],
+    );
+    language(
+        "rust",
+        tree_sitter_rust::language(),
+        vec![Arc::new(rust::RustLspAdapter)],
+    );
+    language("toml", tree_sitter_toml::language(), vec![]);
+    language(
+        "tsx",
+        tree_sitter_typescript::language_tsx(),
+        vec![
+            Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
+            Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
+        ],
+    );
+    language(
+        "typescript",
+        tree_sitter_typescript::language_typescript(),
+        vec![
+            Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
+            Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
+        ],
+    );
+    language(
+        "javascript",
+        tree_sitter_typescript::language_tsx(),
+        vec![
+            Arc::new(typescript::TypeScriptLspAdapter::new(node_runtime.clone())),
+            Arc::new(typescript::EsLintLspAdapter::new(node_runtime.clone())),
+        ],
+    );
+    language(
+        "html",
+        tree_sitter_html::language(),
+        vec![Arc::new(html::HtmlLspAdapter::new(node_runtime.clone()))],
+    );
+    language(
+        "ruby",
+        tree_sitter_ruby::language(),
+        vec![Arc::new(ruby::RubyLanguageServer)],
+    );
+    language(
+        "erb",
+        tree_sitter_embedded_template::language(),
+        vec![Arc::new(ruby::RubyLanguageServer)],
+    );
+    language("scheme", tree_sitter_scheme::language(), vec![]);
+    language("racket", tree_sitter_racket::language(), vec![]);
+    language(
+        "lua",
+        tree_sitter_lua::language(),
+        vec![Arc::new(lua::LuaLspAdapter)],
+    );
+    language(
+        "yaml",
+        tree_sitter_yaml::language(),
+        vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime))],
+    );
 }
 
 #[cfg(any(test, feature = "test-support"))]

crates/zed/src/languages/heex/highlights.scm ๐Ÿ”—

@@ -0,0 +1,54 @@
+; HEEx delimiters
+[
+  "%>"
+  "--%>"
+  "-->"
+  "/>"
+  "<!"
+  "<!--"
+  "<"
+  "<%!--"
+  "<%"
+  "<%#"
+  "<%%="
+  "<%="
+  "</"
+  "</:"
+  "<:"
+  ">"
+  "{"
+  "}"
+] @punctuation.bracket
+
+; HEEx operators are highlighted as such
+"=" @operator
+
+; HEEx inherits the DOCTYPE tag from HTML
+(doctype) @constant
+
+(comment) @comment
+
+; HEEx tags and slots are highlighted as HTML
+[
+ (tag_name)
+ (slot_name)
+] @tag
+
+; HEEx attributes are highlighted as HTML attributes
+(attribute_name) @attribute
+
+; HEEx special attributes are highlighted as keywords
+(special_attribute_name) @keyword
+
+[
+  (attribute_value)
+  (quoted_attribute_value)
+] @string
+
+; HEEx components are highlighted as Elixir modules and functions
+(component_name
+  [
+    (module) @module
+    (function) @function
+    "." @punctuation.delimiter
+  ])

crates/zed/src/languages/heex/injections.scm ๐Ÿ”—

@@ -0,0 +1,13 @@
+((directive (partial_expression_value) @content)
+ (#set! language "elixir")
+ (#set! include-children)
+ (#set! combined))
+
+; Regular expression_values do not need to be combined
+((directive (expression_value) @content)
+ (#set! language "elixir"))
+
+; expressions live within HTML tags, and do not need to be combined
+;     <link href={ Routes.static_path(..) } />
+((expression (expression_value) @content)
+ (#set! language "elixir"))

crates/zed/src/main.rs ๐Ÿ”—

@@ -191,7 +191,7 @@ fn main() {
         language_selector::init(cx);
         theme_selector::init(cx);
         activity_indicator::init(cx);
-        lsp_log::init(cx);
+        language_tools::init(cx);
         call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
         collab_ui::init(&app_state, cx);
         feedback::init(cx);

crates/zed/src/zed.rs ๐Ÿ”—

@@ -312,8 +312,11 @@ pub fn initialize_workspace(
                                 let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new());
                                 toolbar.add_item(feedback_info_text, cx);
                                 let lsp_log_item =
-                                    cx.add_view(|_| lsp_log::LspLogToolbarItemView::new());
+                                    cx.add_view(|_| language_tools::LspLogToolbarItemView::new());
                                 toolbar.add_item(lsp_log_item, cx);
+                                let syntax_tree_item = cx
+                                    .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new());
+                                toolbar.add_item(syntax_tree_item, cx);
                             })
                         });
                     }

styles/src/styleTree/app.ts ๐Ÿ”—

@@ -17,7 +17,7 @@ import projectSharedNotification from "./projectSharedNotification"
 import tooltip from "./tooltip"
 import terminal from "./terminal"
 import contactList from "./contactList"
-import lspLogMenu from "./lspLogMenu"
+import toolbarDropdownMenu from "./toolbarDropdownMenu"
 import incomingCallNotification from "./incomingCallNotification"
 import { ColorScheme } from "../theme/colorScheme"
 import feedback from "./feedback"
@@ -46,7 +46,7 @@ export default function app(colorScheme: ColorScheme): Object {
         contactsPopover: contactsPopover(colorScheme),
         contactFinder: contactFinder(colorScheme),
         contactList: contactList(colorScheme),
-        lspLogMenu: lspLogMenu(colorScheme),
+        toolbarDropdownMenu: toolbarDropdownMenu(colorScheme),
         search: search(colorScheme),
         sharedScreen: sharedScreen(colorScheme),
         updateNotification: updateNotification(colorScheme),

styles/src/styleTree/lspLogMenu.ts โ†’ styles/src/styleTree/toolbarDropdownMenu.ts ๐Ÿ”—

@@ -1,7 +1,7 @@
 import { ColorScheme } from "../theme/colorScheme"
 import { background, border, text } from "./components"
 
-export default function contactsPanel(colorScheme: ColorScheme) {
+export default function dropdownMenu(colorScheme: ColorScheme) {
     let layer = colorScheme.middle
 
     return {
@@ -11,6 +11,8 @@ export default function contactsPanel(colorScheme: ColorScheme) {
         shadow: colorScheme.popoverShadow,
         header: {
             ...text(layer, "sans", { size: "sm" }),
+            secondaryText: text(layer, "sans", { size: "sm", color: "#aaaaaa" }),
+            secondaryTextSpacing: 10,
             padding: { left: 8, right: 8, top: 2, bottom: 2 },
             cornerRadius: 6,
             background: background(layer, "on"),
@@ -20,12 +22,14 @@ export default function contactsPanel(colorScheme: ColorScheme) {
                 ...text(layer, "sans", "hovered", { size: "sm" }),
             }
         },
-        server: {
+        sectionHeader: {
             ...text(layer, "sans", { size: "sm" }),
             padding: { left: 8, right: 8, top: 8, bottom: 8 },
         },
         item: {
             ...text(layer, "sans", { size: "sm" }),
+            secondaryTextSpacing: 10,
+            secondaryText: text(layer, "sans", { size: "sm" }),
             padding: { left: 18, right: 18, top: 2, bottom: 2 },
             hover: {
                 background: background(layer, "hovered"),