Merge pull request #293 from zed-industries/project-diagnostics

Created by Nathan Sobo

Project diagnostics: First pass

Change summary

Cargo.lock                                  |   31 
crates/chat_panel/src/chat_panel.rs         |    4 
crates/client/src/user.rs                   |   24 
crates/clock/src/clock.rs                   |    9 
crates/contacts_panel/src/contacts_panel.rs |  148 
crates/diagnostics/Cargo.toml               |   27 
crates/diagnostics/src/diagnostics.rs       |  870 +++++++
crates/editor/Cargo.toml                    |    6 
crates/editor/src/display_map.rs            |  264 +
crates/editor/src/display_map/block_map.rs  |  387 +-
crates/editor/src/display_map/fold_map.rs   |  362 +-
crates/editor/src/display_map/tab_map.rs    |  108 
crates/editor/src/display_map/wrap_map.rs   |  224 +
crates/editor/src/editor.rs                 |  425 +-
crates/editor/src/element.rs                |   62 
crates/editor/src/items.rs                  |  115 
crates/editor/src/movement.rs               |  195 +
crates/editor/src/multi_buffer.rs           | 2754 +++++++++++++++++++++++
crates/editor/src/multi_buffer/anchor.rs    |  118 
crates/editor/src/test.rs                   |   13 
crates/file_finder/src/file_finder.rs       |    4 
crates/go_to_line/src/go_to_line.rs         |   22 
crates/gpui/src/executor.rs                 |    8 
crates/journal/src/journal.rs               |    2 
crates/language/Cargo.toml                  |   18 
crates/language/build.rs                    |    1 
crates/language/src/buffer.rs               |  733 +++---
crates/language/src/diagnostic_set.rs       |  223 +
crates/language/src/language.rs             |   34 
crates/language/src/proto.rs                |  304 +-
crates/language/src/tests.rs                |  702 ++---
crates/lsp/src/lsp.rs                       |    6 
crates/project/Cargo.toml                   |    2 
crates/project/src/fs.rs                    |    1 
crates/project/src/project.rs               |  798 +++++-
crates/project/src/worktree.rs              |  703 +++--
crates/project_panel/src/project_panel.rs   |   17 
crates/rpc/proto/zed.proto                  |  247 +
crates/rpc/src/peer.rs                      |   10 
crates/rpc/src/proto.rs                     |   53 
crates/rpc/src/rpc.rs                       |    2 
crates/server/src/db.rs                     |    4 
crates/server/src/releases.rs               |    7 
crates/server/src/rpc.rs                    |  662 +++--
crates/server/src/rpc/store.rs              |  486 ++-
crates/sum_tree/src/cursor.rs               |   70 
crates/sum_tree/src/sum_tree.rs             |   13 
crates/sum_tree/src/tree_map.rs             |  152 +
crates/text/Cargo.toml                      |    1 
crates/text/src/anchor.rs                   |  550 ----
crates/text/src/locator.rs                  |   89 
crates/text/src/operation_queue.rs          |   50 
crates/text/src/patch.rs                    |   38 
crates/text/src/point.rs                    |    8 
crates/text/src/point_utf16.rs              |    8 
crates/text/src/random_char_iter.rs         |   12 
crates/text/src/rope.rs                     |   74 
crates/text/src/selection.rs                |  127 
crates/text/src/subscription.rs             |   48 
crates/text/src/tests.rs                    |  121 
crates/text/src/text.rs                     |  595 +---
crates/theme/src/theme.rs                   |    8 
crates/theme_selector/src/theme_selector.rs |    6 
crates/util/src/test.rs                     |   13 
crates/workspace/Cargo.toml                 |    1 
crates/workspace/src/workspace.rs           |  380 ++-
crates/zed/Cargo.toml                       |    2 
crates/zed/assets/icons/broadcast-24.svg    |    6 
crates/zed/assets/themes/_base.toml         |   20 
crates/zed/src/language.rs                  |  179 +
crates/zed/src/main.rs                      |    2 
crates/zed/src/zed.rs                       |   60 
script/seed-db                              |    3 
73 files changed, 9,406 insertions(+), 4,425 deletions(-)
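
The centerpiece is the new diagnostics crate (see crates/diagnostics/src/diagnostics.rs below), whose init function registers the Toggle and ClearInvalid actions together with their key bindings. A minimal sketch of how an application crate might hook this up at startup — the function name init_panels is hypothetical, and the assumption that crates/zed/src/zed.rs makes this call follows only from the change summary above, not from a hunk shown in this excerpt:

use gpui::MutableAppContext;

// Hypothetical startup wiring (not part of this diff): crates/zed/src/zed.rs is
// listed among the touched files, so the real call site is assumed to live there,
// following the same `init(cx)` convention used by the other panels.
fn init_panels(cx: &mut MutableAppContext) {
    // Registers alt-shift-D -> Toggle (which opens the project diagnostics view
    // via Workspace::add_item) and alt-shift-C -> ClearInvalid (scoped to
    // ProjectDiagnosticsEditor), as defined in crates/diagnostics/src/diagnostics.rs.
    diagnostics::init(cx);
}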

Detailed changes

Cargo.lock

@@ -1399,6 +1399,24 @@ dependencies = [
  "const-oid",
 ]
 
+[[package]]
+name = "diagnostics"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "client",
+ "collections",
+ "editor",
+ "gpui",
+ "language",
+ "postage",
+ "project",
+ "serde_json",
+ "unindent",
+ "util",
+ "workspace",
+]
+
 [[package]]
 name = "digest"
 version = "0.8.1"
@@ -1525,9 +1543,11 @@ dependencies = [
  "aho-corasick",
  "anyhow",
  "clock",
+ "collections",
  "ctor",
  "env_logger",
  "gpui",
+ "itertools",
  "language",
  "lazy_static",
  "log",
@@ -2585,7 +2605,11 @@ name = "language"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "async-trait",
  "clock",
+ "collections",
+ "ctor",
+ "env_logger",
  "futures",
  "gpui",
  "lazy_static",
@@ -2597,7 +2621,9 @@ dependencies = [
  "rpc",
  "serde",
  "similar",
+ "smallvec",
  "smol",
+ "sum_tree",
  "text",
  "theme",
  "tree-sitter",
@@ -3449,6 +3475,7 @@ dependencies = [
  "async-trait",
  "client",
  "clock",
+ "collections",
  "fsevent",
  "futures",
  "fuzzy",
@@ -4834,6 +4861,7 @@ dependencies = [
  "ctor",
  "env_logger",
  "gpui",
+ "lazy_static",
  "log",
  "parking_lot",
  "rand 0.8.3",
@@ -5628,6 +5656,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "client",
+ "clock",
  "gpui",
  "language",
  "log",
@@ -5673,9 +5702,11 @@ dependencies = [
  "chat_panel",
  "client",
  "clock",
+ "collections",
  "contacts_panel",
  "crossbeam-channel",
  "ctor",
+ "diagnostics",
  "dirs 3.0.1",
  "easy-parallel",
  "editor",

crates/chat_panel/src/chat_panel.rs

@@ -56,14 +56,14 @@ impl ChatPanel {
                 4,
                 {
                     let settings = settings.clone();
-                    move |_| {
+                    Arc::new(move |_| {
                         let settings = settings.borrow();
                         EditorSettings {
                             tab_size: settings.tab_size,
                             style: settings.theme.chat_panel.input_editor.as_editor(),
                             soft_wrap: editor::SoftWrap::EditorWidth,
                         }
-                    }
+                    })
                 },
                 cx,
             )

crates/client/src/user.rs

@@ -22,14 +22,14 @@ pub struct User {
 #[derive(Debug)]
 pub struct Contact {
     pub user: Arc<User>,
-    pub worktrees: Vec<WorktreeMetadata>,
+    pub projects: Vec<ProjectMetadata>,
 }
 
 #[derive(Debug)]
-pub struct WorktreeMetadata {
+pub struct ProjectMetadata {
     pub id: u64,
-    pub root_name: String,
     pub is_shared: bool,
+    pub worktree_root_names: Vec<String>,
     pub guests: Vec<Arc<User>>,
 }
 
@@ -112,7 +112,7 @@ impl UserStore {
         let mut user_ids = HashSet::new();
         for contact in &message.contacts {
             user_ids.insert(contact.user_id);
-            user_ids.extend(contact.worktrees.iter().flat_map(|w| &w.guests).copied());
+            user_ids.extend(contact.projects.iter().flat_map(|w| &w.guests).copied());
         }
 
         let load_users = self.load_users(user_ids.into_iter().collect(), cx);
@@ -221,10 +221,10 @@ impl Contact {
                 user_store.fetch_user(contact.user_id, cx)
             })
             .await?;
-        let mut worktrees = Vec::new();
-        for worktree in contact.worktrees {
+        let mut projects = Vec::new();
+        for project in contact.projects {
             let mut guests = Vec::new();
-            for participant_id in worktree.guests {
+            for participant_id in project.guests {
                 guests.push(
                     user_store
                         .update(cx, |user_store, cx| {
@@ -233,14 +233,14 @@ impl Contact {
                         .await?,
                 );
             }
-            worktrees.push(WorktreeMetadata {
-                id: worktree.id,
-                root_name: worktree.root_name,
-                is_shared: worktree.is_shared,
+            projects.push(ProjectMetadata {
+                id: project.id,
+                worktree_root_names: project.worktree_root_names.clone(),
+                is_shared: project.is_shared,
                 guests,
             });
         }
-        Ok(Self { user, worktrees })
+        Ok(Self { user, projects })
     }
 }
 

crates/clock/src/clock.rs

@@ -21,6 +21,15 @@ pub struct Lamport {
 }
 
 impl Local {
+    pub const MIN: Self = Self {
+        replica_id: ReplicaId::MIN,
+        value: Seq::MIN,
+    };
+    pub const MAX: Self = Self {
+        replica_id: ReplicaId::MAX,
+        value: Seq::MAX,
+    };
+
     pub fn new(replica_id: ReplicaId) -> Self {
         Self {
             replica_id,

crates/contacts_panel/src/contacts_panel.rs

@@ -1,27 +1,15 @@
+use std::sync::Arc;
+
 use client::{Contact, UserStore};
 use gpui::{
-    action,
     elements::*,
     geometry::{rect::RectF, vector::vec2f},
     platform::CursorStyle,
-    Element, ElementBox, Entity, LayoutContext, ModelHandle, MutableAppContext, RenderContext,
-    Subscription, View, ViewContext,
+    Element, ElementBox, Entity, LayoutContext, ModelHandle, RenderContext, Subscription, View,
+    ViewContext,
 };
 use postage::watch;
-use theme::Theme;
-use workspace::{Settings, Workspace};
-
-action!(JoinWorktree, u64);
-action!(LeaveWorktree, u64);
-action!(ShareWorktree, u64);
-action!(UnshareWorktree, u64);
-
-pub fn init(cx: &mut MutableAppContext) {
-    cx.add_action(ContactsPanel::share_worktree);
-    cx.add_action(ContactsPanel::unshare_worktree);
-    cx.add_action(ContactsPanel::join_worktree);
-    cx.add_action(ContactsPanel::leave_worktree);
-}
+use workspace::{AppState, JoinProject, JoinProjectParams, Settings};
 
 pub struct ContactsPanel {
     contacts: ListState,
@@ -31,78 +19,33 @@ pub struct ContactsPanel {
 }
 
 impl ContactsPanel {
-    pub fn new(
-        user_store: ModelHandle<UserStore>,
-        settings: watch::Receiver<Settings>,
-        cx: &mut ViewContext<Self>,
-    ) -> Self {
+    pub fn new(app_state: Arc<AppState>, cx: &mut ViewContext<Self>) -> Self {
         Self {
             contacts: ListState::new(
-                user_store.read(cx).contacts().len(),
+                app_state.user_store.read(cx).contacts().len(),
                 Orientation::Top,
                 1000.,
                 {
-                    let user_store = user_store.clone();
-                    let settings = settings.clone();
+                    let app_state = app_state.clone();
                     move |ix, cx| {
-                        let user_store = user_store.read(cx);
+                        let user_store = app_state.user_store.read(cx);
                         let contacts = user_store.contacts().clone();
                         let current_user_id = user_store.current_user().map(|user| user.id);
                         Self::render_collaborator(
                             &contacts[ix],
                             current_user_id,
-                            &settings.borrow().theme,
+                            app_state.clone(),
                             cx,
                         )
                     }
                 },
             ),
-            _maintain_contacts: cx.observe(&user_store, Self::update_contacts),
-            user_store,
-            settings,
+            _maintain_contacts: cx.observe(&app_state.user_store, Self::update_contacts),
+            user_store: app_state.user_store.clone(),
+            settings: app_state.settings.clone(),
         }
     }
 
-    fn share_worktree(
-        workspace: &mut Workspace,
-        action: &ShareWorktree,
-        cx: &mut ViewContext<Workspace>,
-    ) {
-        workspace
-            .project()
-            .update(cx, |p, cx| p.share_worktree(action.0, cx));
-    }
-
-    fn unshare_worktree(
-        workspace: &mut Workspace,
-        action: &UnshareWorktree,
-        cx: &mut ViewContext<Workspace>,
-    ) {
-        workspace
-            .project()
-            .update(cx, |p, cx| p.unshare_worktree(action.0, cx));
-    }
-
-    fn join_worktree(
-        workspace: &mut Workspace,
-        action: &JoinWorktree,
-        cx: &mut ViewContext<Workspace>,
-    ) {
-        workspace
-            .project()
-            .update(cx, |p, cx| p.add_remote_worktree(action.0, cx).detach());
-    }
-
-    fn leave_worktree(
-        workspace: &mut Workspace,
-        action: &LeaveWorktree,
-        cx: &mut ViewContext<Workspace>,
-    ) {
-        workspace
-            .project()
-            .update(cx, |p, cx| p.close_remote_worktree(action.0, cx));
-    }
-
     fn update_contacts(&mut self, _: ModelHandle<UserStore>, cx: &mut ViewContext<Self>) {
         self.contacts
             .reset(self.user_store.read(cx).contacts().len());
@@ -112,20 +55,16 @@ impl ContactsPanel {
     fn render_collaborator(
         collaborator: &Contact,
         current_user_id: Option<u64>,
-        theme: &Theme,
+        app_state: Arc<AppState>,
         cx: &mut LayoutContext,
     ) -> ElementBox {
-        let theme = &theme.contacts_panel;
-        let worktree_count = collaborator.worktrees.len();
+        let theme = &app_state.settings.borrow().theme.contacts_panel;
+        let project_count = collaborator.projects.len();
         let font_cache = cx.font_cache();
-        let line_height = theme.unshared_worktree.name.text.line_height(font_cache);
-        let cap_height = theme.unshared_worktree.name.text.cap_height(font_cache);
-        let baseline_offset = theme
-            .unshared_worktree
-            .name
-            .text
-            .baseline_offset(font_cache)
-            + (theme.unshared_worktree.height - line_height) / 2.;
+        let line_height = theme.unshared_project.name.text.line_height(font_cache);
+        let cap_height = theme.unshared_project.name.text.cap_height(font_cache);
+        let baseline_offset = theme.unshared_project.name.text.baseline_offset(font_cache)
+            + (theme.unshared_project.height - line_height) / 2.;
         let tree_branch_width = theme.tree_branch_width;
         let tree_branch_color = theme.tree_branch_color;
         let host_avatar_height = theme
@@ -161,11 +100,11 @@ impl ContactsPanel {
             )
             .with_children(
                 collaborator
-                    .worktrees
+                    .projects
                     .iter()
                     .enumerate()
-                    .map(|(ix, worktree)| {
-                        let worktree_id = worktree.id;
+                    .map(|(ix, project)| {
+                        let project_id = project.id;
 
                         Flex::row()
                             .with_child(
@@ -182,7 +121,7 @@ impl ContactsPanel {
                                             vec2f(start_x, start_y),
                                             vec2f(
                                                 start_x + tree_branch_width,
-                                                if ix + 1 == worktree_count {
+                                                if ix + 1 == project_count {
                                                     end_y
                                                 } else {
                                                     bounds.max_y()
@@ -210,28 +149,28 @@ impl ContactsPanel {
                             .with_child({
                                 let is_host = Some(collaborator.user.id) == current_user_id;
                                 let is_guest = !is_host
-                                    && worktree
+                                    && project
                                         .guests
                                         .iter()
                                         .any(|guest| Some(guest.id) == current_user_id);
-                                let is_shared = worktree.is_shared;
+                                let is_shared = project.is_shared;
+                                let app_state = app_state.clone();
 
                                 MouseEventHandler::new::<ContactsPanel, _, _, _>(
-                                    worktree_id as usize,
+                                    project_id as usize,
                                     cx,
                                     |mouse_state, _| {
-                                        let style = match (worktree.is_shared, mouse_state.hovered)
-                                        {
-                                            (false, false) => &theme.unshared_worktree,
-                                            (false, true) => &theme.hovered_unshared_worktree,
-                                            (true, false) => &theme.shared_worktree,
-                                            (true, true) => &theme.hovered_shared_worktree,
+                                        let style = match (project.is_shared, mouse_state.hovered) {
+                                            (false, false) => &theme.unshared_project,
+                                            (false, true) => &theme.hovered_unshared_project,
+                                            (true, false) => &theme.shared_project,
+                                            (true, true) => &theme.hovered_shared_project,
                                         };
 
                                         Flex::row()
                                             .with_child(
                                                 Label::new(
-                                                    worktree.root_name.clone(),
+                                                    project.worktree_root_names.join(", "),
                                                     style.name.text.clone(),
                                                 )
                                                 .aligned()
@@ -240,7 +179,7 @@ impl ContactsPanel {
                                                 .with_style(style.name.container)
                                                 .boxed(),
                                             )
-                                            .with_children(worktree.guests.iter().filter_map(
+                                            .with_children(project.guests.iter().filter_map(
                                                 |participant| {
                                                     participant.avatar.clone().map(|avatar| {
                                                         Image::new(avatar)
@@ -268,23 +207,18 @@ impl ContactsPanel {
                                     CursorStyle::Arrow
                                 })
                                 .on_click(move |cx| {
-                                    if is_shared {
-                                        if is_host {
-                                            cx.dispatch_action(UnshareWorktree(worktree_id));
-                                        } else if is_guest {
-                                            cx.dispatch_action(LeaveWorktree(worktree_id));
-                                        } else {
-                                            cx.dispatch_action(JoinWorktree(worktree_id))
-                                        }
-                                    } else if is_host {
-                                        cx.dispatch_action(ShareWorktree(worktree_id));
+                                    if !is_host && !is_guest {
+                                        cx.dispatch_global_action(JoinProject(JoinProjectParams {
+                                            project_id,
+                                            app_state: app_state.clone(),
+                                        }));
                                     }
                                 })
                                 .expanded(1.0)
                                 .boxed()
                             })
                             .constrained()
-                            .with_height(theme.unshared_worktree.height)
+                            .with_height(theme.unshared_project.height)
                             .boxed()
                     }),
             )

crates/diagnostics/Cargo.toml

@@ -0,0 +1,27 @@
+[package]
+name = "diagnostics"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+path = "src/diagnostics.rs"
+
+[dependencies]
+anyhow = "1.0"
+collections = { path = "../collections" }
+editor = { path = "../editor" }
+language = { path = "../language" }
+gpui = { path = "../gpui" }
+project = { path = "../project" }
+util = { path = "../util" }
+workspace = { path = "../workspace" }
+postage = { version = "0.4", features = ["futures-traits"] }
+
+[dev-dependencies]
+unindent = "0.1"
+client = { path = "../client", features = ["test-support"] }
+editor = { path = "../editor", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+gpui = { path = "../gpui", features = ["test-support"] }
+workspace = { path = "../workspace", features = ["test-support"] }
+serde_json = { version = "1", features = ["preserve_order"] }

crates/diagnostics/src/diagnostics.rs

@@ -0,0 +1,870 @@
+use anyhow::Result;
+use collections::{HashMap, HashSet};
+use editor::{
+    context_header_renderer, diagnostic_block_renderer, diagnostic_header_renderer,
+    display_map::{BlockDisposition, BlockId, BlockProperties},
+    BuildSettings, Editor, ExcerptId, ExcerptProperties, MultiBuffer,
+};
+use gpui::{
+    action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext,
+    RenderContext, Task, View, ViewContext, ViewHandle,
+};
+use language::{Bias, Buffer, Diagnostic, DiagnosticEntry, Point};
+use postage::watch;
+use project::Project;
+use std::{cmp::Ordering, ops::Range, path::Path, sync::Arc};
+use util::TryFutureExt;
+use workspace::Workspace;
+
+action!(Toggle);
+action!(ClearInvalid);
+
+const CONTEXT_LINE_COUNT: u32 = 1;
+
+pub fn init(cx: &mut MutableAppContext) {
+    cx.add_bindings([
+        Binding::new("alt-shift-D", Toggle, None),
+        Binding::new(
+            "alt-shift-C",
+            ClearInvalid,
+            Some("ProjectDiagnosticsEditor"),
+        ),
+    ]);
+    cx.add_action(ProjectDiagnosticsEditor::toggle);
+    cx.add_action(ProjectDiagnosticsEditor::clear_invalid);
+}
+
+type Event = editor::Event;
+
+struct ProjectDiagnostics {
+    project: ModelHandle<Project>,
+}
+
+struct ProjectDiagnosticsEditor {
+    editor: ViewHandle<Editor>,
+    excerpts: ModelHandle<MultiBuffer>,
+    path_states: Vec<(Arc<Path>, Vec<DiagnosticGroupState>)>,
+    build_settings: BuildSettings,
+}
+
+struct DiagnosticGroupState {
+    primary_diagnostic: DiagnosticEntry<language::Anchor>,
+    excerpts: Vec<ExcerptId>,
+    blocks: HashMap<BlockId, DiagnosticBlock>,
+    block_count: usize,
+    is_valid: bool,
+}
+
+enum DiagnosticBlock {
+    Header(Diagnostic),
+    Inline(Diagnostic),
+    Context,
+}
+
+impl ProjectDiagnostics {
+    fn new(project: ModelHandle<Project>) -> Self {
+        Self { project }
+    }
+}
+
+impl Entity for ProjectDiagnostics {
+    type Event = ();
+}
+
+impl Entity for ProjectDiagnosticsEditor {
+    type Event = Event;
+}
+
+impl View for ProjectDiagnosticsEditor {
+    fn ui_name() -> &'static str {
+        "ProjectDiagnosticsEditor"
+    }
+
+    fn render(&mut self, _: &mut RenderContext<Self>) -> ElementBox {
+        ChildView::new(self.editor.id()).boxed()
+    }
+
+    fn on_focus(&mut self, cx: &mut ViewContext<Self>) {
+        cx.focus(&self.editor);
+    }
+}
+
+impl ProjectDiagnosticsEditor {
+    fn new(
+        project: ModelHandle<Project>,
+        settings: watch::Receiver<workspace::Settings>,
+        cx: &mut ViewContext<Self>,
+    ) -> Self {
+        let project_paths = project
+            .read(cx)
+            .diagnostic_summaries(cx)
+            .map(|e| e.0)
+            .collect::<Vec<_>>();
+
+        cx.spawn(|this, mut cx| {
+            let project = project.clone();
+            async move {
+                for project_path in project_paths {
+                    let buffer = project
+                        .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
+                        .await?;
+                    this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx))
+                }
+                Result::<_, anyhow::Error>::Ok(())
+            }
+        })
+        .detach();
+
+        cx.subscribe(&project, |_, project, event, cx| {
+            if let project::Event::DiagnosticsUpdated(project_path) = event {
+                let project_path = project_path.clone();
+                cx.spawn(|this, mut cx| {
+                    async move {
+                        let buffer = project
+                            .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))
+                            .await?;
+                        this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx));
+                        Ok(())
+                    }
+                    .log_err()
+                })
+                .detach();
+            }
+        })
+        .detach();
+
+        let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id()));
+        let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone());
+        let editor =
+            cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx));
+        cx.subscribe(&editor, |_, _, event, cx| cx.emit(*event))
+            .detach();
+        Self {
+            excerpts,
+            editor,
+            build_settings,
+            path_states: Default::default(),
+        }
+    }
+
+    #[cfg(test)]
+    fn text(&self, cx: &AppContext) -> String {
+        self.editor.read(cx).text(cx)
+    }
+
+    fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
+        let diagnostics = cx.add_model(|_| ProjectDiagnostics::new(workspace.project().clone()));
+        workspace.add_item(diagnostics, cx);
+    }
+
+    fn clear_invalid(&mut self, _: &ClearInvalid, cx: &mut ViewContext<Self>) {
+        let mut blocks_to_delete = HashSet::default();
+        let mut excerpts_to_delete = Vec::new();
+        let mut path_ixs_to_delete = Vec::new();
+        for (ix, (_, groups)) in self.path_states.iter_mut().enumerate() {
+            groups.retain(|group| {
+                if group.is_valid {
+                    true
+                } else {
+                    blocks_to_delete.extend(group.blocks.keys().copied());
+                    excerpts_to_delete.extend(group.excerpts.iter().cloned());
+                    false
+                }
+            });
+
+            if groups.is_empty() {
+                path_ixs_to_delete.push(ix);
+            }
+        }
+
+        for ix in path_ixs_to_delete.into_iter().rev() {
+            self.path_states.remove(ix);
+        }
+
+        self.excerpts.update(cx, |excerpts, cx| {
+            excerpts_to_delete.sort_unstable();
+            excerpts.remove_excerpts(&excerpts_to_delete, cx)
+        });
+        self.editor
+            .update(cx, |editor, cx| editor.remove_blocks(blocks_to_delete, cx));
+    }
+
+    fn populate_excerpts(&mut self, buffer: ModelHandle<Buffer>, cx: &mut ViewContext<Self>) {
+        let snapshot;
+        let path;
+        {
+            let buffer = buffer.read(cx);
+            snapshot = buffer.snapshot();
+            if let Some(file) = buffer.file() {
+                path = file.path().clone();
+            } else {
+                return;
+            }
+        }
+
+        let path_ix = match self
+            .path_states
+            .binary_search_by_key(&path.as_ref(), |e| e.0.as_ref())
+        {
+            Ok(ix) => ix,
+            Err(ix) => {
+                self.path_states
+                    .insert(ix, (path.clone(), Default::default()));
+                ix
+            }
+        };
+
+        let mut prev_excerpt_id = if path_ix > 0 {
+            let prev_path_last_group = &self.path_states[path_ix - 1].1.last().unwrap();
+            prev_path_last_group.excerpts.last().unwrap().clone()
+        } else {
+            ExcerptId::min()
+        };
+
+        let groups = &mut self.path_states[path_ix].1;
+        let mut groups_to_add = Vec::new();
+        let mut group_ixs_to_remove = Vec::new();
+        let mut blocks_to_add = Vec::new();
+        let mut blocks_to_restyle = HashMap::default();
+        let mut blocks_to_remove = HashSet::default();
+        let selected_excerpts = self
+            .editor
+            .read(cx)
+            .local_anchor_selections()
+            .iter()
+            .flat_map(|s| [s.start.excerpt_id().clone(), s.end.excerpt_id().clone()])
+            .collect::<HashSet<_>>();
+        let mut diagnostic_blocks = Vec::new();
+        let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| {
+            let mut old_groups = groups.iter_mut().enumerate().peekable();
+            let mut new_groups = snapshot
+                .diagnostic_groups()
+                .into_iter()
+                .filter(|group| group.entries[group.primary_ix].diagnostic.is_disk_based)
+                .peekable();
+
+            loop {
+                let mut to_insert = None;
+                let mut to_invalidate = None;
+                let mut to_validate = None;
+                match (old_groups.peek(), new_groups.peek()) {
+                    (None, None) => break,
+                    (None, Some(_)) => to_insert = new_groups.next(),
+                    (Some(_), None) => to_invalidate = old_groups.next(),
+                    (Some((_, old_group)), Some(new_group)) => {
+                        let old_primary = &old_group.primary_diagnostic;
+                        let new_primary = &new_group.entries[new_group.primary_ix];
+                        match compare_diagnostics(old_primary, new_primary, &snapshot) {
+                            Ordering::Less => to_invalidate = old_groups.next(),
+                            Ordering::Equal => {
+                                to_validate = old_groups.next();
+                                new_groups.next();
+                            }
+                            Ordering::Greater => to_insert = new_groups.next(),
+                        }
+                    }
+                }
+
+                if let Some(group) = to_insert {
+                    let mut group_state = DiagnosticGroupState {
+                        primary_diagnostic: group.entries[group.primary_ix].clone(),
+                        excerpts: Default::default(),
+                        blocks: Default::default(),
+                        block_count: 0,
+                        is_valid: true,
+                    };
+                    let mut pending_range: Option<(Range<Point>, usize)> = None;
+                    let mut is_first_excerpt_for_group = true;
+                    for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() {
+                        let resolved_entry = entry.map(|e| e.resolve::<Point>(&snapshot));
+                        if let Some((range, start_ix)) = &mut pending_range {
+                            if let Some(entry) = resolved_entry.as_ref() {
+                                if entry.range.start.row
+                                    <= range.end.row + 1 + CONTEXT_LINE_COUNT * 2
+                                {
+                                    range.end = range.end.max(entry.range.end);
+                                    continue;
+                                }
+                            }
+
+                            let excerpt_start =
+                                Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0);
+                            let excerpt_end = snapshot.clip_point(
+                                Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX),
+                                Bias::Left,
+                            );
+                            let excerpt_id = excerpts.insert_excerpt_after(
+                                &prev_excerpt_id,
+                                ExcerptProperties {
+                                    buffer: &buffer,
+                                    range: excerpt_start..excerpt_end,
+                                },
+                                excerpts_cx,
+                            );
+
+                            prev_excerpt_id = excerpt_id.clone();
+                            group_state.excerpts.push(excerpt_id.clone());
+                            let header_position = (excerpt_id.clone(), language::Anchor::min());
+
+                            if is_first_excerpt_for_group {
+                                is_first_excerpt_for_group = false;
+                                let primary = &group.entries[group.primary_ix].diagnostic;
+                                group_state.block_count += 1;
+                                diagnostic_blocks.push(DiagnosticBlock::Header(primary.clone()));
+                                blocks_to_add.push(BlockProperties {
+                                    position: header_position,
+                                    height: 2,
+                                    render: diagnostic_header_renderer(
+                                        buffer.clone(),
+                                        primary.clone(),
+                                        true,
+                                        self.build_settings.clone(),
+                                    ),
+                                    disposition: BlockDisposition::Above,
+                                });
+                            } else {
+                                group_state.block_count += 1;
+                                diagnostic_blocks.push(DiagnosticBlock::Context);
+                                blocks_to_add.push(BlockProperties {
+                                    position: header_position,
+                                    height: 1,
+                                    render: context_header_renderer(self.build_settings.clone()),
+                                    disposition: BlockDisposition::Above,
+                                });
+                            }
+
+                            for entry in &group.entries[*start_ix..ix] {
+                                if !entry.diagnostic.is_primary {
+                                    group_state.block_count += 1;
+                                    diagnostic_blocks
+                                        .push(DiagnosticBlock::Inline(entry.diagnostic.clone()));
+                                    blocks_to_add.push(BlockProperties {
+                                        position: (excerpt_id.clone(), entry.range.start.clone()),
+                                        height: entry.diagnostic.message.matches('\n').count()
+                                            as u8
+                                            + 1,
+                                        render: diagnostic_block_renderer(
+                                            entry.diagnostic.clone(),
+                                            true,
+                                            self.build_settings.clone(),
+                                        ),
+                                        disposition: BlockDisposition::Below,
+                                    });
+                                }
+                            }
+
+                            pending_range.take();
+                        }
+
+                        if let Some(entry) = resolved_entry {
+                            pending_range = Some((entry.range.clone(), ix));
+                        }
+                    }
+
+                    groups_to_add.push(group_state);
+                } else if let Some((group_ix, group_state)) = to_invalidate {
+                    if group_state
+                        .excerpts
+                        .iter()
+                        .any(|excerpt_id| selected_excerpts.contains(excerpt_id))
+                    {
+                        for (block_id, block) in &group_state.blocks {
+                            match block {
+                                DiagnosticBlock::Header(diagnostic) => {
+                                    blocks_to_restyle.insert(
+                                        *block_id,
+                                        diagnostic_header_renderer(
+                                            buffer.clone(),
+                                            diagnostic.clone(),
+                                            false,
+                                            self.build_settings.clone(),
+                                        ),
+                                    );
+                                }
+                                DiagnosticBlock::Inline(diagnostic) => {
+                                    blocks_to_restyle.insert(
+                                        *block_id,
+                                        diagnostic_block_renderer(
+                                            diagnostic.clone(),
+                                            false,
+                                            self.build_settings.clone(),
+                                        ),
+                                    );
+                                }
+                                DiagnosticBlock::Context => {}
+                            }
+                        }
+
+                        group_state.is_valid = false;
+                        prev_excerpt_id = group_state.excerpts.last().unwrap().clone();
+                    } else {
+                        excerpts.remove_excerpts(group_state.excerpts.iter(), excerpts_cx);
+                        group_ixs_to_remove.push(group_ix);
+                        blocks_to_remove.extend(group_state.blocks.keys().copied());
+                    }
+                } else if let Some((_, group_state)) = to_validate {
+                    for (block_id, block) in &group_state.blocks {
+                        match block {
+                            DiagnosticBlock::Header(diagnostic) => {
+                                blocks_to_restyle.insert(
+                                    *block_id,
+                                    diagnostic_header_renderer(
+                                        buffer.clone(),
+                                        diagnostic.clone(),
+                                        true,
+                                        self.build_settings.clone(),
+                                    ),
+                                );
+                            }
+                            DiagnosticBlock::Inline(diagnostic) => {
+                                blocks_to_restyle.insert(
+                                    *block_id,
+                                    diagnostic_block_renderer(
+                                        diagnostic.clone(),
+                                        true,
+                                        self.build_settings.clone(),
+                                    ),
+                                );
+                            }
+                            DiagnosticBlock::Context => {}
+                        }
+                    }
+                    group_state.is_valid = true;
+                    prev_excerpt_id = group_state.excerpts.last().unwrap().clone();
+                } else {
+                    unreachable!();
+                }
+            }
+
+            excerpts.snapshot(excerpts_cx)
+        });
+
+        self.editor.update(cx, |editor, cx| {
+            editor.remove_blocks(blocks_to_remove, cx);
+            editor.replace_blocks(blocks_to_restyle, cx);
+            let mut block_ids = editor
+                .insert_blocks(
+                    blocks_to_add.into_iter().map(|block| {
+                        let (excerpt_id, text_anchor) = block.position;
+                        BlockProperties {
+                            position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor),
+                            height: block.height,
+                            render: block.render,
+                            disposition: block.disposition,
+                        }
+                    }),
+                    cx,
+                )
+                .into_iter()
+                .zip(diagnostic_blocks);
+
+            for group_state in &mut groups_to_add {
+                group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect();
+            }
+        });
+
+        for ix in group_ixs_to_remove.into_iter().rev() {
+            groups.remove(ix);
+        }
+        groups.extend(groups_to_add);
+        groups.sort_unstable_by(|a, b| {
+            let range_a = &a.primary_diagnostic.range;
+            let range_b = &b.primary_diagnostic.range;
+            range_a
+                .start
+                .cmp(&range_b.start, &snapshot)
+                .unwrap()
+                .then_with(|| range_a.end.cmp(&range_b.end, &snapshot).unwrap())
+        });
+
+        if groups.is_empty() {
+            self.path_states.remove(path_ix);
+        }
+
+        cx.notify();
+    }
+}
+
+impl workspace::Item for ProjectDiagnostics {
+    type View = ProjectDiagnosticsEditor;
+
+    fn build_view(
+        handle: ModelHandle<Self>,
+        settings: watch::Receiver<workspace::Settings>,
+        cx: &mut ViewContext<Self::View>,
+    ) -> Self::View {
+        let project = handle.read(cx).project.clone();
+        ProjectDiagnosticsEditor::new(project, settings, cx)
+    }
+
+    fn project_path(&self) -> Option<project::ProjectPath> {
+        None
+    }
+}
+
+impl workspace::ItemView for ProjectDiagnosticsEditor {
+    fn title(&self, _: &AppContext) -> String {
+        "Project Diagnostics".to_string()
+    }
+
+    fn project_path(&self, _: &AppContext) -> Option<project::ProjectPath> {
+        None
+    }
+
+    fn save(&mut self, cx: &mut ViewContext<Self>) -> Result<Task<Result<()>>> {
+        self.excerpts.update(cx, |excerpts, cx| excerpts.save(cx))
+    }
+
+    fn save_as(
+        &mut self,
+        _: ModelHandle<project::Worktree>,
+        _: &std::path::Path,
+        _: &mut ViewContext<Self>,
+    ) -> Task<Result<()>> {
+        unreachable!()
+    }
+
+    fn is_dirty(&self, cx: &AppContext) -> bool {
+        self.excerpts.read(cx).read(cx).is_dirty()
+    }
+
+    fn has_conflict(&self, cx: &AppContext) -> bool {
+        self.excerpts.read(cx).read(cx).has_conflict()
+    }
+
+    fn should_update_tab_on_event(event: &Event) -> bool {
+        matches!(
+            event,
+            Event::Saved | Event::Dirtied | Event::FileHandleChanged
+        )
+    }
+
+    fn can_save(&self, _: &AppContext) -> bool {
+        true
+    }
+
+    fn can_save_as(&self, _: &AppContext) -> bool {
+        false
+    }
+}
+
+fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
+    lhs: &DiagnosticEntry<L>,
+    rhs: &DiagnosticEntry<R>,
+    snapshot: &language::BufferSnapshot,
+) -> Ordering {
+    lhs.range
+        .start
+        .to_offset(&snapshot)
+        .cmp(&rhs.range.start.to_offset(snapshot))
+        .then_with(|| {
+            lhs.range
+                .end
+                .to_offset(&snapshot)
+                .cmp(&rhs.range.end.to_offset(snapshot))
+        })
+        .then_with(|| lhs.diagnostic.message.cmp(&rhs.diagnostic.message))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use client::{http::ServerResponse, test::FakeHttpClient, Client, UserStore};
+    use gpui::TestAppContext;
+    use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry};
+    use project::FakeFs;
+    use serde_json::json;
+    use std::sync::Arc;
+    use unindent::Unindent as _;
+    use workspace::WorkspaceParams;
+
+    #[gpui::test]
+    async fn test_diagnostics(mut cx: TestAppContext) {
+        let settings = cx.update(WorkspaceParams::test).settings;
+        let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) });
+        let client = Client::new();
+        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
+        let fs = Arc::new(FakeFs::new());
+
+        let project = cx.update(|cx| {
+            Project::local(
+                client.clone(),
+                user_store,
+                Arc::new(LanguageRegistry::new()),
+                fs.clone(),
+                cx,
+            )
+        });
+
+        fs.insert_tree(
+            "/test",
+            json!({
+                "a.rs": "
+                    const a: i32 = 'a';
+                ".unindent(),
+
+                "main.rs": "
+                    fn main() {
+                        let x = vec![];
+                        let y = vec![];
+                        a(x);
+                        b(y);
+                        // comment 1
+                        // comment 2
+                        c(y);
+                        d(x);
+                    }
+                "
+                .unindent(),
+            }),
+        )
+        .await;
+
+        let worktree = project
+            .update(&mut cx, |project, cx| {
+                project.add_local_worktree("/test", cx)
+            })
+            .await
+            .unwrap();
+
+        worktree.update(&mut cx, |worktree, cx| {
+            worktree
+                .update_diagnostics_from_provider(
+                    Arc::from("/test/main.rs".as_ref()),
+                    vec![
+                        DiagnosticEntry {
+                            range: 20..21,
+                            diagnostic: Diagnostic {
+                                message:
+                                    "move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait"
+                                        .to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 40..41,
+                            diagnostic: Diagnostic {
+                                message:
+                                    "move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait"
+                                        .to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 58..59,
+                            diagnostic: Diagnostic {
+                                message: "value moved here".to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 68..69,
+                            diagnostic: Diagnostic {
+                                message: "value moved here".to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 112..113,
+                            diagnostic: Diagnostic {
+                                message: "use of moved value".to_string(),
+                                severity: DiagnosticSeverity::ERROR,
+                                is_primary: true,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 112..113,
+                            diagnostic: Diagnostic {
+                                message: "value used here after move".to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 122..123,
+                            diagnostic: Diagnostic {
+                                message: "use of moved value".to_string(),
+                                severity: DiagnosticSeverity::ERROR,
+                                is_primary: true,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 122..123,
+                            diagnostic: Diagnostic {
+                                message: "value used here after move".to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 1,
+                                ..Default::default()
+                            },
+                        },
+                    ],
+                    cx,
+                )
+                .unwrap();
+        });
+
+        let view = cx.add_view(Default::default(), |cx| {
+            ProjectDiagnosticsEditor::new(project.clone(), settings, cx)
+        });
+
+        view.condition(&mut cx, |view, cx| view.text(cx).contains("fn main()"))
+            .await;
+
+        view.update(&mut cx, |view, cx| {
+            let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx));
+
+            assert_eq!(
+                editor.text(),
+                concat!(
+                    //
+                    // main.rs, diagnostic group 1
+                    //
+                    "\n", // primary message
+                    "\n", // filename
+                    "    let x = vec![];\n",
+                    "    let y = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    a(x);\n",
+                    "    b(y);\n",
+                    "\n", // supporting diagnostic
+                    "    // comment 1\n",
+                    "    // comment 2\n",
+                    "    c(y);\n",
+                    "\n", // supporting diagnostic
+                    "    d(x);\n",
+                    //
+                    // main.rs, diagnostic group 2
+                    //
+                    "\n", // primary message
+                    "\n", // filename
+                    "fn main() {\n",
+                    "    let x = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    let y = vec![];\n",
+                    "    a(x);\n",
+                    "\n", // supporting diagnostic
+                    "    b(y);\n",
+                    "\n", // context ellipsis
+                    "    c(y);\n",
+                    "    d(x);\n",
+                    "\n", // supporting diagnostic
+                    "}"
+                )
+            );
+        });
+
+        worktree.update(&mut cx, |worktree, cx| {
+            worktree
+                .update_diagnostics_from_provider(
+                    Arc::from("/test/a.rs".as_ref()),
+                    vec![
+                        DiagnosticEntry {
+                            range: 15..15,
+                            diagnostic: Diagnostic {
+                                message: "mismatched types".to_string(),
+                                severity: DiagnosticSeverity::ERROR,
+                                is_primary: true,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                        DiagnosticEntry {
+                            range: 15..15,
+                            diagnostic: Diagnostic {
+                                message: "expected `usize`, found `char`".to_string(),
+                                severity: DiagnosticSeverity::INFORMATION,
+                                is_primary: false,
+                                is_disk_based: true,
+                                group_id: 0,
+                                ..Default::default()
+                            },
+                        },
+                    ],
+                    cx,
+                )
+                .unwrap();
+        });
+
+        view.condition(&mut cx, |view, cx| view.text(cx).contains("const a"))
+            .await;
+
+        view.update(&mut cx, |view, cx| {
+            let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx));
+
+            assert_eq!(
+                editor.text(),
+                concat!(
+                    //
+                    // a.rs
+                    //
+                    "\n", // primary message
+                    "\n", // filename
+                    "const a: i32 = 'a';\n",
+                    "\n", // supporting diagnostic
+                    "\n", // context line
+                    //
+                    // main.rs, diagnostic group 1
+                    //
+                    "\n", // primary message
+                    "\n", // filename
+                    "    let x = vec![];\n",
+                    "    let y = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    a(x);\n",
+                    "    b(y);\n",
+                    "\n", // supporting diagnostic
+                    "    // comment 1\n",
+                    "    // comment 2\n",
+                    "    c(y);\n",
+                    "\n", // supporting diagnostic
+                    "    d(x);\n",
+                    //
+                    // main.rs, diagnostic group 2
+                    //
+                    "\n", // primary message
+                    "\n", // filename
+                    "fn main() {\n",
+                    "    let x = vec![];\n",
+                    "\n", // supporting diagnostic
+                    "    let y = vec![];\n",
+                    "    a(x);\n",
+                    "\n", // supporting diagnostic
+                    "    b(y);\n",
+                    "\n", // context ellipsis
+                    "    c(y);\n",
+                    "    d(x);\n",
+                    "\n", // supporting diagnostic
+                    "}"
+                )
+            );
+        });
+    }
+}

crates/editor/Cargo.toml

@@ -8,14 +8,17 @@ path = "src/editor.rs"
 
 [features]
 test-support = [
+    "rand",
     "text/test-support",
     "language/test-support",
     "gpui/test-support",
+    "util/test-support",
 ]
 
 [dependencies]
 text = { path = "../text" }
 clock = { path = "../clock" }
+collections = { path = "../collections" }
 gpui = { path = "../gpui" }
 language = { path = "../language" }
 project = { path = "../project" }
@@ -25,10 +28,12 @@ util = { path = "../util" }
 workspace = { path = "../workspace" }
 aho-corasick = "0.7"
 anyhow = "1.0"
+itertools = "0.10"
 lazy_static = "1.4"
 log = "0.4"
 parking_lot = "0.11"
 postage = { version = "0.4", features = ["futures-traits"] }
+rand = { version = "0.8.3", optional = true }
 serde = { version = "1", features = ["derive", "rc"] }
 smallvec = { version = "1.6", features = ["union"] }
 smol = "1.2"
@@ -37,6 +42,7 @@ smol = "1.2"
 text = { path = "../text", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
 ctor = "0.1"
 env_logger = "0.8"
 rand = "0.8"

crates/editor/src/display_map.rs 🔗

@@ -3,28 +3,29 @@ mod fold_map;
 mod tab_map;
 mod wrap_map;
 
-pub use block_map::{
-    AlignedBlock, BlockContext, BlockDisposition, BlockId, BlockProperties, BufferRows, Chunks,
-};
+use crate::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
 use block_map::{BlockMap, BlockPoint};
+use collections::{HashMap, HashSet};
 use fold_map::{FoldMap, ToFoldPoint as _};
-use gpui::{fonts::FontId, ElementBox, Entity, ModelContext, ModelHandle};
-use language::{Anchor, Buffer, Point, Subscription as BufferSubscription, ToOffset, ToPoint};
-use std::{
-    collections::{HashMap, HashSet},
-    ops::Range,
-};
+use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle};
+use language::{Point, Subscription as BufferSubscription};
+use std::ops::Range;
 use sum_tree::Bias;
 use tab_map::TabMap;
 use theme::SyntaxTheme;
 use wrap_map::WrapMap;
 
+pub use block_map::{
+    AlignedBlock, BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext,
+    BlockDisposition, BlockId, BlockProperties, RenderBlock,
+};
+
 pub trait ToDisplayPoint {
-    fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint;
+    fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint;
 }
 
 pub struct DisplayMap {
-    buffer: ModelHandle<Buffer>,
+    buffer: ModelHandle<MultiBuffer>,
     buffer_subscription: BufferSubscription,
     fold_map: FoldMap,
     tab_map: TabMap,
@@ -38,7 +39,7 @@ impl Entity for DisplayMap {
 
 impl DisplayMap {
     pub fn new(
-        buffer: ModelHandle<Buffer>,
+        buffer: ModelHandle<MultiBuffer>,
         tab_size: usize,
         font_id: FontId,
         font_size: f32,
@@ -46,10 +47,10 @@ impl DisplayMap {
         cx: &mut ModelContext<Self>,
     ) -> Self {
         let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
-        let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot());
+        let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
         let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx);
-        let block_map = BlockMap::new(buffer.clone(), snapshot);
+        let block_map = BlockMap::new(snapshot);
         cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach();
         DisplayMap {
             buffer,
@@ -61,18 +62,18 @@ impl DisplayMap {
         }
     }
 
-    pub fn snapshot(&self, cx: &mut ModelContext<Self>) -> DisplayMapSnapshot {
-        let buffer_snapshot = self.buffer.read(cx).snapshot();
+    pub fn snapshot(&self, cx: &mut ModelContext<Self>) -> DisplaySnapshot {
+        let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
         let (folds_snapshot, edits) = self.fold_map.read(buffer_snapshot, edits);
         let (tabs_snapshot, edits) = self.tab_map.sync(folds_snapshot.clone(), edits);
         let (wraps_snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(tabs_snapshot.clone(), edits, cx));
-        let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits, cx);
+        let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits);
 
-        DisplayMapSnapshot {
-            buffer_snapshot: self.buffer.read(cx).snapshot(),
+        DisplaySnapshot {
+            buffer_snapshot: self.buffer.read(cx).snapshot(cx),
             folds_snapshot,
             tabs_snapshot,
             wraps_snapshot,
@@ -85,20 +86,20 @@ impl DisplayMap {
         ranges: impl IntoIterator<Item = Range<T>>,
         cx: &mut ModelContext<Self>,
     ) {
-        let snapshot = self.buffer.read(cx).snapshot();
+        let snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
         let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
-        self.block_map.read(snapshot, edits, cx);
+        self.block_map.read(snapshot, edits);
         let (snapshot, edits) = fold_map.fold(ranges);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
-        self.block_map.read(snapshot, edits, cx);
+        self.block_map.read(snapshot, edits);
     }
 
     pub fn unfold<T: ToOffset>(
@@ -106,20 +107,20 @@ impl DisplayMap {
         ranges: impl IntoIterator<Item = Range<T>>,
         cx: &mut ModelContext<Self>,
     ) {
-        let snapshot = self.buffer.read(cx).snapshot();
+        let snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
         let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
-        self.block_map.read(snapshot, edits, cx);
+        self.block_map.read(snapshot, edits);
         let (snapshot, edits) = fold_map.unfold(ranges);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
-        self.block_map.read(snapshot, edits, cx);
+        self.block_map.read(snapshot, edits);
     }
 
     pub fn insert_blocks<P>(
@@ -130,34 +131,31 @@ impl DisplayMap {
     where
         P: ToOffset + Clone,
     {
-        let snapshot = self.buffer.read(cx).snapshot();
+        let snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
         let (snapshot, edits) = self.fold_map.read(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
-        let mut block_map = self.block_map.write(snapshot, edits, cx);
-        block_map.insert(blocks, cx)
+        let mut block_map = self.block_map.write(snapshot, edits);
+        block_map.insert(blocks)
     }
 
-    pub fn replace_blocks<F>(&mut self, styles: HashMap<BlockId, F>)
-    where
-        F: 'static + Fn(&BlockContext) -> ElementBox,
-    {
+    pub fn replace_blocks(&mut self, styles: HashMap<BlockId, RenderBlock>) {
         self.block_map.replace(styles);
     }
 
     pub fn remove_blocks(&mut self, ids: HashSet<BlockId>, cx: &mut ModelContext<Self>) {
-        let snapshot = self.buffer.read(cx).snapshot();
+        let snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
         let (snapshot, edits) = self.fold_map.read(snapshot, edits);
         let (snapshot, edits) = self.tab_map.sync(snapshot, edits);
         let (snapshot, edits) = self
             .wrap_map
             .update(cx, |map, cx| map.sync(snapshot, edits, cx));
-        let mut block_map = self.block_map.write(snapshot, edits, cx);
-        block_map.remove(ids, cx);
+        let mut block_map = self.block_map.write(snapshot, edits);
+        block_map.remove(ids);
     }
 
     pub fn set_font(&self, font_id: FontId, font_size: f32, cx: &mut ModelContext<Self>) {
@@ -176,15 +174,15 @@ impl DisplayMap {
     }
 }
 
-pub struct DisplayMapSnapshot {
-    pub buffer_snapshot: language::Snapshot,
-    folds_snapshot: fold_map::Snapshot,
-    tabs_snapshot: tab_map::Snapshot,
-    wraps_snapshot: wrap_map::Snapshot,
+pub struct DisplaySnapshot {
+    pub buffer_snapshot: MultiBufferSnapshot,
+    folds_snapshot: fold_map::FoldSnapshot,
+    tabs_snapshot: tab_map::TabSnapshot,
+    wraps_snapshot: wrap_map::WrapSnapshot,
     blocks_snapshot: block_map::BlockSnapshot,
 }
 
-impl DisplayMapSnapshot {
+impl DisplaySnapshot {
     #[cfg(test)]
     pub fn fold_count(&self) -> usize {
         self.folds_snapshot.fold_count()
@@ -194,56 +192,74 @@ impl DisplayMapSnapshot {
         self.buffer_snapshot.len() == 0
     }
 
-    pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BufferRows<'a> {
+    pub fn buffer_rows<'a>(&'a self, start_row: u32) -> DisplayBufferRows<'a> {
         self.blocks_snapshot.buffer_rows(start_row)
     }
 
-    pub fn buffer_row_count(&self) -> u32 {
-        self.buffer_snapshot.max_point().row + 1
+    pub fn max_buffer_row(&self) -> u32 {
+        self.buffer_snapshot.max_buffer_row()
     }
 
-    pub fn prev_row_boundary(&self, mut display_point: DisplayPoint) -> (DisplayPoint, Point) {
+    pub fn prev_row_boundary(&self, input_display_point: DisplayPoint) -> (DisplayPoint, Point) {
+        let mut display_point = input_display_point;
         loop {
             *display_point.column_mut() = 0;
             let mut point = display_point.to_point(self);
+            point = self.buffer_snapshot.clip_point(point, Bias::Left);
             point.column = 0;
-            let next_display_point = self.point_to_display_point(point, Bias::Left);
+            let next_display_point = self.point_to_display_point_with_clipping(point, Bias::Left);
             if next_display_point == display_point {
                 return (display_point, point);
             }
+            if next_display_point > display_point {
+                panic!("invalid display point {:?}", input_display_point);
+            }
             display_point = next_display_point;
         }
     }
 
-    pub fn next_row_boundary(&self, mut display_point: DisplayPoint) -> (DisplayPoint, Point) {
+    pub fn next_row_boundary(&self, input_display_point: DisplayPoint) -> (DisplayPoint, Point) {
+        let mut display_point = input_display_point;
         loop {
             *display_point.column_mut() = self.line_len(display_point.row());
-            let mut point = display_point.to_point(self);
+            let mut point = self.display_point_to_point(display_point, Bias::Right);
+            point = self.buffer_snapshot.clip_point(point, Bias::Right);
             point.column = self.buffer_snapshot.line_len(point.row);
             let next_display_point = self.point_to_display_point(point, Bias::Right);
             if next_display_point == display_point {
                 return (display_point, point);
             }
+            if next_display_point < display_point {
+                panic!("invalid display point {:?}", input_display_point);
+            }
             display_point = next_display_point;
         }
     }
 
     fn point_to_display_point(&self, point: Point, bias: Bias) -> DisplayPoint {
-        DisplayPoint(
-            self.blocks_snapshot.to_block_point(
-                self.wraps_snapshot.from_tab_point(
-                    self.tabs_snapshot
-                        .to_tab_point(point.to_fold_point(&self.folds_snapshot, bias)),
-                ),
-            ),
-        )
+        let fold_point = point.to_fold_point(&self.folds_snapshot, bias);
+        let tab_point = self.tabs_snapshot.to_tab_point(fold_point);
+        let wrap_point = self.wraps_snapshot.from_tab_point(tab_point);
+        let block_point = self.blocks_snapshot.to_block_point(wrap_point);
+        DisplayPoint(block_point)
+    }
+
+    fn point_to_display_point_with_clipping(&self, point: Point, bias: Bias) -> DisplayPoint {
+        let fold_point = point.to_fold_point(&self.folds_snapshot, bias);
+        let tab_point = self.tabs_snapshot.to_tab_point(fold_point);
+        let wrap_point = self
+            .wraps_snapshot
+            .from_tab_point_with_clipping(tab_point, bias);
+        let block_point = self.blocks_snapshot.to_block_point(wrap_point);
+        DisplayPoint(block_point)
     }
 
     fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
-        let unblocked_point = self.blocks_snapshot.to_wrap_point(point.0);
-        let unwrapped_point = self.wraps_snapshot.to_tab_point(unblocked_point);
-        let unexpanded_point = self.tabs_snapshot.to_fold_point(unwrapped_point, bias).0;
-        unexpanded_point.to_buffer_point(&self.folds_snapshot)
+        let block_point = point.0;
+        let wrap_point = self.blocks_snapshot.to_wrap_point(block_point);
+        let tab_point = self.wraps_snapshot.to_tab_point(wrap_point);
+        let fold_point = self.tabs_snapshot.to_fold_point(tab_point, bias).0;
+        fold_point.to_buffer_point(&self.folds_snapshot)
     }
 
     pub fn max_point(&self) -> DisplayPoint {
@@ -260,7 +276,7 @@ impl DisplayMapSnapshot {
         &'a self,
         display_rows: Range<u32>,
         theme: Option<&'a SyntaxTheme>,
-    ) -> block_map::Chunks<'a> {
+    ) -> DisplayChunks<'a> {
         self.blocks_snapshot.chunks(display_rows, theme)
     }
 
@@ -420,11 +436,11 @@ impl DisplayPoint {
         &mut self.0.column
     }
 
-    pub fn to_point(self, map: &DisplayMapSnapshot) -> Point {
+    pub fn to_point(self, map: &DisplaySnapshot) -> Point {
         map.display_point_to_point(self, Bias::Left)
     }
 
-    pub fn to_offset(self, map: &DisplayMapSnapshot, bias: Bias) -> usize {
+    pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize {
         let unblocked_point = map.blocks_snapshot.to_wrap_point(self.0);
         let unwrapped_point = map.wraps_snapshot.to_tab_point(unblocked_point);
         let unexpanded_point = map.tabs_snapshot.to_fold_point(unwrapped_point, bias).0;
@@ -433,19 +449,19 @@ impl DisplayPoint {
 }
 
 impl ToDisplayPoint for usize {
-    fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint {
+    fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
         map.point_to_display_point(self.to_point(&map.buffer_snapshot), Bias::Left)
     }
 }
 
 impl ToDisplayPoint for Point {
-    fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint {
+    fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
         map.point_to_display_point(*self, Bias::Left)
     }
 }
 
 impl ToDisplayPoint for Anchor {
-    fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint {
+    fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint {
         self.to_point(&map.buffer_snapshot).to_display_point(map)
     }
 }
@@ -454,15 +470,16 @@ impl ToDisplayPoint for Anchor {
 mod tests {
     use super::*;
     use crate::{movement, test::*};
-    use gpui::{color::Color, MutableAppContext};
-    use language::{Language, LanguageConfig, RandomCharIter, SelectionGoal};
-    use rand::{prelude::StdRng, Rng};
+    use gpui::{color::Color, elements::*, MutableAppContext};
+    use language::{Buffer, Language, LanguageConfig, RandomCharIter, SelectionGoal};
+    use rand::{prelude::*, Rng};
     use std::{env, sync::Arc};
     use theme::SyntaxTheme;
+    use util::test::sample_text;
     use Bias::*;
 
     #[gpui::test(iterations = 100)]
-    async fn test_random(mut cx: gpui::TestAppContext, mut rng: StdRng) {
+    async fn test_random_display_map(mut cx: gpui::TestAppContext, mut rng: StdRng) {
         cx.foreground().set_block_on_ticks(0..=50);
         cx.foreground().forbid_parking();
         let operations = env::var("OPERATIONS")
@@ -486,10 +503,14 @@ mod tests {
         log::info!("tab size: {}", tab_size);
         log::info!("wrap width: {:?}", wrap_width);
 
-        let buffer = cx.add_model(|cx| {
-            let len = rng.gen_range(0..10);
-            let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
-            Buffer::new(0, text, cx)
+        let buffer = cx.update(|cx| {
+            if rng.gen() {
+                let len = rng.gen_range(0..10);
+                let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
+                MultiBuffer::build_simple(&text, cx)
+            } else {
+                MultiBuffer::build_random(&mut rng, cx)
+            }
         });
 
         let map = cx.add_model(|cx| {
@@ -497,6 +518,15 @@ mod tests {
         });
         let (_observer, notifications) = Observer::new(&map, &mut cx);
         let mut fold_count = 0;
+        let mut blocks = Vec::new();
+
+        let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx));
+        log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
+        log::info!("fold text: {:?}", snapshot.folds_snapshot.text());
+        log::info!("tab text: {:?}", snapshot.tabs_snapshot.text());
+        log::info!("wrap text: {:?}", snapshot.wraps_snapshot.text());
+        log::info!("block text: {:?}", snapshot.blocks_snapshot.text());
+        log::info!("display text: {:?}", snapshot.text());
 
         for _i in 0..operations {
             match rng.gen_range(0..100) {
@@ -509,10 +539,55 @@ mod tests {
                     log::info!("setting wrap width to {:?}", wrap_width);
                     map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx));
                 }
-                20..=80 => {
+                20..=44 => {
+                    map.update(&mut cx, |map, cx| {
+                        if rng.gen() || blocks.is_empty() {
+                            let buffer = map.snapshot(cx).buffer_snapshot;
+                            let block_properties = (0..rng.gen_range(1..=1))
+                                .map(|_| {
+                                    let position =
+                                        buffer.anchor_after(buffer.clip_offset(
+                                            rng.gen_range(0..=buffer.len()),
+                                            Bias::Left,
+                                        ));
+
+                                    let disposition = if rng.gen() {
+                                        BlockDisposition::Above
+                                    } else {
+                                        BlockDisposition::Below
+                                    };
+                                    let height = rng.gen_range(1..5);
+                                    log::info!(
+                                        "inserting block {:?} {:?} with height {}",
+                                        disposition,
+                                        position.to_point(&buffer),
+                                        height
+                                    );
+                                    BlockProperties {
+                                        position,
+                                        height,
+                                        disposition,
+                                        render: Arc::new(|_| Empty::new().boxed()),
+                                    }
+                                })
+                                .collect::<Vec<_>>();
+                            blocks.extend(map.insert_blocks(block_properties, cx));
+                        } else {
+                            blocks.shuffle(&mut rng);
+                            let remove_count = rng.gen_range(1..=4.min(blocks.len()));
+                            let block_ids_to_remove = (0..remove_count)
+                                .map(|_| blocks.remove(rng.gen_range(0..blocks.len())))
+                                .collect();
+                            log::info!("removing block ids {:?}", block_ids_to_remove);
+                            map.remove_blocks(block_ids_to_remove, cx);
+                        }
+                    });
+                }
+                45..=79 => {
                     let mut ranges = Vec::new();
                     for _ in 0..rng.gen_range(1..=3) {
-                        buffer.read_with(&cx, |buffer, _| {
+                        buffer.read_with(&cx, |buffer, cx| {
+                            let buffer = buffer.read(cx);
                             let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
                             let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
                             ranges.push(start..end);
@@ -542,7 +617,11 @@ mod tests {
 
             let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx));
             fold_count = snapshot.fold_count();
-            log::info!("buffer text: {:?}", buffer.read_with(&cx, |b, _| b.text()));
+            log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
+            log::info!("fold text: {:?}", snapshot.folds_snapshot.text());
+            log::info!("tab text: {:?}", snapshot.tabs_snapshot.text());
+            log::info!("wrap text: {:?}", snapshot.wraps_snapshot.text());
+            log::info!("block text: {:?}", snapshot.blocks_snapshot.text());
             log::info!("display text: {:?}", snapshot.text());
 
             // Line boundaries
@@ -558,12 +637,11 @@ mod tests {
                 assert!(next_display_bound >= point);
                 assert_eq!(prev_buffer_bound.column, 0);
                 assert_eq!(prev_display_bound.column(), 0);
-                if next_display_bound < snapshot.max_point() {
+                if next_buffer_bound < snapshot.buffer_snapshot.max_point() {
                     assert_eq!(
-                        buffer
-                            .read_with(&cx, |buffer, _| buffer.chars_at(next_buffer_bound).next()),
+                        snapshot.buffer_snapshot.chars_at(next_buffer_bound).next(),
                         Some('\n')
-                    )
+                    );
                 }
 
                 assert_eq!(
@@ -597,6 +675,8 @@ mod tests {
             }
 
             // Movement
+            let min_point = snapshot.clip_point(DisplayPoint::new(0, 0), Left);
+            let max_point = snapshot.clip_point(snapshot.max_point(), Right);
             for _ in 0..5 {
                 let row = rng.gen_range(0..=snapshot.max_point().row());
                 let column = rng.gen_range(0..=snapshot.line_len(row));
@@ -606,7 +686,7 @@ mod tests {
 
                 let moved_right = movement::right(&snapshot, point).unwrap();
                 log::info!("Right {:?}", moved_right);
-                if point < snapshot.max_point() {
+                if point < max_point {
                     assert!(moved_right > point);
                     if point.column() == snapshot.line_len(point.row())
                         || snapshot.soft_wrap_indent(point.row()).is_some()
@@ -620,13 +700,13 @@ mod tests {
 
                 let moved_left = movement::left(&snapshot, point).unwrap();
                 log::info!("Left {:?}", moved_left);
-                if !point.is_zero() {
+                if point > min_point {
                     assert!(moved_left < point);
                     if point.column() == 0 {
                         assert!(moved_left.row() < point.row());
                     }
                 } else {
-                    assert!(moved_left.is_zero());
+                    assert_eq!(moved_left, point);
                 }
             }
         }
@@ -648,7 +728,7 @@ mod tests {
         let wrap_width = Some(64.);
 
         let text = "one two three four five\nsix seven eight";
-        let buffer = cx.add_model(|cx| Buffer::new(0, text.to_string(), cx));
+        let buffer = MultiBuffer::build_simple(text, cx);
         let map = cx.add_model(|cx| {
             DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx)
         });
@@ -697,8 +777,8 @@ mod tests {
             (DisplayPoint::new(2, 4), SelectionGoal::Column(10))
         );
 
+        let ix = snapshot.buffer_snapshot.text().find("seven").unwrap();
         buffer.update(cx, |buffer, cx| {
-            let ix = buffer.text().find("seven").unwrap();
             buffer.edit(vec![ix..ix], "and ", cx);
         });
 
@@ -720,8 +800,8 @@ mod tests {
 
     #[gpui::test]
     fn test_text_chunks(cx: &mut gpui::MutableAppContext) {
-        let text = sample_text(6, 6);
-        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+        let text = sample_text(6, 6, 'a');
+        let buffer = MultiBuffer::build_simple(&text, cx);
         let tab_size = 4;
         let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap();
         let font_id = cx
@@ -800,6 +880,7 @@ mod tests {
         let buffer =
             cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
         buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
+        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
 
         let tab_size = 2;
         let font_cache = cx.font_cache();
@@ -887,6 +968,7 @@ mod tests {
         let buffer =
             cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
         buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
+        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
 
         let font_cache = cx.font_cache();
 
@@ -932,7 +1014,7 @@ mod tests {
 
         let text = "\n'a', 'α',\t'✋',\t'❎', '🍐'\n";
         let display_text = "\n'a', 'α',   '✋',    '❎', '🍐'\n";
-        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+        let buffer = MultiBuffer::build_simple(text, cx);
 
         let tab_size = 4;
         let font_cache = cx.font_cache();
@@ -976,7 +1058,7 @@ mod tests {
     #[gpui::test]
     fn test_tabs_with_multibyte_chars(cx: &mut gpui::MutableAppContext) {
         let text = "✅\t\tα\nβ\t\n🏀β\t\tγ";
-        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+        let buffer = MultiBuffer::build_simple(text, cx);
         let tab_size = 4;
         let font_cache = cx.font_cache();
         let family_id = font_cache.load_family(&["Helvetica"]).unwrap();
@@ -1035,7 +1117,7 @@ mod tests {
 
     #[gpui::test]
     fn test_max_point(cx: &mut gpui::MutableAppContext) {
-        let buffer = cx.add_model(|cx| Buffer::new(0, "aaa\n\t\tbbb", cx));
+        let buffer = MultiBuffer::build_simple("aaa\n\t\tbbb", cx);
         let tab_size = 4;
         let font_cache = cx.font_cache();
         let family_id = font_cache.load_family(&["Helvetica"]).unwrap();
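Stepping back from the individual hunks: the block API on DisplayMap now deals in plain values (RenderBlock is an Arc'd closure, and the block map's own insert/remove no longer thread an AppContext through). A usage-level sketch of the reshaped calls, with the surrounding setup assumed rather than taken from this patch:

    // Hypothetical usage of the DisplayMap block API after this change.
    let block_ids = display_map.update(cx, |map, cx| {
        map.insert_blocks(
            [BlockProperties {
                position: Point::new(2, 0),           // anchor the block to row 2
                height: 1,                            // reserve one display row
                disposition: BlockDisposition::Above, // render above the anchored line
                render: Arc::new(|_| Empty::new().boxed()),
            }],
            cx,
        )
    });

    // Renderers can be swapped later via replace_blocks, and blocks are removed by id.
    display_map.update(cx, |map, cx| {
        map.remove_blocks(block_ids.into_iter().collect(), cx)
    });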

crates/editor/src/display_map/block_map.rs 🔗

@@ -1,10 +1,11 @@
-use super::wrap_map::{self, Edit as WrapEdit, Snapshot as WrapSnapshot, WrapPoint};
-use gpui::{AppContext, ElementBox, ModelHandle};
-use language::{Buffer, Chunk};
+use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot};
+use crate::{Anchor, ToOffset, ToPoint as _};
+use collections::{HashMap, HashSet};
+use gpui::{AppContext, ElementBox};
+use language::Chunk;
 use parking_lot::Mutex;
 use std::{
     cmp::{self, Ordering},
-    collections::{HashMap, HashSet},
     fmt::Debug,
     ops::{Deref, Range},
     sync::{
@@ -12,14 +13,13 @@ use std::{
         Arc,
     },
 };
-use sum_tree::SumTree;
-use text::{Anchor, Bias, Edit, Point, ToOffset, ToPoint as _};
+use sum_tree::{Bias, SumTree};
+use text::{Edit, Point};
 use theme::SyntaxTheme;
 
 const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
 
 pub struct BlockMap {
-    buffer: ModelHandle<Buffer>,
     next_block_id: AtomicUsize,
     wrap_snapshot: Mutex<WrapSnapshot>,
     blocks: Vec<Arc<Block>>,
@@ -45,11 +45,13 @@ struct BlockRow(u32);
 #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
 struct WrapRow(u32);
 
+pub type RenderBlock = Arc<dyn Fn(&BlockContext) -> ElementBox>;
+
 pub struct Block {
     id: BlockId,
     position: Anchor,
     height: u8,
-    render: Mutex<Arc<dyn Fn(&BlockContext) -> ElementBox>>,
+    render: Mutex<RenderBlock>,
     disposition: BlockDisposition,
 }
 
@@ -93,25 +95,24 @@ struct TransformSummary {
     output_rows: u32,
 }
 
-pub struct Chunks<'a> {
+pub struct BlockChunks<'a> {
     transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>,
-    input_chunks: wrap_map::Chunks<'a>,
+    input_chunks: wrap_map::WrapChunks<'a>,
     input_chunk: Chunk<'a>,
     output_row: u32,
     max_output_row: u32,
 }
 
-pub struct BufferRows<'a> {
+pub struct BlockBufferRows<'a> {
     transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>,
-    input_buffer_rows: wrap_map::BufferRows<'a>,
+    input_buffer_rows: wrap_map::WrapBufferRows<'a>,
     output_row: u32,
     started: bool,
 }
 
 impl BlockMap {
-    pub fn new(buffer: ModelHandle<Buffer>, wrap_snapshot: WrapSnapshot) -> Self {
+    pub fn new(wrap_snapshot: WrapSnapshot) -> Self {
         Self {
-            buffer,
             next_block_id: AtomicUsize::new(0),
             blocks: Vec::new(),
             transforms: Mutex::new(SumTree::from_item(
@@ -122,13 +123,8 @@ impl BlockMap {
         }
     }
 
-    pub fn read(
-        &self,
-        wrap_snapshot: WrapSnapshot,
-        edits: Vec<WrapEdit>,
-        cx: &AppContext,
-    ) -> BlockSnapshot {
-        self.sync(&wrap_snapshot, edits, cx);
+    pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Vec<WrapEdit>) -> BlockSnapshot {
+        self.sync(&wrap_snapshot, edits);
         *self.wrap_snapshot.lock() = wrap_snapshot.clone();
         BlockSnapshot {
             wrap_snapshot,
@@ -136,23 +132,18 @@ impl BlockMap {
         }
     }
 
-    pub fn write(
-        &mut self,
-        wrap_snapshot: WrapSnapshot,
-        edits: Vec<WrapEdit>,
-        cx: &AppContext,
-    ) -> BlockMapWriter {
-        self.sync(&wrap_snapshot, edits, cx);
+    pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Vec<WrapEdit>) -> BlockMapWriter {
+        self.sync(&wrap_snapshot, edits);
         *self.wrap_snapshot.lock() = wrap_snapshot;
         BlockMapWriter(self)
     }
 
-    fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec<WrapEdit>, cx: &AppContext) {
+    fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec<WrapEdit>) {
         if edits.is_empty() {
             return;
         }
 
-        let buffer = self.buffer.read(cx);
+        let buffer = wrap_snapshot.buffer_snapshot();
         let mut transforms = self.transforms.lock();
         let mut new_transforms = SumTree::new();
         let old_row_count = transforms.summary().input_rows;
@@ -236,26 +227,29 @@ impl BlockMap {
             }
 
             // Find the blocks within this edited region.
-            let new_start = wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
-            let start_anchor = buffer.anchor_before(new_start);
+            let new_buffer_start =
+                wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
+            let start_anchor = buffer.anchor_before(new_buffer_start);
             let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| {
                 probe
                     .position
-                    .cmp(&start_anchor, buffer)
+                    .cmp(&start_anchor, &buffer)
                     .unwrap()
                     .then(Ordering::Greater)
             }) {
                 Ok(ix) | Err(ix) => last_block_ix + ix,
             };
+
             let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() {
                 self.blocks.len()
             } else {
-                let new_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left);
-                let end_anchor = buffer.anchor_before(new_end);
+                let new_buffer_end =
+                    wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left);
+                let end_anchor = buffer.anchor_before(new_buffer_end);
                 match self.blocks[start_block_ix..].binary_search_by(|probe| {
                     probe
                         .position
-                        .cmp(&end_anchor, buffer)
+                        .cmp(&end_anchor, &buffer)
                         .unwrap()
                         .then(Ordering::Greater)
                 }) {
@@ -263,12 +257,13 @@ impl BlockMap {
                 }
             };
             last_block_ix = end_block_ix;
-            blocks_in_edit.clear();
+
+            debug_assert!(blocks_in_edit.is_empty());
             blocks_in_edit.extend(
                 self.blocks[start_block_ix..end_block_ix]
                     .iter()
                     .map(|block| {
-                        let mut position = block.position.to_point(buffer);
+                        let mut position = block.position.to_point(&buffer);
                         let column = wrap_snapshot.from_point(position, Bias::Left).column();
                         match block.disposition {
                             BlockDisposition::Above => position.column = 0,
@@ -277,22 +272,21 @@ impl BlockMap {
                             }
                         }
                         let position = wrap_snapshot.from_point(position, Bias::Left);
-                        (position.row(), column, block)
+                        (position.row(), column, block.clone())
                     }),
             );
-            blocks_in_edit
-                .sort_unstable_by_key(|(row, _, block)| (*row, block.disposition, block.id));
+            blocks_in_edit.sort_by_key(|(row, _, block)| (*row, block.disposition, block.id));
 
             // For each of these blocks, insert a new isomorphic transform preceding the block,
             // and then insert the block itself.
-            for (block_row, column, block) in blocks_in_edit.iter().copied() {
+            for (block_row, column, block) in blocks_in_edit.drain(..) {
                 let insertion_row = match block.disposition {
                     BlockDisposition::Above => block_row,
                     BlockDisposition::Below => block_row + 1,
                 };
                 let extent_before_block = insertion_row - new_transforms.summary().input_rows;
                 push_isomorphic(&mut new_transforms, extent_before_block);
-                new_transforms.push(Transform::block(block.clone(), column), &());
+                new_transforms.push(Transform::block(block, column), &());
             }
 
             old_end = WrapRow(old_end.0.min(old_row_count));
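The binary searches above rely on a small trick worth calling out: forcing the comparator to return Greater on ties means binary_search_by never reports an exact match, so the Err index it hands back is the first block positioned at or after the anchor. A standalone sketch of that pattern (illustrative only, not the BlockMap code):

    use std::cmp::Ordering;

    // Lower-bound search via binary_search_by: ties are treated as Greater, so
    // Ok is never returned and Err(ix) is the first index with element >= target.
    fn lower_bound(sorted: &[u32], target: u32) -> usize {
        match sorted.binary_search_by(|probe| probe.cmp(&target).then(Ordering::Greater)) {
            Ok(ix) | Err(ix) => ix,
        }
    }

    fn main() {
        assert_eq!(lower_bound(&[1, 3, 3, 5], 3), 1);
        assert_eq!(lower_bound(&[1, 3, 3, 5], 4), 3);
    }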
@@ -317,13 +311,10 @@ impl BlockMap {
         *transforms = new_transforms;
     }
 
-    pub fn replace<F>(&mut self, mut element_builders: HashMap<BlockId, F>)
-    where
-        F: 'static + Fn(&BlockContext) -> ElementBox,
-    {
+    pub fn replace(&mut self, mut renderers: HashMap<BlockId, RenderBlock>) {
         for block in &self.blocks {
-            if let Some(build_element) = element_builders.remove(&block.id) {
-                *block.render.lock() = Arc::new(build_element);
+            if let Some(render) = renderers.remove(&block.id) {
+                *block.render.lock() = render;
             }
         }
     }
@@ -374,37 +365,33 @@ impl<'a> BlockMapWriter<'a> {
     pub fn insert<P>(
         &mut self,
         blocks: impl IntoIterator<Item = BlockProperties<P>>,
-        cx: &AppContext,
     ) -> Vec<BlockId>
     where
         P: ToOffset + Clone,
     {
-        let buffer = self.0.buffer.read(cx);
         let mut ids = Vec::new();
         let mut edits = Vec::<Edit<u32>>::new();
         let wrap_snapshot = &*self.0.wrap_snapshot.lock();
+        let buffer = wrap_snapshot.buffer_snapshot();
 
         for block in blocks {
             let id = BlockId(self.0.next_block_id.fetch_add(1, SeqCst));
             ids.push(id);
 
             let position = buffer.anchor_after(block.position);
-            let point = position.to_point(buffer);
-            let start_row = wrap_snapshot
+            let point = position.to_point(&buffer);
+            let wrap_row = wrap_snapshot
                 .from_point(Point::new(point.row, 0), Bias::Left)
                 .row();
-            let end_row = if point.row == buffer.max_point().row {
-                wrap_snapshot.max_point().row() + 1
-            } else {
-                wrap_snapshot
-                    .from_point(Point::new(point.row + 1, 0), Bias::Left)
-                    .row()
-            };
+            let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
+            let end_row = wrap_snapshot
+                .next_row_boundary(WrapPoint::new(wrap_row, 0))
+                .unwrap_or(wrap_snapshot.max_point().row() + 1);
 
             let block_ix = match self
                 .0
                 .blocks
-                .binary_search_by(|probe| probe.position.cmp(&position, buffer).unwrap())
+                .binary_search_by(|probe| probe.position.cmp(&position, &buffer).unwrap())
             {
                 Ok(ix) | Err(ix) => ix,
             };
@@ -430,30 +417,27 @@ impl<'a> BlockMapWriter<'a> {
             }
         }
 
-        self.0.sync(wrap_snapshot, edits, cx);
+        self.0.sync(wrap_snapshot, edits);
         ids
     }
 
-    pub fn remove(&mut self, block_ids: HashSet<BlockId>, cx: &AppContext) {
-        let buffer = self.0.buffer.read(cx);
+    pub fn remove(&mut self, block_ids: HashSet<BlockId>) {
         let wrap_snapshot = &*self.0.wrap_snapshot.lock();
+        let buffer = wrap_snapshot.buffer_snapshot();
         let mut edits = Vec::new();
         let mut last_block_buffer_row = None;
         self.0.blocks.retain(|block| {
             if block_ids.contains(&block.id) {
-                let buffer_row = block.position.to_point(buffer).row;
+                let buffer_row = block.position.to_point(&buffer).row;
                 if last_block_buffer_row != Some(buffer_row) {
                     last_block_buffer_row = Some(buffer_row);
-                    let start_row = wrap_snapshot
+                    let wrap_row = wrap_snapshot
                         .from_point(Point::new(buffer_row, 0), Bias::Left)
                         .row();
+                    let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
                     let end_row = wrap_snapshot
-                        .from_point(
-                            Point::new(buffer_row, buffer.line_len(buffer_row)),
-                            Bias::Left,
-                        )
-                        .row()
-                        + 1;
+                        .next_row_boundary(WrapPoint::new(wrap_row, 0))
+                        .unwrap_or(wrap_snapshot.max_point().row() + 1);
                     edits.push(Edit {
                         old: start_row..end_row,
                         new: start_row..end_row,
@@ -464,19 +448,23 @@ impl<'a> BlockMapWriter<'a> {
                 true
             }
         });
-        self.0.sync(wrap_snapshot, edits, cx);
+        self.0.sync(wrap_snapshot, edits);
     }
 }
 
 impl BlockSnapshot {
     #[cfg(test)]
-    fn text(&mut self) -> String {
+    pub fn text(&self) -> String {
         self.chunks(0..self.transforms.summary().output_rows, None)
             .map(|chunk| chunk.text)
             .collect()
     }
 
-    pub fn chunks<'a>(&'a self, rows: Range<u32>, theme: Option<&'a SyntaxTheme>) -> Chunks<'a> {
+    pub fn chunks<'a>(
+        &'a self,
+        rows: Range<u32>,
+        theme: Option<&'a SyntaxTheme>,
+    ) -> BlockChunks<'a> {
         let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
         let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
         let input_end = {
@@ -503,7 +491,7 @@ impl BlockSnapshot {
             };
             cursor.start().1 .0 + overshoot
         };
-        Chunks {
+        BlockChunks {
             input_chunks: self.wrap_snapshot.chunks(input_start..input_end, theme),
             input_chunk: Default::default(),
             transforms: cursor,
@@ -512,7 +500,7 @@ impl BlockSnapshot {
         }
     }
 
-    pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BufferRows<'a> {
+    pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BlockBufferRows<'a> {
         let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
         cursor.seek(&BlockRow(start_row), Bias::Right, &());
         let (output_start, input_start) = cursor.start();
@@ -522,7 +510,7 @@ impl BlockSnapshot {
             0
         };
         let input_start_row = input_start.0 + overshoot;
-        BufferRows {
+        BlockBufferRows {
             transforms: cursor,
             input_buffer_rows: self.wrap_snapshot.buffer_rows(input_start_row),
             output_row: start_row,
@@ -590,41 +578,45 @@ impl BlockSnapshot {
         cursor.seek(&BlockRow(point.row), Bias::Right, &());
 
         let max_input_row = WrapRow(self.transforms.summary().input_rows);
-        let search_left =
+        let mut search_left =
             (bias == Bias::Left && cursor.start().1 .0 > 0) || cursor.end(&()).1 == max_input_row;
+        let mut reversed = false;
 
         loop {
             if let Some(transform) = cursor.item() {
                 if transform.is_isomorphic() {
                     let (output_start_row, input_start_row) = cursor.start();
                     let (output_end_row, input_end_row) = cursor.end(&());
-
-                    if point.row >= output_end_row.0 {
-                        return BlockPoint::new(
-                            output_end_row.0 - 1,
-                            self.wrap_snapshot.line_len(input_end_row.0 - 1),
-                        );
-                    }
-
                     let output_start = Point::new(output_start_row.0, 0);
-                    if point.0 > output_start {
-                        let output_overshoot = point.0 - output_start;
-                        let input_start = Point::new(input_start_row.0, 0);
-                        let input_point = self
-                            .wrap_snapshot
-                            .clip_point(WrapPoint(input_start + output_overshoot), bias);
-                        let input_overshoot = input_point.0 - input_start;
-                        return BlockPoint(output_start + input_overshoot);
+                    let input_start = Point::new(input_start_row.0, 0);
+                    let input_end = Point::new(input_end_row.0, 0);
+                    let input_point = if point.row >= output_end_row.0 {
+                        let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1);
+                        self.wrap_snapshot
+                            .clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias)
                     } else {
-                        return BlockPoint(output_start);
+                        let output_overshoot = point.0.saturating_sub(output_start);
+                        self.wrap_snapshot
+                            .clip_point(WrapPoint(input_start + output_overshoot), bias)
+                    };
+
+                    if (input_start..input_end).contains(&input_point.0) {
+                        let input_overshoot = input_point.0.saturating_sub(input_start);
+                        return BlockPoint(output_start + input_overshoot);
                     }
-                } else if search_left {
+                }
+
+                if search_left {
                     cursor.prev(&());
                 } else {
                     cursor.next(&());
                 }
-            } else {
+            } else if reversed {
                 return self.max_point();
+            } else {
+                reversed = true;
+                search_left = !search_left;
+                cursor.seek(&BlockRow(point.row), Bias::Right, &());
             }
         }
     }
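On the clip_point rewrite just above: rather than returning early, the cursor now keeps walking in the biased direction and, if it falls off the end of the transforms without landing in an isomorphic region, reverses once and walks the other way before giving up at max_point. A toy analogue of that search order, assuming nothing beyond the standard library:

    // Toy version of the "scan one way, then reverse once" clipping search.
    // `valid` stands in for isomorphic transforms; we want the nearest valid
    // index to `start`, trying the preferred direction first.
    fn clip(valid: &[bool], start: usize, prefer_left: bool) -> Option<usize> {
        if valid.is_empty() {
            return None;
        }
        let start = start.min(valid.len() - 1);
        let scan = |left: bool| -> Option<usize> {
            if left {
                (0..=start).rev().find(|&ix| valid[ix])
            } else {
                (start..valid.len()).find(|&ix| valid[ix])
            }
        };
        scan(prefer_left).or_else(|| scan(!prefer_left))
    }

    fn main() {
        assert_eq!(clip(&[true, false, false, true], 2, true), Some(0));
        assert_eq!(clip(&[false, false, true], 0, true), Some(2)); // reversed after failing left
    }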
@@ -693,7 +685,7 @@ impl Transform {
     }
 }
 
-impl<'a> Iterator for Chunks<'a> {
+impl<'a> Iterator for BlockChunks<'a> {
     type Item = Chunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -756,7 +748,7 @@ impl<'a> Iterator for Chunks<'a> {
     }
 }
 
-impl<'a> Iterator for BufferRows<'a> {
+impl<'a> Iterator for BlockBufferRows<'a> {
     type Item = Option<u32>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -840,6 +832,14 @@ impl Deref for AlignedBlock {
     }
 }
 
+impl<'a> Deref for BlockContext<'a> {
+    type Target = AppContext;
+
+    fn deref(&self) -> &Self::Target {
+        &self.cx
+    }
+}
+
 impl Debug for Block {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("Block")
@@ -872,8 +872,8 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
 mod tests {
     use super::*;
     use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
+    use crate::multi_buffer::MultiBuffer;
     use gpui::{elements::Empty, Element};
-    use language::Buffer;
     use rand::prelude::*;
     use std::env;
     use text::RandomCharIter;
@@ -902,38 +902,36 @@ mod tests {
 
         let text = "aaa\nbbb\nccc\nddd";
 
-        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
-        let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot());
+        let buffer = MultiBuffer::build_simple(text, cx);
+        let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
+        let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
-
-        let mut writer = block_map.write(wraps_snapshot.clone(), vec![], cx);
-        writer.insert(
-            vec![
-                BlockProperties {
-                    position: Point::new(1, 0),
-                    height: 1,
-                    disposition: BlockDisposition::Above,
-                    render: Arc::new(|_| Empty::new().named("block 1")),
-                },
-                BlockProperties {
-                    position: Point::new(1, 2),
-                    height: 2,
-                    disposition: BlockDisposition::Above,
-                    render: Arc::new(|_| Empty::new().named("block 2")),
-                },
-                BlockProperties {
-                    position: Point::new(3, 3),
-                    height: 3,
-                    disposition: BlockDisposition::Below,
-                    render: Arc::new(|_| Empty::new().named("block 3")),
-                },
-            ],
-            cx,
-        );
+        let mut block_map = BlockMap::new(wraps_snapshot.clone());
+
+        let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
+        writer.insert(vec![
+            BlockProperties {
+                position: Point::new(1, 0),
+                height: 1,
+                disposition: BlockDisposition::Above,
+                render: Arc::new(|_| Empty::new().named("block 1")),
+            },
+            BlockProperties {
+                position: Point::new(1, 2),
+                height: 2,
+                disposition: BlockDisposition::Above,
+                render: Arc::new(|_| Empty::new().named("block 2")),
+            },
+            BlockProperties {
+                position: Point::new(3, 3),
+                height: 3,
+                disposition: BlockDisposition::Below,
+                render: Arc::new(|_| Empty::new().named("block 3")),
+            },
+        ]);
 
-        let mut snapshot = block_map.read(wraps_snapshot, vec![], cx);
+        let snapshot = block_map.read(wraps_snapshot, vec![]);
         assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
 
         let blocks = snapshot
@@ -1046,20 +1044,19 @@ mod tests {
             ]
         );
 
-        // Insert a line break, separating two block decorations into separate
-        // lines.
-        let (buffer_snapshot, buffer_edits) = buffer.update(cx, |buffer, cx| {
-            let v0 = buffer.version();
+        // Insert a line break, separating two block decorations into separate lines.
+        let buffer_snapshot = buffer.update(cx, |buffer, cx| {
             buffer.edit([Point::new(1, 1)..Point::new(1, 1)], "!!!\n", cx);
-            (buffer.snapshot(), buffer.edits_since(&v0).collect())
+            buffer.snapshot(cx)
         });
 
-        let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits);
+        let (folds_snapshot, fold_edits) =
+            fold_map.read(buffer_snapshot, subscription.consume().into_inner());
         let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits);
         let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
             wrap_map.sync(tabs_snapshot, tab_edits, cx)
         });
-        let mut snapshot = block_map.read(wraps_snapshot, wrap_edits, cx);
+        let snapshot = block_map.read(wraps_snapshot, wrap_edits);
         assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
     }
 
@@ -1073,34 +1070,31 @@ mod tests {
 
         let text = "one two three\nfour five six\nseven eight";
 
-        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
-        let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot());
+        let buffer = MultiBuffer::build_simple(text, cx);
+        let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone());
-
-        let mut writer = block_map.write(wraps_snapshot.clone(), vec![], cx);
-        writer.insert(
-            vec![
-                BlockProperties {
-                    position: Point::new(1, 12),
-                    disposition: BlockDisposition::Above,
-                    render: Arc::new(|_| Empty::new().named("block 1")),
-                    height: 1,
-                },
-                BlockProperties {
-                    position: Point::new(1, 1),
-                    disposition: BlockDisposition::Below,
-                    render: Arc::new(|_| Empty::new().named("block 2")),
-                    height: 1,
-                },
-            ],
-            cx,
-        );
+        let mut block_map = BlockMap::new(wraps_snapshot.clone());
+
+        let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
+        writer.insert(vec![
+            BlockProperties {
+                position: Point::new(1, 12),
+                disposition: BlockDisposition::Above,
+                render: Arc::new(|_| Empty::new().named("block 1")),
+                height: 1,
+            },
+            BlockProperties {
+                position: Point::new(1, 1),
+                disposition: BlockDisposition::Below,
+                render: Arc::new(|_| Empty::new().named("block 2")),
+                height: 1,
+            },
+        ]);
 
         // Blocks with an 'above' disposition go above their corresponding buffer line.
         // Blocks with a 'below' disposition go below their corresponding buffer line.
-        let mut snapshot = block_map.read(wraps_snapshot, vec![], cx);
+        let snapshot = block_map.read(wraps_snapshot, vec![]);
         assert_eq!(
             snapshot.text(),
             "one two \nthree\n\nfour five \nsix\n\nseven \neight"
@@ -1128,18 +1122,21 @@ mod tests {
 
         log::info!("Wrap width: {:?}", wrap_width);
 
-        let buffer = cx.add_model(|cx| {
+        let buffer = if rng.gen() {
             let len = rng.gen_range(0..10);
             let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
             log::info!("initial buffer text: {:?}", text);
-            Buffer::new(0, text, cx)
-        });
-        let mut buffer_snapshot = buffer.read(cx).snapshot();
+            MultiBuffer::build_simple(&text, cx)
+        } else {
+            MultiBuffer::build_random(&mut rng, cx)
+        };
+
+        let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
         let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
         let (wrap_map, wraps_snapshot) =
             WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx);
-        let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot);
+        let mut block_map = BlockMap::new(wraps_snapshot);
         let mut expected_blocks = Vec::new();
 
         for _ in 0..operations {
@@ -1155,10 +1152,10 @@ mod tests {
                     wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
                 }
                 20..=39 => {
-                    let block_count = rng.gen_range(1..=1);
+                    let block_count = rng.gen_range(1..=5);
                     let block_properties = (0..block_count)
                         .map(|_| {
-                            let buffer = buffer.read(cx);
+                            let buffer = buffer.read(cx).read(cx);
                             let position = buffer.anchor_after(
                                 buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left),
                             );
@@ -1172,7 +1169,7 @@ mod tests {
                             log::info!(
                                 "inserting block {:?} {:?} with height {}",
                                 disposition,
-                                position.to_point(buffer),
+                                position.to_point(&buffer),
                                 height
                             );
                             BlockProperties {
@@ -1190,8 +1187,8 @@ mod tests {
                     let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
                         wrap_map.sync(tabs_snapshot, tab_edits, cx)
                     });
-                    let mut block_map = block_map.write(wraps_snapshot, wrap_edits, cx);
-                    let block_ids = block_map.insert(block_properties.clone(), cx);
+                    let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
+                    let block_ids = block_map.insert(block_properties.clone());
                     for (block_id, props) in block_ids.into_iter().zip(block_properties) {
                         expected_blocks.push((block_id, props));
                     }
@@ -1212,17 +1209,17 @@ mod tests {
                     let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
                         wrap_map.sync(tabs_snapshot, tab_edits, cx)
                     });
-                    let mut block_map = block_map.write(wraps_snapshot, wrap_edits, cx);
-                    block_map.remove(block_ids_to_remove, cx);
+                    let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
+                    block_map.remove(block_ids_to_remove);
                 }
                 _ => {
                     buffer.update(cx, |buffer, cx| {
-                        let v0 = buffer.version();
                         let edit_count = rng.gen_range(1..=5);
+                        let subscription = buffer.subscribe();
                         buffer.randomly_edit(&mut rng, edit_count, cx);
-                        log::info!("buffer text: {:?}", buffer.text());
-                        buffer_edits.extend(buffer.edits_since(&v0));
-                        buffer_snapshot = buffer.snapshot();
+                        buffer_snapshot = buffer.snapshot(cx);
+                        buffer_edits.extend(subscription.consume());
+                        log::info!("buffer text: {:?}", buffer_snapshot.text());
                     });
                 }
             }
@@ -1232,26 +1229,25 @@ mod tests {
             let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
                 wrap_map.sync(tabs_snapshot, tab_edits, cx)
             });
-            let mut blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits, cx);
+            let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
             assert_eq!(
                 blocks_snapshot.transforms.summary().input_rows,
                 wraps_snapshot.max_point().row() + 1
             );
             log::info!("blocks text: {:?}", blocks_snapshot.text());
 
-            let buffer = buffer.read(cx);
             let mut sorted_blocks = expected_blocks
                 .iter()
                 .cloned()
                 .map(|(id, block)| {
-                    let mut position = block.position.to_point(buffer);
+                    let mut position = block.position.to_point(&buffer_snapshot);
                     let column = wraps_snapshot.from_point(position, Bias::Left).column();
                     match block.disposition {
                         BlockDisposition::Above => {
                             position.column = 0;
                         }
                         BlockDisposition::Below => {
-                            position.column = buffer.line_len(position.row);
+                            position.column = buffer_snapshot.line_len(position.row);
                         }
                     };
                     let row = wraps_snapshot.from_point(position, Bias::Left).row();
@@ -1270,6 +1266,7 @@ mod tests {
                 .sort_unstable_by_key(|(id, block)| (block.position.row, block.disposition, *id));
             let mut sorted_blocks = sorted_blocks.into_iter().peekable();
 
+            let input_buffer_rows = buffer_snapshot.buffer_rows(0).collect::<Vec<_>>();
             let mut expected_buffer_rows = Vec::new();
             let mut expected_text = String::new();
             let input_text = wraps_snapshot.text();
@@ -1279,9 +1276,9 @@ mod tests {
                     expected_text.push('\n');
                 }
 
-                let buffer_row = wraps_snapshot
+                let buffer_row = input_buffer_rows[wraps_snapshot
                     .to_point(WrapPoint::new(row, 0), Bias::Left)
-                    .row;
+                    .row as usize];
 
                 while let Some((_, block)) = sorted_blocks.peek() {
                     if block.position.row == row && block.disposition == BlockDisposition::Above {
@@ -1297,7 +1294,7 @@ mod tests {
                 }
 
                 let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0;
-                expected_buffer_rows.push(if soft_wrapped { None } else { Some(buffer_row) });
+                expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
                 expected_text.push_str(input_line);
 
                 while let Some((_, block)) = sorted_blocks.peek() {
@@ -1377,16 +1374,30 @@ mod tests {
             let mut block_point = BlockPoint::new(0, 0);
             for c in expected_text.chars() {
                 let left_point = blocks_snapshot.clip_point(block_point, Bias::Left);
-                let right_point = blocks_snapshot.clip_point(block_point, Bias::Right);
-
+                let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left);
                 assert_eq!(
                     blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)),
                     left_point
                 );
+                assert_eq!(
+                    left_buffer_point,
+                    buffer_snapshot.clip_point(left_buffer_point, Bias::Right),
+                    "{:?} is not valid in buffer coordinates",
+                    left_point
+                );
+
+                let right_point = blocks_snapshot.clip_point(block_point, Bias::Right);
+                let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right);
                 assert_eq!(
                     blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)),
                     right_point
                 );
+                assert_eq!(
+                    right_buffer_point,
+                    buffer_snapshot.clip_point(right_buffer_point, Bias::Left),
+                    "{:?} is not valid in buffer coordinates",
+                    right_point
+                );
 
                 if c == '\n' {
                     block_point.0 += Point::new(1, 0);
@@ -1396,4 +1407,10 @@ mod tests {
             }
         }
     }
+
+    impl BlockSnapshot {
+        fn to_point(&self, point: BlockPoint, bias: Bias) -> Point {
+            self.wrap_snapshot.to_point(self.to_wrap_point(point), bias)
+        }
+    }
 }
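
The randomized block_map test above now collects buffer edits through a
subscription created before the random edits, instead of re-deriving them
from a saved version vector, and expected buffer rows become Option<u32> so
soft-wrapped display rows can be recorded as None. A minimal sketch of the
subscription pattern, reusing the test's own buffer, rng, cx,
buffer_snapshot, and buffer_edits bindings:

    buffer.update(cx, |buffer, cx| {
        // Subscribe before editing; the subscription accumulates the edits
        // produced by randomly_edit.
        let subscription = buffer.subscribe();
        buffer.randomly_edit(&mut rng, 5, cx);
        buffer_snapshot = buffer.snapshot(cx);
        // Drain the accumulated edits rather than diffing two versions.
        buffer_edits.extend(subscription.consume());
    });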

crates/editor/src/display_map/fold_map.rs 🔗

@@ -1,19 +1,20 @@
-use language::{
-    Anchor, AnchorRangeExt, Chunk, Edit, Point, PointUtf16, Snapshot as BufferSnapshot,
-    TextSummary, ToOffset,
+use crate::{
+    multi_buffer::MultiBufferRows, Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot,
+    ToOffset,
 };
+use language::{Chunk, Edit, Point, PointUtf16, TextSummary};
 use parking_lot::Mutex;
 use std::{
     cmp::{self, Ordering},
     iter,
-    ops::Range,
+    ops::{Range, Sub},
     sync::atomic::{AtomicUsize, Ordering::SeqCst},
 };
 use sum_tree::{Bias, Cursor, FilterCursor, SumTree};
 use theme::SyntaxTheme;
 
 pub trait ToFoldPoint {
-    fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint;
+    fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint;
 }
 
 #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
@@ -41,23 +42,23 @@ impl FoldPoint {
         &mut self.0.column
     }
 
-    pub fn to_buffer_point(&self, snapshot: &Snapshot) -> Point {
+    pub fn to_buffer_point(&self, snapshot: &FoldSnapshot) -> Point {
         let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
         cursor.seek(self, Bias::Right, &());
         let overshoot = self.0 - cursor.start().0 .0;
         cursor.start().1 + overshoot
     }
 
-    pub fn to_buffer_offset(&self, snapshot: &Snapshot) -> usize {
+    pub fn to_buffer_offset(&self, snapshot: &FoldSnapshot) -> usize {
         let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>();
         cursor.seek(self, Bias::Right, &());
         let overshoot = self.0 - cursor.start().0 .0;
         snapshot
             .buffer_snapshot
-            .to_offset(cursor.start().1 + overshoot)
+            .point_to_offset(cursor.start().1 + overshoot)
     }
 
-    pub fn to_offset(&self, snapshot: &Snapshot) -> FoldOffset {
+    pub fn to_offset(&self, snapshot: &FoldSnapshot) -> FoldOffset {
         let mut cursor = snapshot
             .transforms
             .cursor::<(FoldPoint, TransformSummary)>();
@@ -69,7 +70,7 @@ impl FoldPoint {
             assert!(transform.output_text.is_none());
             let end_buffer_offset = snapshot
                 .buffer_snapshot
-                .to_offset(cursor.start().1.input.lines + overshoot);
+                .point_to_offset(cursor.start().1.input.lines + overshoot);
             offset += end_buffer_offset - cursor.start().1.input.bytes;
         }
         FoldOffset(offset)
@@ -77,7 +78,7 @@ impl FoldPoint {
 }
 
 impl ToFoldPoint for Point {
-    fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint {
+    fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint {
         let mut cursor = snapshot.transforms.cursor::<(Point, FoldPoint)>();
         cursor.seek(self, Bias::Right, &());
         if cursor.item().map_or(false, |t| t.is_fold()) {
@@ -102,7 +103,7 @@ impl<'a> FoldMapWriter<'a> {
     pub fn fold<T: ToOffset>(
         &mut self,
         ranges: impl IntoIterator<Item = Range<T>>,
-    ) -> (Snapshot, Vec<FoldEdit>) {
+    ) -> (FoldSnapshot, Vec<FoldEdit>) {
         let mut edits = Vec::new();
         let mut folds = Vec::new();
         let buffer = self.0.buffer.lock().clone();
@@ -133,7 +134,7 @@ impl<'a> FoldMapWriter<'a> {
 
         consolidate_buffer_edits(&mut edits);
         let edits = self.0.sync(buffer.clone(), edits);
-        let snapshot = Snapshot {
+        let snapshot = FoldSnapshot {
             transforms: self.0.transforms.lock().clone(),
             folds: self.0.folds.clone(),
             buffer_snapshot: buffer,
@@ -145,7 +146,7 @@ impl<'a> FoldMapWriter<'a> {
     pub fn unfold<T: ToOffset>(
         &mut self,
         ranges: impl IntoIterator<Item = Range<T>>,
-    ) -> (Snapshot, Vec<FoldEdit>) {
+    ) -> (FoldSnapshot, Vec<FoldEdit>) {
         let mut edits = Vec::new();
         let mut fold_ixs_to_delete = Vec::new();
         let buffer = self.0.buffer.lock().clone();
@@ -154,10 +155,12 @@ impl<'a> FoldMapWriter<'a> {
             let mut folds_cursor = intersecting_folds(&buffer, &self.0.folds, range, true);
             while let Some(fold) = folds_cursor.item() {
                 let offset_range = fold.0.start.to_offset(&buffer)..fold.0.end.to_offset(&buffer);
-                edits.push(text::Edit {
-                    old: offset_range.clone(),
-                    new: offset_range,
-                });
+                if offset_range.end > offset_range.start {
+                    edits.push(text::Edit {
+                        old: offset_range.clone(),
+                        new: offset_range,
+                    });
+                }
                 fold_ixs_to_delete.push(*folds_cursor.start());
                 folds_cursor.next(&buffer);
             }
@@ -179,7 +182,7 @@ impl<'a> FoldMapWriter<'a> {
 
         consolidate_buffer_edits(&mut edits);
         let edits = self.0.sync(buffer.clone(), edits);
-        let snapshot = Snapshot {
+        let snapshot = FoldSnapshot {
             transforms: self.0.transforms.lock().clone(),
             folds: self.0.folds.clone(),
             buffer_snapshot: buffer,
@@ -190,14 +193,14 @@ impl<'a> FoldMapWriter<'a> {
 }
 
 pub struct FoldMap {
-    buffer: Mutex<BufferSnapshot>,
+    buffer: Mutex<MultiBufferSnapshot>,
     transforms: Mutex<SumTree<Transform>>,
     folds: SumTree<Fold>,
     version: AtomicUsize,
 }
 
 impl FoldMap {
-    pub fn new(buffer: BufferSnapshot) -> (Self, Snapshot) {
+    pub fn new(buffer: MultiBufferSnapshot) -> (Self, FoldSnapshot) {
         let this = Self {
             buffer: Mutex::new(buffer.clone()),
             folds: Default::default(),
@@ -214,7 +217,7 @@ impl FoldMap {
             version: Default::default(),
         };
 
-        let snapshot = Snapshot {
+        let snapshot = FoldSnapshot {
             transforms: this.transforms.lock().clone(),
             folds: this.folds.clone(),
             buffer_snapshot: this.buffer.lock().clone(),
@@ -225,12 +228,12 @@ impl FoldMap {
 
     pub fn read(
         &self,
-        buffer: BufferSnapshot,
+        buffer: MultiBufferSnapshot,
         edits: Vec<Edit<usize>>,
-    ) -> (Snapshot, Vec<FoldEdit>) {
+    ) -> (FoldSnapshot, Vec<FoldEdit>) {
         let edits = self.sync(buffer, edits);
         self.check_invariants();
-        let snapshot = Snapshot {
+        let snapshot = FoldSnapshot {
             transforms: self.transforms.lock().clone(),
             folds: self.folds.clone(),
             buffer_snapshot: self.buffer.lock().clone(),
@@ -241,9 +244,9 @@ impl FoldMap {
 
     pub fn write(
         &mut self,
-        buffer: BufferSnapshot,
+        buffer: MultiBufferSnapshot,
         edits: Vec<Edit<usize>>,
-    ) -> (FoldMapWriter, Snapshot, Vec<FoldEdit>) {
+    ) -> (FoldMapWriter, FoldSnapshot, Vec<FoldEdit>) {
         let (snapshot, edits) = self.read(buffer, edits);
         (FoldMapWriter(self), snapshot, edits)
     }
@@ -260,7 +263,7 @@ impl FoldMap {
 
     fn sync(
         &self,
-        new_buffer: BufferSnapshot,
+        new_buffer: MultiBufferSnapshot,
         buffer_edits: Vec<text::Edit<usize>>,
     ) -> Vec<FoldEdit> {
         if buffer_edits.is_empty() {
@@ -457,8 +460,8 @@ impl FoldMap {
                         new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0);
 
                     fold_edits.push(FoldEdit {
-                        old_bytes: FoldOffset(old_start)..FoldOffset(old_end),
-                        new_bytes: FoldOffset(new_start)..FoldOffset(new_end),
+                        old: FoldOffset(old_start)..FoldOffset(old_end),
+                        new: FoldOffset(new_start)..FoldOffset(new_end),
                     });
                 }
 
@@ -474,14 +477,18 @@ impl FoldMap {
 }
 
 #[derive(Clone)]
-pub struct Snapshot {
+pub struct FoldSnapshot {
     transforms: SumTree<Transform>,
     folds: SumTree<Fold>,
-    buffer_snapshot: language::Snapshot,
+    buffer_snapshot: MultiBufferSnapshot,
     pub version: usize,
 }
 
-impl Snapshot {
+impl FoldSnapshot {
+    pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
+        &self.buffer_snapshot
+    }
+
     #[cfg(test)]
     pub fn text(&self) -> String {
         self.chunks(FoldOffset(0)..self.len(), None)
@@ -553,15 +560,24 @@ impl Snapshot {
         (line_end - line_start) as u32
     }
 
-    pub fn buffer_rows(&self, start_row: u32) -> BufferRows {
+    pub fn buffer_rows(&self, start_row: u32) -> FoldBufferRows {
         if start_row > self.transforms.summary().output.lines.row {
             panic!("invalid display row {}", start_row);
         }
 
         let fold_point = FoldPoint::new(start_row, 0);
-        let mut cursor = self.transforms.cursor();
+        let mut cursor = self.transforms.cursor::<(FoldPoint, Point)>();
         cursor.seek(&fold_point, Bias::Left, &());
-        BufferRows { fold_point, cursor }
+
+        let overshoot = fold_point.0 - cursor.start().0 .0;
+        let buffer_point = cursor.start().1 + overshoot;
+        let input_buffer_rows = self.buffer_snapshot.buffer_rows(buffer_point.row);
+
+        FoldBufferRows {
+            fold_point,
+            input_buffer_rows,
+            cursor,
+        }
     }
 
     pub fn max_point(&self) -> FoldPoint {
@@ -624,7 +640,7 @@ impl Snapshot {
         &'a self,
         range: Range<FoldOffset>,
         theme: Option<&'a SyntaxTheme>,
-    ) -> Chunks<'a> {
+    ) -> FoldChunks<'a> {
         let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>();
 
         transform_cursor.seek(&range.end, Bias::Right, &());
@@ -635,7 +651,7 @@ impl Snapshot {
         let overshoot = range.start.0 - transform_cursor.start().0 .0;
         let buffer_start = transform_cursor.start().1 + overshoot;
 
-        Chunks {
+        FoldChunks {
             transform_cursor,
             buffer_chunks: self.buffer_snapshot.chunks(buffer_start..buffer_end, theme),
             buffer_chunk: None,
@@ -687,11 +703,7 @@ impl Snapshot {
                 let buffer_position = cursor.start().1 + overshoot;
                 let clipped_buffer_position =
                     self.buffer_snapshot.clip_point(buffer_position, bias);
-                FoldPoint::new(
-                    point.row(),
-                    ((point.column() as i32) + clipped_buffer_position.column as i32
-                        - buffer_position.column as i32) as u32,
-                )
+                FoldPoint(cursor.start().0 .0 + (clipped_buffer_position - cursor.start().1))
             }
         } else {
             FoldPoint(self.transforms.summary().output.lines)
@@ -700,7 +712,7 @@ impl Snapshot {
 }
 
 fn intersecting_folds<'a, T>(
-    buffer: &'a text::Snapshot,
+    buffer: &'a MultiBufferSnapshot,
     folds: &'a SumTree<Fold>,
     range: Range<T>,
     inclusive: bool,
@@ -750,20 +762,20 @@ fn consolidate_buffer_edits(edits: &mut Vec<text::Edit<usize>>) {
 
 fn consolidate_fold_edits(edits: &mut Vec<FoldEdit>) {
     edits.sort_unstable_by(|a, b| {
-        a.old_bytes
+        a.old
             .start
-            .cmp(&b.old_bytes.start)
-            .then_with(|| b.old_bytes.end.cmp(&a.old_bytes.end))
+            .cmp(&b.old.start)
+            .then_with(|| b.old.end.cmp(&a.old.end))
     });
 
     let mut i = 1;
     while i < edits.len() {
         let edit = edits[i].clone();
         let prev_edit = &mut edits[i - 1];
-        if prev_edit.old_bytes.end >= edit.old_bytes.start {
-            prev_edit.old_bytes.end = prev_edit.old_bytes.end.max(edit.old_bytes.end);
-            prev_edit.new_bytes.start = prev_edit.new_bytes.start.min(edit.new_bytes.start);
-            prev_edit.new_bytes.end = prev_edit.new_bytes.end.max(edit.new_bytes.end);
+        if prev_edit.old.end >= edit.old.start {
+            prev_edit.old.end = prev_edit.old.end.max(edit.old.end);
+            prev_edit.new.start = prev_edit.new.start.min(edit.new.start);
+            prev_edit.new.end = prev_edit.new.end.max(edit.new.end);
             edits.remove(i);
             continue;
         }
@@ -851,9 +863,9 @@ impl Default for FoldSummary {
 }
 
 impl sum_tree::Summary for FoldSummary {
-    type Context = text::Snapshot;
+    type Context = MultiBufferSnapshot;
 
-    fn add_summary(&mut self, other: &Self, buffer: &text::Snapshot) {
+    fn add_summary(&mut self, other: &Self, buffer: &MultiBufferSnapshot) {
         if other.min_start.cmp(&self.min_start, buffer).unwrap() == Ordering::Less {
             self.min_start = other.min_start.clone();
         }
@@ -877,62 +889,66 @@ impl sum_tree::Summary for FoldSummary {
 }
 
 impl<'a> sum_tree::Dimension<'a, FoldSummary> for Fold {
-    fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::Snapshot) {
+    fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) {
         self.0.start = summary.start.clone();
         self.0.end = summary.end.clone();
     }
 }
 
 impl<'a> sum_tree::SeekTarget<'a, FoldSummary, Fold> for Fold {
-    fn cmp(&self, other: &Self, buffer: &text::Snapshot) -> Ordering {
+    fn cmp(&self, other: &Self, buffer: &MultiBufferSnapshot) -> Ordering {
         self.0.cmp(&other.0, buffer).unwrap()
     }
 }
 
 impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize {
-    fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::Snapshot) {
+    fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) {
         *self += summary.count;
     }
 }
 
-pub struct BufferRows<'a> {
+pub struct FoldBufferRows<'a> {
     cursor: Cursor<'a, Transform, (FoldPoint, Point)>,
+    input_buffer_rows: MultiBufferRows<'a>,
     fold_point: FoldPoint,
 }
 
-impl<'a> Iterator for BufferRows<'a> {
-    type Item = u32;
+impl<'a> Iterator for FoldBufferRows<'a> {
+    type Item = Option<u32>;
 
     fn next(&mut self) -> Option<Self::Item> {
+        let mut traversed_fold = false;
         while self.fold_point > self.cursor.end(&()).0 {
             self.cursor.next(&());
+            traversed_fold = true;
             if self.cursor.item().is_none() {
-                // TODO: Return a bool from next?
                 break;
             }
         }
 
         if self.cursor.item().is_some() {
-            let overshoot = self.fold_point.0 - self.cursor.start().0 .0;
-            let buffer_point = self.cursor.start().1 + overshoot;
+            if traversed_fold {
+                self.input_buffer_rows.seek(self.cursor.start().1.row);
+                self.input_buffer_rows.next();
+            }
             *self.fold_point.row_mut() += 1;
-            Some(buffer_point.row)
+            self.input_buffer_rows.next()
         } else {
             None
         }
     }
 }
 
-pub struct Chunks<'a> {
+pub struct FoldChunks<'a> {
     transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
-    buffer_chunks: language::Chunks<'a>,
+    buffer_chunks: MultiBufferChunks<'a>,
     buffer_chunk: Option<(usize, Chunk<'a>)>,
     buffer_offset: usize,
     output_offset: usize,
     max_output_offset: usize,
 }
 
-impl<'a> Iterator for Chunks<'a> {
+impl<'a> Iterator for FoldChunks<'a> {
     type Item = Chunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -1006,7 +1022,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint {
 pub struct FoldOffset(pub usize);
 
 impl FoldOffset {
-    pub fn to_point(&self, snapshot: &Snapshot) -> FoldPoint {
+    pub fn to_point(&self, snapshot: &FoldSnapshot) -> FoldPoint {
         let mut cursor = snapshot
             .transforms
             .cursor::<(FoldOffset, TransformSummary)>();
@@ -1015,13 +1031,21 @@ impl FoldOffset {
             Point::new(0, (self.0 - cursor.start().0 .0) as u32)
         } else {
             let buffer_offset = cursor.start().1.input.bytes + self.0 - cursor.start().0 .0;
-            let buffer_point = snapshot.buffer_snapshot.to_point(buffer_offset);
+            let buffer_point = snapshot.buffer_snapshot.offset_to_point(buffer_offset);
             buffer_point - cursor.start().1.input.lines
         };
         FoldPoint(cursor.start().1.output.lines + overshoot)
     }
 }
 
+impl Sub for FoldOffset {
+    type Output = Self;
+
+    fn sub(self, rhs: Self) -> Self::Output {
+        Self(self.0 - rhs.0)
+    }
+}
+
 impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset {
     fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) {
         self.0 += &summary.output.bytes;
@@ -1040,41 +1064,23 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize {
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct FoldEdit {
-    pub old_bytes: Range<FoldOffset>,
-    pub new_bytes: Range<FoldOffset>,
-}
-
-#[cfg(test)]
-impl FoldEdit {
-    pub fn delta(&self) -> isize {
-        self.inserted_bytes() as isize - self.deleted_bytes() as isize
-    }
-
-    pub fn deleted_bytes(&self) -> usize {
-        self.old_bytes.end.0 - self.old_bytes.start.0
-    }
-
-    pub fn inserted_bytes(&self) -> usize {
-        self.new_bytes.end.0 - self.new_bytes.start.0
-    }
-}
+pub type FoldEdit = Edit<FoldOffset>;
 
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::{test::sample_text, ToPoint};
-    use language::Buffer;
+    use crate::{MultiBuffer, ToPoint};
     use rand::prelude::*;
     use std::{env, mem};
     use text::RandomCharIter;
+    use util::test::sample_text;
     use Bias::{Left, Right};
 
     #[gpui::test]
     fn test_basic_folds(cx: &mut gpui::MutableAppContext) {
-        let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
-        let buffer_snapshot = buffer.read(cx).snapshot();
+        let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
+        let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
+        let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
 
         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1087,18 +1093,17 @@ mod tests {
             edits,
             &[
                 FoldEdit {
-                    old_bytes: FoldOffset(2)..FoldOffset(16),
-                    new_bytes: FoldOffset(2)..FoldOffset(5),
+                    old: FoldOffset(2)..FoldOffset(16),
+                    new: FoldOffset(2)..FoldOffset(5),
                 },
                 FoldEdit {
-                    old_bytes: FoldOffset(18)..FoldOffset(29),
-                    new_bytes: FoldOffset(7)..FoldOffset(10)
+                    old: FoldOffset(18)..FoldOffset(29),
+                    new: FoldOffset(7)..FoldOffset(10)
                 },
             ]
         );
 
-        let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
-            let v0 = buffer.version();
+        let buffer_snapshot = buffer.update(cx, |buffer, cx| {
             buffer.edit(
                 vec![
                     Point::new(0, 0)..Point::new(0, 1),
@@ -1107,30 +1112,30 @@ mod tests {
                 "123",
                 cx,
             );
-            (buffer.snapshot(), buffer.edits_since(&v0).collect())
+            buffer.snapshot(cx)
         });
-        let (snapshot3, edits) = map.read(buffer_snapshot.clone(), edits);
+        let (snapshot3, edits) =
+            map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
         assert_eq!(snapshot3.text(), "123a…c123c…eeeee");
         assert_eq!(
             edits,
             &[
                 FoldEdit {
-                    old_bytes: FoldOffset(0)..FoldOffset(1),
-                    new_bytes: FoldOffset(0)..FoldOffset(3),
+                    old: FoldOffset(0)..FoldOffset(1),
+                    new: FoldOffset(0)..FoldOffset(3),
                 },
                 FoldEdit {
-                    old_bytes: FoldOffset(6)..FoldOffset(6),
-                    new_bytes: FoldOffset(8)..FoldOffset(11),
+                    old: FoldOffset(6)..FoldOffset(6),
+                    new: FoldOffset(8)..FoldOffset(11),
                 },
             ]
         );
 
-        let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
-            let v0 = buffer.version();
+        let buffer_snapshot = buffer.update(cx, |buffer, cx| {
             buffer.edit(vec![Point::new(2, 6)..Point::new(4, 3)], "456", cx);
-            (buffer.snapshot(), buffer.edits_since(&v0).collect())
+            buffer.snapshot(cx)
         });
-        let (snapshot4, _) = map.read(buffer_snapshot.clone(), edits);
+        let (snapshot4, _) = map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
         assert_eq!(snapshot4.text(), "123a…c123456eee");
 
         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1141,8 +1146,9 @@ mod tests {
 
     #[gpui::test]
     fn test_adjacent_folds(cx: &mut gpui::MutableAppContext) {
-        let buffer = cx.add_model(|cx| Buffer::new(0, "abcdefghijkl", cx));
-        let buffer_snapshot = buffer.read(cx).snapshot();
+        let buffer = MultiBuffer::build_simple("abcdefghijkl", cx);
+        let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
+        let buffer_snapshot = buffer.read(cx).snapshot(cx);
 
         {
             let mut map = FoldMap::new(buffer_snapshot.clone()).0;
@@ -1175,20 +1181,20 @@ mod tests {
             assert_eq!(snapshot.text(), "…fghijkl");
 
             // Edit within one of the folds.
-            let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
-                let v0 = buffer.version();
+            let buffer_snapshot = buffer.update(cx, |buffer, cx| {
                 buffer.edit(vec![0..1], "12345", cx);
-                (buffer.snapshot(), buffer.edits_since(&v0).collect())
+                buffer.snapshot(cx)
             });
-            let (snapshot, _) = map.read(buffer_snapshot.clone(), edits);
+            let (snapshot, _) =
+                map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
             assert_eq!(snapshot.text(), "12345…fghijkl");
         }
     }
 
     #[gpui::test]
     fn test_overlapping_folds(cx: &mut gpui::MutableAppContext) {
-        let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
-        let buffer_snapshot = buffer.read(cx).snapshot();
+        let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
+        let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
         writer.fold(vec![
@@ -1203,8 +1209,9 @@ mod tests {
 
     #[gpui::test]
     fn test_merging_folds_via_edit(cx: &mut gpui::MutableAppContext) {
-        let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
-        let buffer_snapshot = buffer.read(cx).snapshot();
+        let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
+        let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
+        let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
 
         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1215,21 +1222,19 @@ mod tests {
         let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
         assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee");
 
-        let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| {
-            let v0 = buffer.version();
+        let buffer_snapshot = buffer.update(cx, |buffer, cx| {
             buffer.edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", cx);
-            (buffer.snapshot(), buffer.edits_since(&v0).collect())
+            buffer.snapshot(cx)
         });
-        let (snapshot, _) = map.read(buffer_snapshot.clone(), edits);
+        let (snapshot, _) = map.read(buffer_snapshot.clone(), subscription.consume().into_inner());
         assert_eq!(snapshot.text(), "aa…eeeee");
     }
 
     #[gpui::test]
     fn test_folds_in_range(cx: &mut gpui::MutableAppContext) {
-        let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx));
-        let buffer_snapshot = buffer.read(cx).snapshot();
+        let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx);
+        let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
-        let buffer = buffer.read(cx);
 
         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
         writer.fold(vec![
@@ -1241,7 +1246,7 @@ mod tests {
         let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
         let fold_ranges = snapshot
             .folds_in_range(Point::new(1, 0)..Point::new(1, 3))
-            .map(|fold| fold.start.to_point(buffer)..fold.end.to_point(buffer))
+            .map(|fold| fold.start.to_point(&buffer_snapshot)..fold.end.to_point(&buffer_snapshot))
             .collect::<Vec<_>>();
         assert_eq!(
             fold_ranges,
@@ -1258,55 +1263,73 @@ mod tests {
             .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
             .unwrap_or(10);
 
-        let buffer = cx.add_model(|cx| {
-            let len = rng.gen_range(0..10);
-            let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
-            Buffer::new(0, text, cx)
-        });
-        let buffer_snapshot = buffer.read(cx).snapshot();
+        let len = rng.gen_range(0..10);
+        let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
+        let buffer = if rng.gen() {
+            MultiBuffer::build_simple(&text, cx)
+        } else {
+            MultiBuffer::build_random(&mut rng, cx)
+        };
+        let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
 
         let (mut initial_snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
         let mut snapshot_edits = Vec::new();
 
         for _ in 0..operations {
-            log::info!("text: {:?}", buffer.read(cx).text());
-            let buffer_edits = match rng.gen_range(0..=100) {
+            log::info!("text: {:?}", buffer_snapshot.text());
+            let mut buffer_edits = Vec::new();
+            match rng.gen_range(0..=100) {
                 0..=59 => {
                     snapshot_edits.extend(map.randomly_mutate(&mut rng));
-                    vec![]
                 }
                 _ => buffer.update(cx, |buffer, cx| {
-                    let start_version = buffer.version.clone();
+                    let subscription = buffer.subscribe();
                     let edit_count = rng.gen_range(1..=5);
                     buffer.randomly_edit(&mut rng, edit_count, cx);
-                    let edits = buffer
-                        .edits_since::<Point>(&start_version)
-                        .collect::<Vec<_>>();
+                    buffer_snapshot = buffer.snapshot(cx);
+                    let edits = subscription.consume().into_inner();
                     log::info!("editing {:?}", edits);
-                    buffer.edits_since::<usize>(&start_version).collect()
+                    buffer_edits.extend(edits);
                 }),
             };
-            let buffer_snapshot = buffer.read(cx).snapshot();
 
             let (snapshot, edits) = map.read(buffer_snapshot.clone(), buffer_edits);
             snapshot_edits.push((snapshot.clone(), edits));
 
             let mut expected_text: String = buffer_snapshot.text().to_string();
-            let mut expected_buffer_rows = Vec::new();
-            let mut next_row = buffer_snapshot.max_point().row;
             for fold_range in map.merged_fold_ranges().into_iter().rev() {
-                let fold_start = buffer_snapshot.point_for_offset(fold_range.start).unwrap();
-                let fold_end = buffer_snapshot.point_for_offset(fold_range.end).unwrap();
-                expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
-                next_row = fold_start.row;
-
                 expected_text.replace_range(fold_range.start..fold_range.end, "…");
             }
-            expected_buffer_rows.extend((0..=next_row).rev());
-            expected_buffer_rows.reverse();
 
             assert_eq!(snapshot.text(), expected_text);
+            log::info!(
+                "fold text {:?} ({} lines)",
+                expected_text,
+                expected_text.matches('\n').count() + 1
+            );
+
+            let mut prev_row = 0;
+            let mut expected_buffer_rows = Vec::new();
+            for fold_range in map.merged_fold_ranges().into_iter() {
+                let fold_start = buffer_snapshot.offset_to_point(fold_range.start).row;
+                let fold_end = buffer_snapshot.offset_to_point(fold_range.end).row;
+                expected_buffer_rows.extend(
+                    buffer_snapshot
+                        .buffer_rows(prev_row)
+                        .take((1 + fold_start - prev_row) as usize),
+                );
+                prev_row = 1 + fold_end;
+            }
+            expected_buffer_rows.extend(buffer_snapshot.buffer_rows(prev_row));
+
+            assert_eq!(
+                expected_buffer_rows.len(),
+                expected_text.matches('\n').count() + 1,
+                "wrong expected buffer rows {:?}. text: {:?}",
+                expected_buffer_rows,
+                expected_text
+            );
 
             for (output_row, line) in expected_text.lines().enumerate() {
                 let line_len = snapshot.line_len(output_row as u32);
@@ -1375,7 +1398,6 @@ mod tests {
                 }
 
                 let text = &expected_text[start.0..end.0];
-                log::info!("slicing {:?}..{:?} (text: {:?})", start, end, text);
                 assert_eq!(
                     snapshot
                         .chunks(start..end, None)
@@ -1385,14 +1407,19 @@ mod tests {
                 );
             }
 
-            for (idx, buffer_row) in expected_buffer_rows.iter().enumerate() {
-                let fold_row = Point::new(*buffer_row, 0)
-                    .to_fold_point(&snapshot, Right)
+            let mut fold_row = 0;
+            while fold_row < expected_buffer_rows.len() as u32 {
+                fold_row = snapshot
+                    .clip_point(FoldPoint::new(fold_row, 0), Bias::Right)
                     .row();
+                eprintln!("fold_row: {} of {}", fold_row, expected_buffer_rows.len());
                 assert_eq!(
                     snapshot.buffer_rows(fold_row).collect::<Vec<_>>(),
-                    expected_buffer_rows[idx..],
+                    expected_buffer_rows[(fold_row as usize)..],
+                    "wrong buffer rows starting at fold row {}",
+                    fold_row,
                 );
+                fold_row += 1;
             }
 
             for fold_range in map.merged_fold_ranges() {
@@ -1454,12 +1481,9 @@ mod tests {
             let mut text = initial_snapshot.text();
             for (snapshot, edits) in snapshot_edits.drain(..) {
                 let new_text = snapshot.text();
-                let mut delta = 0isize;
                 for edit in edits {
-                    let old_bytes = ((edit.old_bytes.start.0 as isize) + delta) as usize
-                        ..((edit.old_bytes.end.0 as isize) + delta) as usize;
-                    let new_bytes = edit.new_bytes.start.0..edit.new_bytes.end.0;
-                    delta += edit.delta();
+                    let old_bytes = edit.new.start.0..edit.new.start.0 + edit.old_len().0;
+                    let new_bytes = edit.new.start.0..edit.new.end.0;
                     text.replace_range(old_bytes, &new_text[new_bytes]);
                 }
 
@@ -1471,10 +1495,10 @@ mod tests {
 
     #[gpui::test]
     fn test_buffer_rows(cx: &mut gpui::MutableAppContext) {
-        let text = sample_text(6, 6) + "\n";
-        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+        let text = sample_text(6, 6, 'a') + "\n";
+        let buffer = MultiBuffer::build_simple(&text, cx);
 
-        let buffer_snapshot = buffer.read(cx).snapshot();
+        let buffer_snapshot = buffer.read(cx).snapshot(cx);
         let mut map = FoldMap::new(buffer_snapshot.clone()).0;
 
         let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]);
@@ -1485,8 +1509,11 @@ mod tests {
 
         let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]);
         assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee\nffffff\n");
-        assert_eq!(snapshot.buffer_rows(0).collect::<Vec<_>>(), [0, 3, 5, 6]);
-        assert_eq!(snapshot.buffer_rows(3).collect::<Vec<_>>(), [6]);
+        assert_eq!(
+            snapshot.buffer_rows(0).collect::<Vec<_>>(),
+            [Some(0), Some(3), Some(5), Some(6)]
+        );
+        assert_eq!(snapshot.buffer_rows(3).collect::<Vec<_>>(), [Some(6)]);
     }
 
     impl FoldMap {
@@ -1519,7 +1546,10 @@ mod tests {
             merged_ranges
         }
 
-        pub fn randomly_mutate(&mut self, rng: &mut impl Rng) -> Vec<(Snapshot, Vec<FoldEdit>)> {
+        pub fn randomly_mutate(
+            &mut self,
+            rng: &mut impl Rng,
+        ) -> Vec<(FoldSnapshot, Vec<FoldEdit>)> {
             let mut snapshot_edits = Vec::new();
             match rng.gen_range(0..=100) {
                 0..=39 if !self.folds.is_empty() => {
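
In fold_map.rs, Snapshot becomes FoldSnapshot and wraps a MultiBufferSnapshot
instead of a single-buffer language snapshot, FoldBufferRows now yields
Option<u32> (forwarding the multi-buffer's row iterator, so rows with no
underlying buffer line come through as None), and FoldEdit stops being a
bespoke struct with old_bytes/new_bytes fields:

    pub type FoldEdit = Edit<FoldOffset>;

Consumers therefore work with the shared old/new ranges of text::Edit. The
randomized test replays those edits without the old signed-delta
bookkeeping; its replay loop, reproduced here as a minimal sketch of the
pattern (text holds the previously reconstructed string, new_text the next
snapshot's text):

    for edit in edits {
        // old_len() yields a FoldOffset (FoldOffset now implements Sub).
        let old_bytes = edit.new.start.0..edit.new.start.0 + edit.old_len().0;
        let new_bytes = edit.new.start.0..edit.new.end.0;
        text.replace_range(old_bytes, &new_text[new_bytes]);
    }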

crates/editor/src/display_map/tab_map.rs 🔗

@@ -1,4 +1,5 @@
-use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot, ToFoldPoint};
+use super::fold_map::{self, FoldEdit, FoldPoint, FoldSnapshot, ToFoldPoint};
+use crate::MultiBufferSnapshot;
 use language::{rope, Chunk};
 use parking_lot::Mutex;
 use std::{cmp, mem, ops::Range};
@@ -6,11 +7,11 @@ use sum_tree::Bias;
 use text::Point;
 use theme::SyntaxTheme;
 
-pub struct TabMap(Mutex<Snapshot>);
+pub struct TabMap(Mutex<TabSnapshot>);
 
 impl TabMap {
-    pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, Snapshot) {
-        let snapshot = Snapshot {
+    pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, TabSnapshot) {
+        let snapshot = TabSnapshot {
             fold_snapshot: input,
             tab_size,
         };
@@ -21,10 +22,10 @@ impl TabMap {
         &self,
         fold_snapshot: FoldSnapshot,
         mut fold_edits: Vec<FoldEdit>,
-    ) -> (Snapshot, Vec<Edit>) {
+    ) -> (TabSnapshot, Vec<TabEdit>) {
         let mut old_snapshot = self.0.lock();
         let max_offset = old_snapshot.fold_snapshot.len();
-        let new_snapshot = Snapshot {
+        let new_snapshot = TabSnapshot {
             fold_snapshot,
             tab_size: old_snapshot.tab_size,
         };
@@ -34,13 +35,13 @@ impl TabMap {
             let mut delta = 0;
             for chunk in old_snapshot
                 .fold_snapshot
-                .chunks(fold_edit.old_bytes.end..max_offset, None)
+                .chunks(fold_edit.old.end..max_offset, None)
             {
                 let patterns: &[_] = &['\t', '\n'];
                 if let Some(ix) = chunk.text.find(patterns) {
                     if &chunk.text[ix..ix + 1] == "\t" {
-                        fold_edit.old_bytes.end.0 += delta + ix + 1;
-                        fold_edit.new_bytes.end.0 += delta + ix + 1;
+                        fold_edit.old.end.0 += delta + ix + 1;
+                        fold_edit.new.end.0 += delta + ix + 1;
                     }
 
                     break;
@@ -55,9 +56,9 @@ impl TabMap {
             let (prev_edits, next_edits) = fold_edits.split_at_mut(ix);
             let prev_edit = prev_edits.last_mut().unwrap();
             let edit = &next_edits[0];
-            if prev_edit.old_bytes.end >= edit.old_bytes.start {
-                prev_edit.old_bytes.end = edit.old_bytes.end;
-                prev_edit.new_bytes.end = edit.new_bytes.end;
+            if prev_edit.old.end >= edit.old.start {
+                prev_edit.old.end = edit.old.end;
+                prev_edit.new.end = edit.new.end;
                 fold_edits.remove(ix);
             } else {
                 ix += 1;
@@ -65,25 +66,13 @@ impl TabMap {
         }
 
         for fold_edit in fold_edits {
-            let old_start = fold_edit
-                .old_bytes
-                .start
-                .to_point(&old_snapshot.fold_snapshot);
-            let old_end = fold_edit
-                .old_bytes
-                .end
-                .to_point(&old_snapshot.fold_snapshot);
-            let new_start = fold_edit
-                .new_bytes
-                .start
-                .to_point(&new_snapshot.fold_snapshot);
-            let new_end = fold_edit
-                .new_bytes
-                .end
-                .to_point(&new_snapshot.fold_snapshot);
-            tab_edits.push(Edit {
-                old_lines: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
-                new_lines: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
+            let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot);
+            let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
+            let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
+            let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
+            tab_edits.push(TabEdit {
+                old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
+                new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
             });
         }
 
@@ -93,12 +82,16 @@ impl TabMap {
 }
 
 #[derive(Clone)]
-pub struct Snapshot {
+pub struct TabSnapshot {
     pub fold_snapshot: FoldSnapshot,
     pub tab_size: usize,
 }
 
-impl Snapshot {
+impl TabSnapshot {
+    pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
+        self.fold_snapshot.buffer_snapshot()
+    }
+
     pub fn text_summary(&self) -> TextSummary {
         self.text_summary_for_range(TabPoint::zero()..self.max_point())
     }
@@ -155,7 +148,7 @@ impl Snapshot {
         &'a self,
         range: Range<TabPoint>,
         theme: Option<&'a SyntaxTheme>,
-    ) -> Chunks<'a> {
+    ) -> TabChunks<'a> {
         let (input_start, expanded_char_column, to_next_stop) =
             self.to_fold_point(range.start, Bias::Left);
         let input_start = input_start.to_offset(&self.fold_snapshot);
@@ -169,7 +162,7 @@ impl Snapshot {
             to_next_stop
         };
 
-        Chunks {
+        TabChunks {
             fold_chunks: self.fold_snapshot.chunks(input_start..input_end, theme),
             column: expanded_char_column,
             output_position: range.start.0,
@@ -183,7 +176,7 @@ impl Snapshot {
         }
     }
 
-    pub fn buffer_rows(&self, row: u32) -> fold_map::BufferRows {
+    pub fn buffer_rows(&self, row: u32) -> fold_map::FoldBufferRows {
         self.fold_snapshot.buffer_rows(row)
     }
 
@@ -322,11 +315,7 @@ impl From<super::Point> for TabPoint {
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct Edit {
-    pub old_lines: Range<TabPoint>,
-    pub new_lines: Range<TabPoint>,
-}
+pub type TabEdit = text::Edit<TabPoint>;
 
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub struct TextSummary {
@@ -380,8 +369,8 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
 // Handles a tab width <= 16
 const SPACES: &'static str = "                ";
 
-pub struct Chunks<'a> {
-    fold_chunks: fold_map::Chunks<'a>,
+pub struct TabChunks<'a> {
+    fold_chunks: fold_map::FoldChunks<'a>,
     chunk: Chunk<'a>,
     column: usize,
     output_position: Point,
@@ -390,7 +379,7 @@ pub struct Chunks<'a> {
     skip_leading_tab: bool,
 }
 
-impl<'a> Iterator for Chunks<'a> {
+impl<'a> Iterator for TabChunks<'a> {
     type Item = Chunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -450,28 +439,29 @@ impl<'a> Iterator for Chunks<'a> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::display_map::fold_map::FoldMap;
-    use language::Buffer;
+    use crate::{display_map::fold_map::FoldMap, MultiBuffer};
     use rand::{prelude::StdRng, Rng};
     use text::{RandomCharIter, Rope};
 
     #[test]
     fn test_expand_tabs() {
-        assert_eq!(Snapshot::expand_tabs("\t".chars(), 0, 4), 0);
-        assert_eq!(Snapshot::expand_tabs("\t".chars(), 1, 4), 4);
-        assert_eq!(Snapshot::expand_tabs("\ta".chars(), 2, 4), 5);
+        assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 0, 4), 0);
+        assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 1, 4), 4);
+        assert_eq!(TabSnapshot::expand_tabs("\ta".chars(), 2, 4), 5);
     }
 
     #[gpui::test(iterations = 100)]
-    fn test_random(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
+    fn test_random_tabs(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
         let tab_size = rng.gen_range(1..=4);
-        let buffer = cx.add_model(|cx| {
-            let len = rng.gen_range(0..30);
+        let len = rng.gen_range(0..30);
+        let buffer = if rng.gen() {
             let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
-            Buffer::new(0, text, cx)
-        });
-        let buffer_snapshot = buffer.read(cx).snapshot();
-        log::info!("Buffer text: {:?}", buffer.read(cx).text());
+            MultiBuffer::build_simple(&text, cx)
+        } else {
+            MultiBuffer::build_random(&mut rng, cx)
+        };
+        let buffer_snapshot = buffer.read(cx).snapshot(cx);
+        log::info!("Buffer text: {:?}", buffer_snapshot.text());
 
         let (mut fold_map, _) = FoldMap::new(buffer_snapshot.clone());
         fold_map.randomly_mutate(&mut rng);
@@ -502,13 +492,15 @@ mod tests {
                 .chunks_in_range(text.point_to_offset(start.0)..text.point_to_offset(end.0))
                 .collect::<String>();
             let expected_summary = TextSummary::from(expected_text.as_str());
-            log::info!("slicing {:?}..{:?} (text: {:?})", start, end, text);
             assert_eq!(
                 expected_text,
                 tabs_snapshot
                     .chunks(start..end, None)
                     .map(|c| c.text)
-                    .collect::<String>()
+                    .collect::<String>(),
+                "chunks({:?}..{:?})",
+                start,
+                end
             );
 
             let mut actual_summary = tabs_snapshot.text_summary_for_range(start..end);
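
tab_map.rs above and wrap_map.rs below follow the same renaming scheme
(TabSnapshot, TabChunks, WrapSnapshot, WrapChunks, WrapBufferRows), and each
layer's edit type collapses into an alias of the shared text::Edit, keyed by
that layer's coordinate space. Collected from the diffs in this change:

    pub type FoldEdit = Edit<FoldOffset>;    // byte offsets in fold space
    pub type TabEdit = text::Edit<TabPoint>; // points after tab expansion
    pub type WrapEdit = text::Edit<u32>;     // wrapped display rows

Each snapshot also exposes buffer_snapshot(), so code holding a tab or wrap
snapshot can reach the underlying MultiBufferSnapshot without threading it
through separately.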

crates/editor/src/display_map/wrap_map.rs 🔗

@@ -1,24 +1,25 @@
 use super::{
     fold_map,
-    tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint},
+    tab_map::{self, TabEdit, TabPoint, TabSnapshot},
 };
+use crate::{MultiBufferSnapshot, Point};
 use gpui::{
     fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, ModelHandle, MutableAppContext,
     Task,
 };
-use language::{Chunk, Point};
+use language::Chunk;
 use lazy_static::lazy_static;
 use smol::future::yield_now;
-use std::{collections::VecDeque, mem, ops::Range, time::Duration};
+use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
 use sum_tree::{Bias, Cursor, SumTree};
 use text::Patch;
 use theme::SyntaxTheme;
 
 pub use super::tab_map::TextSummary;
-pub type Edit = text::Edit<u32>;
+pub type WrapEdit = text::Edit<u32>;
 
 pub struct WrapMap {
-    snapshot: Snapshot,
+    snapshot: WrapSnapshot,
     pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
     interpolated_edits: Patch<u32>,
     edits_since_sync: Patch<u32>,
@@ -32,7 +33,7 @@ impl Entity for WrapMap {
 }
 
 #[derive(Clone)]
-pub struct Snapshot {
+pub struct WrapSnapshot {
     tab_snapshot: TabSnapshot,
     transforms: SumTree<Transform>,
     interpolated: bool,
@@ -53,17 +54,17 @@ struct TransformSummary {
 #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
 pub struct WrapPoint(pub super::Point);
 
-pub struct Chunks<'a> {
-    input_chunks: tab_map::Chunks<'a>,
+pub struct WrapChunks<'a> {
+    input_chunks: tab_map::TabChunks<'a>,
     input_chunk: Chunk<'a>,
     output_position: WrapPoint,
     max_output_row: u32,
     transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
 }
 
-pub struct BufferRows<'a> {
-    input_buffer_rows: fold_map::BufferRows<'a>,
-    input_buffer_row: u32,
+pub struct WrapBufferRows<'a> {
+    input_buffer_rows: fold_map::FoldBufferRows<'a>,
+    input_buffer_row: Option<u32>,
     output_row: u32,
     soft_wrapped: bool,
     max_output_row: u32,
@@ -77,7 +78,7 @@ impl WrapMap {
         font_size: f32,
         wrap_width: Option<f32>,
         cx: &mut MutableAppContext,
-    ) -> (ModelHandle<Self>, Snapshot) {
+    ) -> (ModelHandle<Self>, WrapSnapshot) {
         let handle = cx.add_model(|cx| {
             let mut this = Self {
                 font: (font_id, font_size),
@@ -85,7 +86,7 @@ impl WrapMap {
                 pending_edits: Default::default(),
                 interpolated_edits: Default::default(),
                 edits_since_sync: Default::default(),
-                snapshot: Snapshot::new(tab_snapshot),
+                snapshot: WrapSnapshot::new(tab_snapshot),
                 background_task: None,
             };
             this.set_wrap_width(wrap_width, cx);
@@ -106,7 +107,7 @@ impl WrapMap {
         tab_snapshot: TabSnapshot,
         edits: Vec<TabEdit>,
         cx: &mut ModelContext<Self>,
-    ) -> (Snapshot, Vec<Edit>) {
+    ) -> (WrapSnapshot, Vec<WrapEdit>) {
         if self.wrap_width.is_some() {
             self.pending_edits.push_back((tab_snapshot, edits));
             self.flush_edits(cx);
@@ -157,8 +158,8 @@ impl WrapMap {
                     .update(
                         tab_snapshot,
                         &[TabEdit {
-                            old_lines: range.clone(),
-                            new_lines: range.clone(),
+                            old: range.clone(),
+                            new: range.clone(),
                         }],
                         wrap_width,
                         &mut line_wrapper,
@@ -203,7 +204,7 @@ impl WrapMap {
             }
             let new_rows = self.snapshot.transforms.summary().output.lines.row + 1;
             self.snapshot.interpolated = false;
-            self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![Edit {
+            self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![WrapEdit {
                 old: 0..old_rows,
                 new: 0..new_rows,
             }]));
@@ -291,7 +292,7 @@ impl WrapMap {
     }
 }
 
-impl Snapshot {
+impl WrapSnapshot {
     fn new(tab_snapshot: TabSnapshot) -> Self {
         let mut transforms = SumTree::new();
         let extent = tab_snapshot.text_summary();
@@ -305,6 +306,10 @@ impl Snapshot {
         }
     }
 
+    pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
+        self.tab_snapshot.buffer_snapshot()
+    }
+
     fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
         let mut new_transforms;
         if tab_edits.is_empty() {
@@ -313,47 +318,44 @@ impl Snapshot {
             let mut old_cursor = self.transforms.cursor::<TabPoint>();
 
             let mut tab_edits_iter = tab_edits.iter().peekable();
-            new_transforms = old_cursor.slice(
-                &tab_edits_iter.peek().unwrap().old_lines.start,
-                Bias::Right,
-                &(),
-            );
+            new_transforms =
+                old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &());
 
             while let Some(edit) = tab_edits_iter.next() {
-                if edit.new_lines.start > TabPoint::from(new_transforms.summary().input.lines) {
+                if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) {
                     let summary = new_tab_snapshot.text_summary_for_range(
-                        TabPoint::from(new_transforms.summary().input.lines)..edit.new_lines.start,
+                        TabPoint::from(new_transforms.summary().input.lines)..edit.new.start,
                     );
                     new_transforms.push_or_extend(Transform::isomorphic(summary));
                 }
 
-                if !edit.new_lines.is_empty() {
+                if !edit.new.is_empty() {
                     new_transforms.push_or_extend(Transform::isomorphic(
-                        new_tab_snapshot.text_summary_for_range(edit.new_lines.clone()),
+                        new_tab_snapshot.text_summary_for_range(edit.new.clone()),
                     ));
                 }
 
-                old_cursor.seek_forward(&edit.old_lines.end, Bias::Right, &());
+                old_cursor.seek_forward(&edit.old.end, Bias::Right, &());
                 if let Some(next_edit) = tab_edits_iter.peek() {
-                    if next_edit.old_lines.start > old_cursor.end(&()) {
-                        if old_cursor.end(&()) > edit.old_lines.end {
+                    if next_edit.old.start > old_cursor.end(&()) {
+                        if old_cursor.end(&()) > edit.old.end {
                             let summary = self
                                 .tab_snapshot
-                                .text_summary_for_range(edit.old_lines.end..old_cursor.end(&()));
+                                .text_summary_for_range(edit.old.end..old_cursor.end(&()));
                             new_transforms.push_or_extend(Transform::isomorphic(summary));
                         }
 
                         old_cursor.next(&());
                         new_transforms.push_tree(
-                            old_cursor.slice(&next_edit.old_lines.start, Bias::Right, &()),
+                            old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
                             &(),
                         );
                     }
                 } else {
-                    if old_cursor.end(&()) > edit.old_lines.end {
+                    if old_cursor.end(&()) > edit.old.end {
                         let summary = self
                             .tab_snapshot
-                            .text_summary_for_range(edit.old_lines.end..old_cursor.end(&()));
+                            .text_summary_for_range(edit.old.end..old_cursor.end(&()));
                         new_transforms.push_or_extend(Transform::isomorphic(summary));
                     }
                     old_cursor.next(&());
@@ -364,7 +366,7 @@ impl Snapshot {
 
         let old_snapshot = mem::replace(
             self,
-            Snapshot {
+            WrapSnapshot {
                 tab_snapshot: new_tab_snapshot,
                 transforms: new_transforms,
                 interpolated: true,
@@ -391,14 +393,14 @@ impl Snapshot {
         let mut row_edits = Vec::new();
         while let Some(edit) = tab_edits_iter.next() {
             let mut row_edit = RowEdit {
-                old_rows: edit.old_lines.start.row()..edit.old_lines.end.row() + 1,
-                new_rows: edit.new_lines.start.row()..edit.new_lines.end.row() + 1,
+                old_rows: edit.old.start.row()..edit.old.end.row() + 1,
+                new_rows: edit.new.start.row()..edit.new.end.row() + 1,
             };
 
             while let Some(next_edit) = tab_edits_iter.peek() {
-                if next_edit.old_lines.start.row() <= row_edit.old_rows.end {
-                    row_edit.old_rows.end = next_edit.old_lines.end.row() + 1;
-                    row_edit.new_rows.end = next_edit.new_lines.end.row() + 1;
+                if next_edit.old.start.row() <= row_edit.old_rows.end {
+                    row_edit.old_rows.end = next_edit.old.end.row() + 1;
+                    row_edit.new_rows.end = next_edit.new.end.row() + 1;
                     tab_edits_iter.next();
                 } else {
                     break;
@@ -513,7 +515,7 @@ impl Snapshot {
 
         let old_snapshot = mem::replace(
             self,
-            Snapshot {
+            WrapSnapshot {
                 tab_snapshot: new_tab_snapshot,
                 transforms: new_transforms,
                 interpolated: false,
@@ -523,33 +525,33 @@ impl Snapshot {
         old_snapshot.compute_edits(tab_edits, self)
     }
 
-    fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &Snapshot) -> Patch<u32> {
+    fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch<u32> {
         let mut wrap_edits = Vec::new();
         let mut old_cursor = self.transforms.cursor::<TransformSummary>();
         let mut new_cursor = new_snapshot.transforms.cursor::<TransformSummary>();
         for mut tab_edit in tab_edits.iter().cloned() {
-            tab_edit.old_lines.start.0.column = 0;
-            tab_edit.old_lines.end.0 += Point::new(1, 0);
-            tab_edit.new_lines.start.0.column = 0;
-            tab_edit.new_lines.end.0 += Point::new(1, 0);
+            tab_edit.old.start.0.column = 0;
+            tab_edit.old.end.0 += Point::new(1, 0);
+            tab_edit.new.start.0.column = 0;
+            tab_edit.new.end.0 += Point::new(1, 0);
 
-            old_cursor.seek(&tab_edit.old_lines.start, Bias::Right, &());
+            old_cursor.seek(&tab_edit.old.start, Bias::Right, &());
             let mut old_start = old_cursor.start().output.lines;
-            old_start += tab_edit.old_lines.start.0 - old_cursor.start().input.lines;
+            old_start += tab_edit.old.start.0 - old_cursor.start().input.lines;
 
-            old_cursor.seek(&tab_edit.old_lines.end, Bias::Right, &());
+            old_cursor.seek(&tab_edit.old.end, Bias::Right, &());
             let mut old_end = old_cursor.start().output.lines;
-            old_end += tab_edit.old_lines.end.0 - old_cursor.start().input.lines;
+            old_end += tab_edit.old.end.0 - old_cursor.start().input.lines;
 
-            new_cursor.seek(&tab_edit.new_lines.start, Bias::Right, &());
+            new_cursor.seek(&tab_edit.new.start, Bias::Right, &());
             let mut new_start = new_cursor.start().output.lines;
-            new_start += tab_edit.new_lines.start.0 - new_cursor.start().input.lines;
+            new_start += tab_edit.new.start.0 - new_cursor.start().input.lines;
 
-            new_cursor.seek(&tab_edit.new_lines.end, Bias::Right, &());
+            new_cursor.seek(&tab_edit.new.end, Bias::Right, &());
             let mut new_end = new_cursor.start().output.lines;
-            new_end += tab_edit.new_lines.end.0 - new_cursor.start().input.lines;
+            new_end += tab_edit.new.end.0 - new_cursor.start().input.lines;
 
-            wrap_edits.push(Edit {
+            wrap_edits.push(WrapEdit {
                 old: old_start.row..old_end.row,
                 new: new_start.row..new_end.row,
             });
@@ -564,7 +566,11 @@ impl Snapshot {
             .map(|h| h.text)
     }
 
-    pub fn chunks<'a>(&'a self, rows: Range<u32>, theme: Option<&'a SyntaxTheme>) -> Chunks<'a> {
+    pub fn chunks<'a>(
+        &'a self,
+        rows: Range<u32>,
+        theme: Option<&'a SyntaxTheme>,
+    ) -> WrapChunks<'a> {
         let output_start = WrapPoint::new(rows.start, 0);
         let output_end = WrapPoint::new(rows.end, 0);
         let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
@@ -576,7 +582,7 @@ impl Snapshot {
         let input_end = self
             .to_tab_point(output_end)
             .min(self.tab_snapshot.max_point());
-        Chunks {
+        WrapChunks {
             input_chunks: self.tab_snapshot.chunks(input_start..input_end, theme),
             input_chunk: Default::default(),
             output_position: output_start,
@@ -622,7 +628,7 @@ impl Snapshot {
         self.transforms.summary().output.longest_row
     }
 
-    pub fn buffer_rows(&self, start_row: u32) -> BufferRows {
+    pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows {
         let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>();
         transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
         let mut input_row = transforms.start().1.row();
@@ -632,7 +638,7 @@ impl Snapshot {
         let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic());
         let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row);
         let input_buffer_row = input_buffer_rows.next().unwrap();
-        BufferRows {
+        WrapBufferRows {
             transforms,
             input_buffer_row,
             input_buffer_rows,
@@ -666,6 +672,15 @@ impl Snapshot {
         WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0))
     }
 
+    pub fn from_tab_point_with_clipping(&self, point: TabPoint, bias: Bias) -> WrapPoint {
+        let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>();
+        cursor.seek(&point, bias, &());
+        self.clip_point(
+            WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)),
+            bias,
+        )
+    }
+
     pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint {
         if bias == Bias::Left {
             let mut cursor = self.transforms.cursor::<WrapPoint>();
@@ -679,6 +694,46 @@ impl Snapshot {
         self.from_tab_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
     }
 
+    pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
+        if self.transforms.is_empty() {
+            return 0;
+        }
+
+        *point.column_mut() = 0;
+
+        let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>();
+        cursor.seek(&point, Bias::Right, &());
+        if cursor.item().is_none() {
+            cursor.prev(&());
+        }
+
+        while let Some(transform) = cursor.item() {
+            if transform.is_isomorphic() && cursor.start().1.column() == 0 {
+                return cmp::min(cursor.end(&()).0.row(), point.row());
+            } else {
+                cursor.prev(&());
+            }
+        }
+
+        unreachable!()
+    }
+
+    pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<u32> {
+        point.0 += Point::new(1, 0);
+
+        let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>();
+        cursor.seek(&point, Bias::Right, &());
+        while let Some(transform) = cursor.item() {
+            if transform.is_isomorphic() && cursor.start().1.column() == 0 {
+                return Some(cmp::max(cursor.start().0.row(), point.row()));
+            } else {
+                cursor.next(&());
+            }
+        }
+
+        None
+    }
+
     fn check_invariants(&self) {
         #[cfg(test)]
         {
@@ -696,22 +751,19 @@ impl Snapshot {
                 }
             }
 
+            let input_buffer_rows = self.buffer_snapshot().buffer_rows(0).collect::<Vec<_>>();
             let mut expected_buffer_rows = Vec::new();
-            let mut buffer_row = 0;
             let mut prev_tab_row = 0;
             for display_row in 0..=self.max_point().row() {
                 let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
-                let soft_wrapped;
-                if tab_point.row() == prev_tab_row {
-                    soft_wrapped = display_row != 0;
+                if tab_point.row() == prev_tab_row && display_row != 0 {
+                    expected_buffer_rows.push(None);
                 } else {
                     let fold_point = self.tab_snapshot.to_fold_point(tab_point, Bias::Left).0;
                     let buffer_point = fold_point.to_buffer_point(&self.tab_snapshot.fold_snapshot);
-                    buffer_row = buffer_point.row;
+                    expected_buffer_rows.push(input_buffer_rows[buffer_point.row as usize]);
                     prev_tab_row = tab_point.row();
-                    soft_wrapped = false;
                 }
-                expected_buffer_rows.push(if soft_wrapped { None } else { Some(buffer_row) });
             }
 
             for start_display_row in 0..expected_buffer_rows.len() {
@@ -727,7 +779,7 @@ impl Snapshot {
     }
 }
 
-impl<'a> Iterator for Chunks<'a> {
+impl<'a> Iterator for WrapChunks<'a> {
     type Item = Chunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -790,7 +842,7 @@ impl<'a> Iterator for Chunks<'a> {
     }
 }
 
-impl<'a> Iterator for BufferRows<'a> {
+impl<'a> Iterator for WrapBufferRows<'a> {
     type Item = Option<u32>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -811,7 +863,7 @@ impl<'a> Iterator for BufferRows<'a> {
             self.soft_wrapped = true;
         }
 
-        Some(if soft_wrapped { None } else { Some(buffer_row) })
+        Some(if soft_wrapped { None } else { buffer_row })
     }
 }
 
@@ -951,7 +1003,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint {
     }
 }
 
-fn consolidate_wrap_edits(edits: &mut Vec<Edit>) {
+fn consolidate_wrap_edits(edits: &mut Vec<WrapEdit>) {
     let mut i = 1;
     while i < edits.len() {
         let edit = edits[i].clone();
@@ -972,8 +1024,9 @@ mod tests {
     use crate::{
         display_map::{fold_map::FoldMap, tab_map::TabMap},
         test::Observer,
+        MultiBuffer,
     };
-    use language::{Buffer, RandomCharIter};
+    use language::RandomCharIter;
     use rand::prelude::*;
     use std::{cmp, env};
     use text::Rope;
@@ -1003,18 +1056,19 @@ mod tests {
         log::info!("Tab size: {}", tab_size);
         log::info!("Wrap width: {:?}", wrap_width);
 
-        let buffer = cx.add_model(|cx| {
-            let len = rng.gen_range(0..10);
-            let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
-            Buffer::new(0, text, cx)
+        let buffer = cx.update(|cx| {
+            if rng.gen() {
+                MultiBuffer::build_random(&mut rng, cx)
+            } else {
+                let len = rng.gen_range(0..10);
+                let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
+                MultiBuffer::build_simple(&text, cx)
+            }
         });
-        let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
+        let mut buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx));
         let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
-        log::info!(
-            "Unwrapped text (no folds): {:?}",
-            buffer.read_with(&cx, |buf, _| buf.text())
-        );
+        log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text());
         log::info!(
             "Unwrapped text (unexpanded tabs): {:?}",
             folds_snapshot.text()
@@ -1073,17 +1127,17 @@ mod tests {
                 }
                 _ => {
                     buffer.update(&mut cx, |buffer, cx| {
-                        let v0 = buffer.version();
+                        let subscription = buffer.subscribe();
                         let edit_count = rng.gen_range(1..=5);
                         buffer.randomly_edit(&mut rng, edit_count, cx);
-                        buffer_edits.extend(buffer.edits_since(&v0));
+                        buffer_snapshot = buffer.snapshot(cx);
+                        buffer_edits.extend(subscription.consume());
                     });
                 }
             }
 
-            let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
             log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text());
-            let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits);
+            let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot.clone(), buffer_edits);
             log::info!(
                 "Unwrapped text (unexpanded tabs): {:?}",
                 folds_snapshot.text()
@@ -1224,7 +1278,7 @@ mod tests {
         }
     }
 
-    impl Snapshot {
+    impl WrapSnapshot {
         pub fn text(&self) -> String {
             self.text_chunks(0).collect()
         }

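Note on the two new row-boundary queries added above: a display row counts as a boundary only when it starts an isomorphic transform at column zero, i.e. when it is not a soft-wrap continuation (the same rows for which `WrapBufferRows` yields `Some(buffer_row)` rather than `None`). A minimal standalone sketch of that intent, using a plain slice in place of the transform sum-tree (hypothetical helper names, not the actual implementation):

```rust
// Model: `soft_wrapped[r]` is true when display row `r` is a soft-wrap continuation.
fn prev_row_boundary(soft_wrapped: &[bool], row: usize) -> usize {
    if soft_wrapped.is_empty() {
        return 0;
    }
    // Scan backward for the nearest row that starts a real (non-wrapped) line.
    (0..=row.min(soft_wrapped.len() - 1))
        .rev()
        .find(|&r| !soft_wrapped[r])
        .unwrap_or(0)
}

fn next_row_boundary(soft_wrapped: &[bool], row: usize) -> Option<usize> {
    // Scan forward, strictly past `row`, for the next non-wrapped line.
    ((row + 1)..soft_wrapped.len()).find(|&r| !soft_wrapped[r])
}

fn main() {
    // Rows 1 and 2 are soft-wrap continuations of row 0.
    let soft_wrapped = [false, true, true, false, true];
    assert_eq!(prev_row_boundary(&soft_wrapped, 2), 0);
    assert_eq!(next_row_boundary(&soft_wrapped, 2), Some(3));
    assert_eq!(next_row_boundary(&soft_wrapped, 4), None);
}
```

In the real wrap map these scans walk the transform cursor rather than a slice, which is why `prev_row_boundary` can fall back to row 0 when the transforms are empty.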
crates/editor/src/editor.rs 🔗

@@ -2,37 +2,45 @@ pub mod display_map;
 mod element;
 pub mod items;
 pub mod movement;
+mod multi_buffer;
 
 #[cfg(test)]
 mod test;
 
 use aho_corasick::AhoCorasick;
 use clock::ReplicaId;
+use collections::{HashMap, HashSet};
 pub use display_map::DisplayPoint;
 use display_map::*;
 pub use element::*;
 use gpui::{
     action,
-    elements::Text,
+    elements::*,
+    fonts::TextStyle,
     geometry::vector::{vec2f, Vector2F},
     keymap::Binding,
     text_layout, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle,
-    MutableAppContext, RenderContext, View, ViewContext, WeakViewHandle,
+    MutableAppContext, RenderContext, View, ViewContext, WeakModelHandle, WeakViewHandle,
 };
 use items::BufferItemHandle;
-use language::*;
+use itertools::Itertools as _;
+use language::{
+    BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal,
+    TransactionId,
+};
+pub use multi_buffer::{Anchor, ExcerptId, ExcerptProperties, MultiBuffer};
+use multi_buffer::{AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint};
+use postage::watch;
 use serde::{Deserialize, Serialize};
 use smallvec::SmallVec;
 use smol::Timer;
 use std::{
-    cell::RefCell,
     cmp,
-    collections::HashMap,
-    iter, mem,
-    ops::{Deref, Range, RangeInclusive},
-    rc::Rc,
+    iter::{self, FromIterator},
+    mem,
+    ops::{Deref, Range, RangeInclusive, Sub},
     sync::Arc,
-    time::Duration,
+    time::{Duration, Instant},
 };
 use sum_tree::Bias;
 use text::rope::TextDimension;
@@ -273,11 +281,13 @@ pub fn init(cx: &mut MutableAppContext, entry_openers: &mut Vec<Box<dyn EntryOpe
 }
 
 trait SelectionExt {
-    fn display_range(&self, map: &DisplayMapSnapshot) -> Range<DisplayPoint>;
+    fn offset_range(&self, buffer: &MultiBufferSnapshot) -> Range<usize>;
+    fn point_range(&self, buffer: &MultiBufferSnapshot) -> Range<Point>;
+    fn display_range(&self, map: &DisplaySnapshot) -> Range<DisplayPoint>;
     fn spanned_rows(
         &self,
         include_end_if_at_line_start: bool,
-        map: &DisplayMapSnapshot,
+        map: &DisplaySnapshot,
     ) -> SpannedRows;
 }
 
@@ -345,23 +355,27 @@ pub enum SoftWrap {
     Column(u32),
 }
 
+pub type BuildSettings = Arc<dyn 'static + Send + Sync + Fn(&AppContext) -> EditorSettings>;
+
 pub struct Editor {
     handle: WeakViewHandle<Self>,
-    buffer: ModelHandle<Buffer>,
+    buffer: ModelHandle<MultiBuffer>,
     display_map: ModelHandle<DisplayMap>,
-    selection_set_id: SelectionSetId,
+    next_selection_id: usize,
+    selections: Arc<[Selection<Anchor>]>,
     pending_selection: Option<PendingSelection>,
     columnar_selection_tail: Option<Anchor>,
-    next_selection_id: usize,
     add_selections_state: Option<AddSelectionsState>,
     select_next_state: Option<SelectNextState>,
+    selection_history:
+        HashMap<TransactionId, (Arc<[Selection<Anchor>]>, Option<Arc<[Selection<Anchor>]>>)>,
     autoclose_stack: Vec<BracketPairState>,
     select_larger_syntax_node_stack: Vec<Box<[Selection<usize>]>>,
     active_diagnostics: Option<ActiveDiagnosticGroup>,
     scroll_position: Vector2F,
     scroll_top_anchor: Anchor,
     autoscroll_request: Option<Autoscroll>,
-    build_settings: Rc<RefCell<dyn Fn(&AppContext) -> EditorSettings>>,
+    build_settings: BuildSettings,
     focused: bool,
     show_local_cursors: bool,
     blink_epoch: usize,
@@ -371,9 +385,9 @@ pub struct Editor {
     highlighted_row: Option<u32>,
 }
 
-pub struct Snapshot {
+pub struct EditorSnapshot {
     pub mode: EditorMode,
-    pub display_snapshot: DisplayMapSnapshot,
+    pub display_snapshot: DisplaySnapshot,
     pub placeholder_text: Option<Arc<str>>,
     is_focused: bool,
     scroll_position: Vector2F,
@@ -398,7 +412,7 @@ struct SelectNextState {
 
 #[derive(Debug)]
 struct BracketPairState {
-    ranges: AnchorRangeSet,
+    ranges: Vec<Range<Anchor>>,
     pair: BracketPair,
 }
 
@@ -417,11 +431,9 @@ struct ClipboardSelection {
 }
 
 impl Editor {
-    pub fn single_line(
-        build_settings: impl 'static + Fn(&AppContext) -> EditorSettings,
-        cx: &mut ViewContext<Self>,
-    ) -> Self {
+    pub fn single_line(build_settings: BuildSettings, cx: &mut ViewContext<Self>) -> Self {
         let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
+        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
         let mut view = Self::for_buffer(buffer, build_settings, cx);
         view.mode = EditorMode::SingleLine;
         view
@@ -429,21 +441,22 @@ impl Editor {
 
     pub fn auto_height(
         max_lines: usize,
-        build_settings: impl 'static + Fn(&AppContext) -> EditorSettings,
+        build_settings: BuildSettings,
         cx: &mut ViewContext<Self>,
     ) -> Self {
         let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
+        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
         let mut view = Self::for_buffer(buffer, build_settings, cx);
         view.mode = EditorMode::AutoHeight { max_lines };
         view
     }
 
     pub fn for_buffer(
-        buffer: ModelHandle<Buffer>,
-        build_settings: impl 'static + Fn(&AppContext) -> EditorSettings,
+        buffer: ModelHandle<MultiBuffer>,
+        build_settings: BuildSettings,
         cx: &mut ViewContext<Self>,
     ) -> Self {
-        Self::new(buffer, Rc::new(RefCell::new(build_settings)), cx)
+        Self::new(buffer, build_settings, cx)
     }
 
     pub fn clone(&self, cx: &mut ViewContext<Self>) -> Self {
@@ -454,11 +467,11 @@ impl Editor {
     }
 
     pub fn new(
-        buffer: ModelHandle<Buffer>,
-        build_settings: Rc<RefCell<dyn Fn(&AppContext) -> EditorSettings>>,
+        buffer: ModelHandle<MultiBuffer>,
+        build_settings: BuildSettings,
         cx: &mut ViewContext<Self>,
     ) -> Self {
-        let settings = build_settings.borrow_mut()(cx);
+        let settings = build_settings(cx);
         let display_map = cx.add_model(|cx| {
             DisplayMap::new(
                 buffer.clone(),
@@ -475,28 +488,27 @@ impl Editor {
             .detach();
 
         let mut next_selection_id = 0;
-        let selection_set_id = buffer.update(cx, |buffer, cx| {
-            buffer.add_selection_set(
-                &[Selection {
-                    id: post_inc(&mut next_selection_id),
-                    start: 0,
-                    end: 0,
-                    reversed: false,
-                    goal: SelectionGoal::None,
-                }],
-                cx,
-            )
-        });
+        let selections = Arc::from(
+            &[Selection {
+                id: post_inc(&mut next_selection_id),
+                start: Anchor::min(),
+                end: Anchor::min(),
+                reversed: false,
+                goal: SelectionGoal::None,
+            }][..],
+        );
+
         Self {
             handle: cx.weak_handle(),
             buffer,
             display_map,
-            selection_set_id,
+            selections,
             pending_selection: None,
             columnar_selection_tail: None,
             next_selection_id,
             add_selections_state: None,
             select_next_state: None,
+            selection_history: Default::default(),
             autoclose_stack: Default::default(),
             select_larger_syntax_node_stack: Vec::new(),
             active_diagnostics: None,
@@ -522,6 +534,7 @@ impl Editor {
         let buffer = cx.add_model(|cx| {
             Buffer::new(0, "", cx).with_language(Some(language::PLAIN_TEXT.clone()), None, cx)
         });
+        let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
         workspace.add_item(BufferItemHandle(buffer), cx);
     }
 
@@ -529,12 +542,12 @@ impl Editor {
         self.buffer.read(cx).replica_id()
     }
 
-    pub fn buffer(&self) -> &ModelHandle<Buffer> {
+    pub fn buffer(&self) -> &ModelHandle<MultiBuffer> {
         &self.buffer
     }
 
-    pub fn snapshot(&mut self, cx: &mut MutableAppContext) -> Snapshot {
-        Snapshot {
+    pub fn snapshot(&mut self, cx: &mut MutableAppContext) -> EditorSnapshot {
+        EditorSnapshot {
             mode: self.mode,
             display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)),
             scroll_position: self.scroll_position,
@@ -548,7 +561,7 @@ impl Editor {
     }
 
     pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc<Language>> {
-        self.buffer.read(cx).language()
+        self.buffer.read(cx).language(cx)
     }
 
     pub fn set_placeholder_text(
@@ -564,20 +577,14 @@ impl Editor {
         let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
         let scroll_top_buffer_offset =
             DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
-        self.scroll_top_anchor = self
-            .buffer
-            .read(cx)
+        self.scroll_top_anchor = map
+            .buffer_snapshot
             .anchor_at(scroll_top_buffer_offset, Bias::Right);
         self.scroll_position = vec2f(
             scroll_position.x(),
             scroll_position.y() - self.scroll_top_anchor.to_display_point(&map).row() as f32,
         );
 
-        debug_assert_eq!(
-            compute_scroll_position(&map, self.scroll_position, &self.scroll_top_anchor),
-            scroll_position
-        );
-
         cx.notify();
     }
 
@@ -624,13 +631,13 @@ impl Editor {
         let first_cursor_top;
         let last_cursor_bottom;
         if autoscroll == Autoscroll::Newest {
-            let newest_selection = self.newest_selection::<Point>(cx);
+            let newest_selection = self.newest_selection::<Point>(&display_map.buffer_snapshot);
             first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
             last_cursor_bottom = first_cursor_top + 1.;
         } else {
-            let mut selections = self.selections::<Point>(cx).peekable();
+            let selections = self.local_selections::<Point>(cx);
             first_cursor_top = selections
-                .peek()
+                .first()
                 .unwrap()
                 .head()
                 .to_display_point(&display_map)
@@ -688,7 +695,7 @@ impl Editor {
         cx: &mut ViewContext<Self>,
     ) -> bool {
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let selections = self.selections::<Point>(cx);
+        let selections = self.local_selections::<Point>(cx);
         let mut target_left = std::f32::INFINITY;
         let mut target_right = 0.0_f32;
         for selection in selections {
@@ -756,14 +763,14 @@ impl Editor {
         click_count: usize,
         cx: &mut ViewContext<Self>,
     ) {
-        let tail = self.newest_selection::<usize>(cx).tail();
-
+        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+        let tail = self
+            .newest_selection::<usize>(&display_map.buffer_snapshot)
+            .tail();
         self.begin_selection(position, false, click_count, cx);
 
-        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let buffer = self.buffer.read(cx);
         let position = position.to_offset(&display_map, Bias::Left);
-        let tail_anchor = buffer.anchor_before(tail);
+        let tail_anchor = display_map.buffer_snapshot.anchor_before(tail);
         let pending = self.pending_selection.as_mut().unwrap();
 
         if position >= tail {
@@ -794,7 +801,7 @@ impl Editor {
         }
 
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let buffer = self.buffer.read(cx);
+        let buffer = &display_map.buffer_snapshot;
         let start;
         let end;
         let mode;
@@ -841,14 +848,10 @@ impl Editor {
             self.update_selections::<usize>(Vec::new(), None, cx);
         } else if click_count > 1 {
             // Remove the newest selection since it was only added as part of this multi-click.
-            let newest_selection = self.newest_selection::<usize>(cx);
-            self.update_selections::<usize>(
-                self.selections(cx)
-                    .filter(|selection| selection.id != newest_selection.id)
-                    .collect(),
-                None,
-                cx,
-            )
+            let newest_selection = self.newest_selection::<usize>(buffer);
+            let mut selections = self.local_selections(cx);
+            selections.retain(|selection| selection.id != newest_selection.id);
+            self.update_selections::<usize>(selections, None, cx)
         }
 
         self.pending_selection = Some(PendingSelection { selection, mode });
@@ -868,10 +871,10 @@ impl Editor {
         }
 
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let buffer = self.buffer.read(cx);
-
-        let tail = self.newest_selection::<Point>(cx).tail();
-        self.columnar_selection_tail = Some(buffer.anchor_before(tail));
+        let tail = self
+            .newest_selection::<Point>(&display_map.buffer_snapshot)
+            .tail();
+        self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail));
 
         self.select_columns(
             tail.to_display_point(&display_map),
@@ -895,13 +898,13 @@ impl Editor {
             let tail = tail.to_display_point(&display_map);
             self.select_columns(tail, position, overshoot, &display_map, cx);
         } else if let Some(PendingSelection { selection, mode }) = self.pending_selection.as_mut() {
-            let buffer = self.buffer.read(cx);
+            let buffer = self.buffer.read(cx).snapshot(cx);
             let head;
             let tail;
             match mode {
                 SelectMode::Character => {
                     head = position.to_point(&display_map);
-                    tail = selection.tail().to_point(buffer);
+                    tail = selection.tail().to_point(&buffer);
                 }
                 SelectMode::Word(original_range) => {
                     let original_display_range = original_range.start.to_display_point(&display_map)
@@ -976,7 +979,7 @@ impl Editor {
     fn end_selection(&mut self, cx: &mut ViewContext<Self>) {
         self.columnar_selection_tail.take();
         if self.pending_selection.is_some() {
-            let selections = self.selections::<usize>(cx).collect::<Vec<_>>();
+            let selections = self.local_selections::<usize>(cx);
             self.update_selections(selections, None, cx);
         }
     }
@@ -986,7 +989,7 @@ impl Editor {
         tail: DisplayPoint,
         head: DisplayPoint,
         overshoot: u32,
-        display_map: &DisplayMapSnapshot,
+        display_map: &DisplaySnapshot,
         cx: &mut ViewContext<Self>,
     ) {
         let start_row = cmp::min(tail.row(), head.row());
@@ -1029,20 +1032,21 @@ impl Editor {
         if self.active_diagnostics.is_some() {
             self.dismiss_diagnostics(cx);
         } else if let Some(PendingSelection { selection, .. }) = self.pending_selection.take() {
-            let buffer = self.buffer.read(cx);
+            let buffer = self.buffer.read(cx).snapshot(cx);
             let selection = Selection {
                 id: selection.id,
-                start: selection.start.to_point(buffer),
-                end: selection.end.to_point(buffer),
+                start: selection.start.to_point(&buffer),
+                end: selection.end.to_point(&buffer),
                 reversed: selection.reversed,
                 goal: selection.goal,
             };
-            if self.selections::<Point>(cx).next().is_none() {
+            if self.local_selections::<Point>(cx).is_empty() {
                 self.update_selections(vec![selection], Some(Autoscroll::Fit), cx);
             }
         } else {
-            let mut oldest_selection = self.oldest_selection::<usize>(cx);
-            if self.selection_count(cx) == 1 {
+            let buffer = self.buffer.read(cx).snapshot(cx);
+            let mut oldest_selection = self.oldest_selection::<usize>(&buffer);
+            if self.selection_count() == 1 {
                 oldest_selection.start = oldest_selection.head().clone();
                 oldest_selection.end = oldest_selection.head().clone();
             }
@@ -1059,12 +1063,12 @@ impl Editor {
         I: IntoIterator<Item = Range<T>>,
         T: ToOffset,
     {
-        let buffer = self.buffer.read(cx);
+        let buffer = self.buffer.read(cx).snapshot(cx);
         let selections = ranges
             .into_iter()
             .map(|range| {
-                let mut start = range.start.to_offset(buffer);
-                let mut end = range.end.to_offset(buffer);
+                let mut start = range.start.to_offset(&buffer);
+                let mut end = range.end.to_offset(&buffer);
                 let reversed = if start > end {
                     mem::swap(&mut start, &mut end);
                     true
@@ -1131,15 +1135,15 @@ impl Editor {
         self.start_transaction(cx);
         let mut old_selections = SmallVec::<[_; 32]>::new();
         {
-            let selections = self.selections::<Point>(cx).collect::<Vec<_>>();
-            let buffer = self.buffer.read(cx);
+            let selections = self.local_selections::<Point>(cx);
+            let buffer = self.buffer.read(cx).snapshot(cx);
             for selection in selections.iter() {
                 let start_point = selection.start;
                 let indent = buffer
                     .indent_column_for_line(start_point.row)
                     .min(start_point.column);
-                let start = selection.start.to_offset(buffer);
-                let end = selection.end.to_offset(buffer);
+                let start = selection.start.to_offset(&buffer);
+                let end = selection.end.to_offset(&buffer);
 
                 let mut insert_extra_newline = false;
                 if let Some(language) = buffer.language() {
@@ -1253,30 +1257,27 @@ impl Editor {
 
     pub fn insert(&mut self, text: &str, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
-        let old_selections = self.selections::<usize>(cx).collect::<SmallVec<[_; 32]>>();
-        let mut new_selections = Vec::new();
-        self.buffer.update(cx, |buffer, cx| {
+        let old_selections = self.local_selections::<usize>(cx);
+        let new_selections = self.buffer.update(cx, |buffer, cx| {
+            let snapshot = buffer.read(cx);
+            let new_selections = old_selections
+                .iter()
+                .map(|selection| Selection {
+                    id: selection.id,
+                    start: snapshot.anchor_after(selection.start),
+                    end: snapshot.anchor_after(selection.end),
+                    reversed: false,
+                    goal: SelectionGoal::None,
+                })
+                .collect::<Vec<_>>();
+
+            drop(snapshot);
             let edit_ranges = old_selections.iter().map(|s| s.start..s.end);
             buffer.edit_with_autoindent(edit_ranges, text, cx);
-            let text_len = text.len() as isize;
-            let mut delta = 0_isize;
-            new_selections = old_selections
-                .into_iter()
-                .map(|selection| {
-                    let start = selection.start as isize;
-                    let end = selection.end as isize;
-                    let cursor = (start + delta + text_len) as usize;
-                    let deleted_count = end - start;
-                    delta += text_len - deleted_count;
-                    Selection {
-                        id: selection.id,
-                        start: cursor,
-                        end: cursor,
-                        reversed: false,
-                        goal: SelectionGoal::None,
-                    }
-                })
-                .collect();
+
+            let snapshot = buffer.read(cx);
+            self.resolve_selections::<usize, _>(new_selections.iter(), &snapshot)
+                .collect()
         });
 
         self.update_selections(new_selections, Some(Autoscroll::Fit), cx);
@@ -1284,19 +1285,20 @@ impl Editor {
     }
 
     fn autoclose_pairs(&mut self, cx: &mut ViewContext<Self>) {
-        let selections = self.selections::<usize>(cx).collect::<Vec<_>>();
-        let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| {
-            let autoclose_pair = buffer.language().and_then(|language| {
+        let selections = self.local_selections::<usize>(cx);
+        let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| {
+            let snapshot = buffer.snapshot(cx);
+            let autoclose_pair = snapshot.language().and_then(|language| {
                 let first_selection_start = selections.first().unwrap().start;
                 let pair = language.brackets().iter().find(|pair| {
-                    buffer.contains_str_at(
+                    snapshot.contains_str_at(
                         first_selection_start.saturating_sub(pair.start.len()),
                         &pair.start,
                     )
                 });
                 pair.and_then(|pair| {
                     let should_autoclose = selections[1..].iter().all(|selection| {
-                        buffer.contains_str_at(
+                        snapshot.contains_str_at(
                             selection.start.saturating_sub(pair.start.len()),
                             &pair.start,
                         )
@@ -1314,25 +1316,25 @@ impl Editor {
                 let selection_ranges = selections
                     .iter()
                     .map(|selection| {
-                        let start = selection.start.to_offset(&*buffer);
+                        let start = selection.start.to_offset(&snapshot);
                         start..start
                     })
                     .collect::<SmallVec<[_; 32]>>();
 
                 buffer.edit(selection_ranges, &pair.end, cx);
+                let snapshot = buffer.snapshot(cx);
 
                 if pair.end.len() == 1 {
                     let mut delta = 0;
                     Some(BracketPairState {
-                        ranges: buffer.anchor_range_set(
-                            Bias::Left,
-                            Bias::Right,
-                            selections.iter().map(move |selection| {
+                        ranges: selections
+                            .iter()
+                            .map(move |selection| {
                                 let offset = selection.start + delta;
                                 delta += 1;
-                                offset..offset
-                            }),
-                        ),
+                                snapshot.anchor_before(offset)..snapshot.anchor_after(offset)
+                            })
+                            .collect(),
                         pair,
                     })
                 } else {
@@ -1340,28 +1342,28 @@ impl Editor {
                 }
             })
         });
-        self.autoclose_stack.extend(new_autoclose_pair_state);
+        self.autoclose_stack.extend(new_autoclose_pair);
     }
 
     fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
-        let old_selections = self.selections::<usize>(cx).collect::<Vec<_>>();
-        let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() {
-            autoclose_pair_state
+        let old_selections = self.local_selections::<usize>(cx);
+        let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() {
+            autoclose_pair
         } else {
             return false;
         };
-        if text != autoclose_pair_state.pair.end {
+        if text != autoclose_pair.pair.end {
             return false;
         }
 
-        debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len());
+        debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len());
 
-        let buffer = self.buffer.read(cx);
+        let buffer = self.buffer.read(cx).snapshot(cx);
         if old_selections
             .iter()
-            .zip(autoclose_pair_state.ranges.ranges::<usize>(buffer))
+            .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer)))
             .all(|(selection, autoclose_range)| {
-                let autoclose_range_end = autoclose_range.end.to_offset(buffer);
+                let autoclose_range_end = autoclose_range.end.to_offset(&buffer);
                 selection.is_empty() && selection.start == autoclose_range_end
             })
         {
@@ -1395,7 +1397,7 @@ impl Editor {
 
     pub fn backspace(&mut self, _: &Backspace, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
         for selection in &mut selections {
             if selection.is_empty() {
@@ -1415,7 +1417,7 @@ impl Editor {
     pub fn delete(&mut self, _: &Delete, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         for selection in &mut selections {
             if selection.is_empty() {
                 let head = selection.head().to_display_point(&display_map);
@@ -1433,14 +1435,16 @@ impl Editor {
 
     pub fn tab(&mut self, _: &Tab, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
-        let tab_size = self.build_settings.borrow()(cx).tab_size;
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let tab_size = (self.build_settings)(cx).tab_size;
+        let mut selections = self.local_selections::<Point>(cx);
         let mut last_indent = None;
         self.buffer.update(cx, |buffer, cx| {
             for selection in &mut selections {
                 if selection.is_empty() {
                     let char_column = buffer
-                        .chars_for_range(Point::new(selection.start.row, 0)..selection.start)
+                        .read(cx)
+                        .text_for_range(Point::new(selection.start.row, 0)..selection.start)
+                        .flat_map(str::chars)
                         .count();
                     let chars_to_next_tab_stop = tab_size - (char_column % tab_size);
                     buffer.edit(
@@ -1474,7 +1478,7 @@ impl Editor {
                     }
 
                     for row in start_row..end_row {
-                        let indent_column = buffer.indent_column_for_line(row) as usize;
+                        let indent_column = buffer.read(cx).indent_column_for_line(row) as usize;
                         let columns_to_next_tab_stop = tab_size - (indent_column % tab_size);
                         let row_start = Point::new(row, 0);
                         buffer.edit(
@@ -1503,11 +1507,12 @@ impl Editor {
 
     pub fn outdent(&mut self, _: &Outdent, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
-        let tab_size = self.build_settings.borrow()(cx).tab_size;
-        let selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let tab_size = (self.build_settings)(cx).tab_size;
+        let selections = self.local_selections::<Point>(cx);
         let mut deletion_ranges = Vec::new();
         let mut last_outdent = None;
-        self.buffer.update(cx, |buffer, cx| {
+        {
+            let buffer = self.buffer.read(cx).read(cx);
             for selection in &selections {
                 let mut start_row = selection.start.row;
                 let mut end_row = selection.end.row + 1;
@@ -1538,11 +1543,13 @@ impl Editor {
                     }
                 }
             }
+        }
+        self.buffer.update(cx, |buffer, cx| {
             buffer.edit(deletion_ranges, "", cx);
         });
 
         self.update_selections(
-            self.selections::<usize>(cx).collect(),
+            self.local_selections::<usize>(cx),
             Some(Autoscroll::Fit),
             cx,
         );
@@ -1552,9 +1559,9 @@ impl Editor {
     pub fn delete_line(&mut self, _: &DeleteLine, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
 
-        let selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let selections = self.local_selections::<Point>(cx);
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let buffer = self.buffer.read(cx);
+        let buffer = self.buffer.read(cx).snapshot(cx);
 
         let mut row_delta = 0;
         let mut new_cursors = Vec::new();
@@ -1575,13 +1582,13 @@ impl Editor {
                 }
             }
 
-            let mut edit_start = Point::new(rows.start, 0).to_offset(buffer);
+            let mut edit_start = Point::new(rows.start, 0).to_offset(&buffer);
             let edit_end;
             let cursor_buffer_row;
             if buffer.max_point().row >= rows.end {
                 // If there's a line after the range, delete the \n from the end of the row range
                 // and position the cursor on the next line.
-                edit_end = Point::new(rows.end, 0).to_offset(buffer);
+                edit_end = Point::new(rows.end, 0).to_offset(&buffer);
                 cursor_buffer_row = rows.start;
             } else {
                 // If there isn't a line after the range, delete the \n from the line before the
@@ -1621,9 +1628,9 @@ impl Editor {
     pub fn duplicate_line(&mut self, _: &DuplicateLine, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
 
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let buffer = self.buffer.read(cx);
+        let buffer = &display_map.buffer_snapshot;
 
         let mut edits = Vec::new();
         let mut selections_iter = selections.iter().peekable();
@@ -1679,9 +1686,9 @@ impl Editor {
     pub fn move_line_up(&mut self, _: &MoveLineUp, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
 
-        let selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let selections = self.local_selections::<Point>(cx);
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let buffer = self.buffer.read(cx);
+        let buffer = self.buffer.read(cx).snapshot(cx);
 
         let mut edits = Vec::new();
         let mut new_selection_ranges = Vec::new();
@@ -1692,7 +1699,7 @@ impl Editor {
         let mut contiguous_selections = Vec::new();
         while let Some(selection) = selections.next() {
             // Accumulate contiguous regions of rows that we want to move.
-            contiguous_selections.push(selection.point_range(buffer));
+            contiguous_selections.push(selection.point_range(&buffer));
             let SpannedRows {
                 mut buffer_rows,
                 mut display_rows,
@@ -1706,7 +1713,7 @@ impl Editor {
                 if next_buffer_rows.start <= buffer_rows.end {
                     buffer_rows.end = next_buffer_rows.end;
                     display_rows.end = next_display_rows.end;
-                    contiguous_selections.push(next_selection.point_range(buffer));
+                    contiguous_selections.push(next_selection.point_range(&buffer));
                     selections.next().unwrap();
                 } else {
                     break;
@@ -1715,13 +1722,13 @@ impl Editor {
 
             // Cut the text from the selected rows and paste it at the start of the previous line.
             if display_rows.start != 0 {
-                let start = Point::new(buffer_rows.start, 0).to_offset(buffer);
+                let start = Point::new(buffer_rows.start, 0).to_offset(&buffer);
                 let end = Point::new(buffer_rows.end - 1, buffer.line_len(buffer_rows.end - 1))
-                    .to_offset(buffer);
+                    .to_offset(&buffer);
 
                 let prev_row_display_start = DisplayPoint::new(display_rows.start - 1, 0);
                 let prev_row_buffer_start = display_map.prev_row_boundary(prev_row_display_start).1;
-                let prev_row_buffer_start_offset = prev_row_buffer_start.to_offset(buffer);
+                let prev_row_buffer_start_offset = prev_row_buffer_start.to_offset(&buffer);
 
                 let mut text = String::new();
                 text.extend(buffer.text_for_range(start..end));
@@ -1743,8 +1750,8 @@ impl Editor {
                 // Move folds up.
                 old_folds.push(start..end);
                 for fold in display_map.folds_in_range(start..end) {
-                    let mut start = fold.start.to_point(buffer);
-                    let mut end = fold.end.to_point(buffer);
+                    let mut start = fold.start.to_point(&buffer);
+                    let mut end = fold.end.to_point(&buffer);
                     start.row -= row_delta;
                     end.row -= row_delta;
                     new_folds.push(start..end);
@@ -1769,9 +1776,9 @@ impl Editor {
     pub fn move_line_down(&mut self, _: &MoveLineDown, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
 
-        let selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let selections = self.local_selections::<Point>(cx);
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let buffer = self.buffer.read(cx);
+        let buffer = self.buffer.read(cx).snapshot(cx);
 
         let mut edits = Vec::new();
         let mut new_selection_ranges = Vec::new();
@@ -1782,7 +1789,7 @@ impl Editor {
         let mut contiguous_selections = Vec::new();
         while let Some(selection) = selections.next() {
             // Accumulate contiguous regions of rows that we want to move.
-            contiguous_selections.push(selection.point_range(buffer));
+            contiguous_selections.push(selection.point_range(&buffer));
             let SpannedRows {
                 mut buffer_rows,
                 mut display_rows,
@@ -1795,7 +1802,7 @@ impl Editor {
                 if next_buffer_rows.start <= buffer_rows.end {
                     buffer_rows.end = next_buffer_rows.end;
                     display_rows.end = next_display_rows.end;
-                    contiguous_selections.push(next_selection.point_range(buffer));
+                    contiguous_selections.push(next_selection.point_range(&buffer));
                     selections.next().unwrap();
                 } else {
                     break;
@@ -1804,14 +1811,14 @@ impl Editor {
 
             // Cut the text from the selected rows and paste it at the end of the next line.
             if display_rows.end <= display_map.max_point().row() {
-                let start = Point::new(buffer_rows.start, 0).to_offset(buffer);
+                let start = Point::new(buffer_rows.start, 0).to_offset(&buffer);
                 let end = Point::new(buffer_rows.end - 1, buffer.line_len(buffer_rows.end - 1))
-                    .to_offset(buffer);
+                    .to_offset(&buffer);
 
                 let next_row_display_end =
                     DisplayPoint::new(display_rows.end, display_map.line_len(display_rows.end));
                 let next_row_buffer_end = display_map.next_row_boundary(next_row_display_end).1;
-                let next_row_buffer_end_offset = next_row_buffer_end.to_offset(buffer);
+                let next_row_buffer_end_offset = next_row_buffer_end.to_offset(&buffer);
 
                 let mut text = String::new();
                 text.push('\n');
@@ -1830,8 +1837,8 @@ impl Editor {
                 // Move folds down.
                 old_folds.push(start..end);
                 for fold in display_map.folds_in_range(start..end) {
-                    let mut start = fold.start.to_point(buffer);
-                    let mut end = fold.end.to_point(buffer);
+                    let mut start = fold.start.to_point(&buffer);
+                    let mut end = fold.end.to_point(&buffer);
                     start.row += row_delta;
                     end.row += row_delta;
                     new_folds.push(start..end);
@@ -1856,10 +1863,10 @@ impl Editor {
     pub fn cut(&mut self, _: &Cut, cx: &mut ViewContext<Self>) {
         self.start_transaction(cx);
         let mut text = String::new();
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         let mut clipboard_selections = Vec::with_capacity(selections.len());
         {
-            let buffer = self.buffer.read(cx);
+            let buffer = self.buffer.read(cx).read(cx);
             let max_point = buffer.max_point();
             for selection in &mut selections {
                 let is_entire_line = selection.is_empty();
@@ -1887,28 +1894,30 @@ impl Editor {
     }
 
     pub fn copy(&mut self, _: &Copy, cx: &mut ViewContext<Self>) {
-        let selections = self.selections::<Point>(cx).collect::<Vec<_>>();
-        let buffer = self.buffer.read(cx);
-        let max_point = buffer.max_point();
+        let selections = self.local_selections::<Point>(cx);
         let mut text = String::new();
         let mut clipboard_selections = Vec::with_capacity(selections.len());
-        for selection in selections.iter() {
-            let mut start = selection.start;
-            let mut end = selection.end;
-            let is_entire_line = selection.is_empty();
-            if is_entire_line {
-                start = Point::new(start.row, 0);
-                end = cmp::min(max_point, Point::new(start.row + 1, 0));
-            }
-            let mut len = 0;
-            for chunk in buffer.text_for_range(start..end) {
-                text.push_str(chunk);
-                len += chunk.len();
+        {
+            let buffer = self.buffer.read(cx).read(cx);
+            let max_point = buffer.max_point();
+            for selection in selections.iter() {
+                let mut start = selection.start;
+                let mut end = selection.end;
+                let is_entire_line = selection.is_empty();
+                if is_entire_line {
+                    start = Point::new(start.row, 0);
+                    end = cmp::min(max_point, Point::new(start.row + 1, 0));
+                }
+                let mut len = 0;
+                for chunk in buffer.text_for_range(start..end) {
+                    text.push_str(chunk);
+                    len += chunk.len();
+                }
+                clipboard_selections.push(ClipboardSelection {
+                    len,
+                    is_entire_line,
+                });
             }
-            clipboard_selections.push(ClipboardSelection {
-                len,
-                is_entire_line,
-            });
         }
 
         cx.as_mut()
@@ -1919,7 +1928,7 @@ impl Editor {
         if let Some(item) = cx.as_mut().read_from_clipboard() {
             let clipboard_text = item.text();
             if let Some(mut clipboard_selections) = item.metadata::<Vec<ClipboardSelection>>() {
-                let mut selections = self.selections::<usize>(cx).collect::<Vec<_>>();
+                let mut selections = self.local_selections::<usize>(cx);
                 let all_selections_were_entire_line =
                     clipboard_selections.iter().all(|s| s.is_entire_line);
                 if clipboard_selections.len() != selections.len() {
@@ -1950,7 +1959,7 @@ impl Editor {
                         // selection was copied. If this selection is also currently empty,
                         // then paste the line before the current line of the buffer.
                         let range = if selection.is_empty() && entire_line {
-                            let column = selection.start.to_point(&*buffer).column as usize;
+                            let column = selection.start.to_point(&buffer.read(cx)).column as usize;
                             let line_start = selection.start - column;
                             line_start..line_start
                         } else {
@@ -1971,18 +1980,26 @@ impl Editor {
     }
 
     pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext<Self>) {
-        self.buffer.update(cx, |buffer, cx| buffer.undo(cx));
-        self.request_autoscroll(Autoscroll::Fit, cx);
+        if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) {
+            if let Some((selections, _)) = self.selection_history.get(&tx_id).cloned() {
+                self.set_selections(selections, cx);
+            }
+            self.request_autoscroll(Autoscroll::Fit, cx);
+        }
     }
 
     pub fn redo(&mut self, _: &Redo, cx: &mut ViewContext<Self>) {
-        self.buffer.update(cx, |buffer, cx| buffer.redo(cx));
-        self.request_autoscroll(Autoscroll::Fit, cx);
+        if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) {
+            if let Some((_, Some(selections))) = self.selection_history.get(&tx_id).cloned() {
+                self.set_selections(selections, cx);
+            }
+            self.request_autoscroll(Autoscroll::Fit, cx);
+        }
     }
 
     pub fn move_left(&mut self, _: &MoveLeft, cx: &mut ViewContext<Self>) {
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         for selection in &mut selections {
             let start = selection.start.to_display_point(&display_map);
             let end = selection.end.to_display_point(&display_map);
@@ -2004,7 +2021,7 @@ impl Editor {
 
     pub fn select_left(&mut self, _: &SelectLeft, cx: &mut ViewContext<Self>) {
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         for selection in &mut selections {
             let head = selection.head().to_display_point(&display_map);
             let cursor = movement::left(&display_map, head)
@@ -2018,7 +2035,7 @@ impl Editor {
 
     pub fn move_right(&mut self, _: &MoveRight, cx: &mut ViewContext<Self>) {
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         for selection in &mut selections {
             let start = selection.start.to_display_point(&display_map);
             let end = selection.end.to_display_point(&display_map);
@@ -2040,7 +2057,7 @@ impl Editor {
 
     pub fn select_right(&mut self, _: &SelectRight, cx: &mut ViewContext<Self>) {
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         for selection in &mut selections {
             let head = selection.head().to_display_point(&display_map);
             let cursor = movement::right(&display_map, head)
@@ -2059,7 +2076,7 @@ impl Editor {
         }
 
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         for selection in &mut selections {
             let start = selection.start.to_display_point(&display_map);
             let end = selection.end.to_display_point(&display_map);
@@ -2079,7 +2096,7 @@ impl Editor {
 
     pub fn select_up(&mut self, _: &SelectUp, cx: &mut ViewContext<Self>) {
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-        let mut selections = self.selections::<Point>(cx).collect::<Vec<_>>();
+        let mut selections = self.local_selections::<Point>(cx);
         for selection in &mut selections {
             let head = selection.head().to_display_point(&display_map);
             let (head, goal) = movement::up(&display_map, head, selection.goal).unwrap();
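
Aside (not part of the diff): the undo/redo change above looks up saved selections by the transaction id returned from the buffer. A minimal sketch of that pattern is below, assuming a toy `SelectionHistory` keyed by transaction id; the type and field names are hypothetical stand-ins, not the actual Zed types.

```rust
// Illustrative sketch only: selections recorded per transaction, so undo/redo
// can restore the cursor state that belonged to that transaction.
use std::collections::HashMap;

type TransactionId = usize;

#[derive(Clone, Debug, PartialEq)]
struct Selection {
    start: usize,
    end: usize,
}

#[derive(Default)]
struct SelectionHistory {
    // (selections before the transaction, selections after it, if known)
    entries: HashMap<TransactionId, (Vec<Selection>, Option<Vec<Selection>>)>,
}

impl SelectionHistory {
    fn record(&mut self, id: TransactionId, before: Vec<Selection>, after: Option<Vec<Selection>>) {
        self.entries.insert(id, (before, after));
    }

    // On undo, restore the selections captured before the transaction.
    fn selections_for_undo(&self, id: TransactionId) -> Option<Vec<Selection>> {
        self.entries.get(&id).map(|(before, _)| before.clone())
    }

    // On redo, restore the selections captured after the transaction, if any.
    fn selections_for_redo(&self, id: TransactionId) -> Option<Vec<Selection>> {
        self.entries.get(&id).and_then(|(_, after)| after.clone())
    }
}

fn main() {
    let mut history = SelectionHistory::default();
    history.record(
        1,
        vec![Selection { start: 0, end: 0 }],
        Some(vec![Selection { start: 5, end: 5 }]),
    );
    assert_eq!(history.selections_for_undo(1).unwrap()[0].start, 0);
    assert_eq!(history.selections_for_redo(1).unwrap()[0].start, 5);
}
```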

crates/editor/src/element.rs 🔗

@@ -1,10 +1,10 @@
-use crate::display_map::{BlockContext, ToDisplayPoint};
-
 use super::{
-    DisplayPoint, Editor, EditorMode, EditorSettings, EditorStyle, Input, Scroll, Select,
-    SelectPhase, Snapshot, SoftWrap, MAX_LINE_LEN,
+    display_map::{BlockContext, ToDisplayPoint},
+    DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input, Scroll,
+    Select, SelectPhase, SoftWrap, ToPoint, MAX_LINE_LEN,
 };
 use clock::ReplicaId;
+use collections::{BTreeMap, HashMap};
 use gpui::{
     color::Color,
     geometry::{
@@ -19,11 +19,10 @@ use gpui::{
     MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle,
 };
 use json::json;
-use language::{Chunk, ToPoint};
+use language::Chunk;
 use smallvec::SmallVec;
 use std::{
     cmp::{self, Ordering},
-    collections::{BTreeMap, HashMap},
     fmt::Write,
     ops::Range,
 };
@@ -49,7 +48,7 @@ impl EditorElement {
         self.view.upgrade(cx).unwrap().update(cx, f)
     }
 
-    fn snapshot(&self, cx: &mut MutableAppContext) -> Snapshot {
+    fn snapshot(&self, cx: &mut MutableAppContext) -> EditorSnapshot {
         self.update_view(cx, |view, cx| view.snapshot(cx))
     }
 
@@ -434,8 +433,8 @@ impl EditorElement {
         }
     }
 
-    fn max_line_number_width(&self, snapshot: &Snapshot, cx: &LayoutContext) -> f32 {
-        let digit_count = (snapshot.buffer_row_count() as f32).log10().floor() as usize + 1;
+    fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &LayoutContext) -> f32 {
+        let digit_count = (snapshot.max_buffer_row() as f32).log10().floor() as usize + 1;
         let style = &self.settings.style;
 
         cx.text_layout_cache
@@ -458,7 +457,7 @@ impl EditorElement {
         &self,
         rows: Range<u32>,
         active_rows: &BTreeMap<u32, bool>,
-        snapshot: &Snapshot,
+        snapshot: &EditorSnapshot,
         cx: &LayoutContext,
     ) -> Vec<Option<text_layout::Line>> {
         let style = &self.settings.style;
@@ -504,7 +503,7 @@ impl EditorElement {
     fn layout_lines(
         &mut self,
         mut rows: Range<u32>,
-        snapshot: &mut Snapshot,
+        snapshot: &mut EditorSnapshot,
         cx: &LayoutContext,
     ) -> Vec<text_layout::Line> {
         rows.end = cmp::min(rows.end, snapshot.max_point().row() + 1);
@@ -623,7 +622,7 @@ impl EditorElement {
     fn layout_blocks(
         &mut self,
         rows: Range<u32>,
-        snapshot: &Snapshot,
+        snapshot: &EditorSnapshot,
         text_width: f32,
         line_height: f32,
         style: &EditorStyle,
@@ -732,21 +731,14 @@ impl Element for EditorElement {
         let scroll_top = scroll_position.y() * line_height;
         let end_row = ((scroll_top + size.y()) / line_height).ceil() as u32 + 1; // Add 1 to ensure selections bleed off screen
 
-        let mut selections = HashMap::new();
         let mut active_rows = BTreeMap::new();
         let mut highlighted_row = None;
-        self.update_view(cx.app, |view, cx| {
+        let selections = self.update_view(cx.app, |view, cx| {
             highlighted_row = view.highlighted_row();
-            for selection_set_id in view.active_selection_sets(cx).collect::<Vec<_>>() {
-                let replica_selections = view
-                    .intersecting_selections(
-                        selection_set_id,
-                        DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0),
-                        cx,
-                    )
-                    .collect::<Vec<_>>();
-                for selection in &replica_selections {
-                    if selection_set_id == view.selection_set_id {
+            let selections = view.visible_selections(start_row..end_row, cx);
+            for (replica_id, selections) in &selections {
+                if *replica_id == view.replica_id(cx) {
+                    for selection in selections {
                         let is_empty = selection.start == selection.end;
                         let selection_start = snapshot.prev_row_boundary(selection.start).0;
                         let selection_end = snapshot.next_row_boundary(selection.end).0;
@@ -759,9 +751,8 @@ impl Element for EditorElement {
                         }
                     }
                 }
-
-                selections.insert(selection_set_id.replica_id, replica_selections);
             }
+            selections
         });
 
         let line_number_layouts = self.layout_rows(start_row..end_row, &active_rows, &snapshot, cx);
@@ -923,7 +914,7 @@ pub struct LayoutState {
     gutter_padding: f32,
     text_size: Vector2F,
     style: EditorStyle,
-    snapshot: Snapshot,
+    snapshot: EditorSnapshot,
     active_rows: BTreeMap<u32, bool>,
     highlighted_row: Option<u32>,
     line_layouts: Vec<text_layout::Line>,
@@ -961,7 +952,7 @@ impl LayoutState {
 
 fn layout_line(
     row: u32,
-    snapshot: &Snapshot,
+    snapshot: &EditorSnapshot,
     style: &EditorStyle,
     layout_cache: &TextLayoutCache,
 ) -> text_layout::Line {
@@ -998,7 +989,7 @@ pub struct PaintState {
 impl PaintState {
     fn point_for_position(
         &self,
-        snapshot: &Snapshot,
+        snapshot: &EditorSnapshot,
         layout: &LayoutState,
         position: Vector2F,
     ) -> (DisplayPoint, u32) {
@@ -1164,23 +1155,20 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::{
-        test::sample_text,
-        {Editor, EditorSettings},
-    };
-    use language::Buffer;
+    use crate::{Editor, EditorSettings, MultiBuffer};
+    use std::sync::Arc;
+    use util::test::sample_text;
 
     #[gpui::test]
     fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) {
         let settings = EditorSettings::test(cx);
-
-        let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6), cx));
+        let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
         let (window_id, editor) = cx.add_window(Default::default(), |cx| {
             Editor::for_buffer(
                 buffer,
                 {
                     let settings = settings.clone();
-                    move |_| settings.clone()
+                    Arc::new(move |_| settings.clone())
                 },
                 cx,
             )
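
Aside (not part of the diff): the element change above replaces per-selection-set iteration with a single map from replica id to that replica's visible selections. A small sketch of that grouping, under the assumption of simplified stand-in types (not the real `Editor`/`Selection` types), is:

```rust
// Illustrative sketch only: collecting selections into a map keyed by the
// replica that owns them, so rendering can treat local and remote cursors
// uniformly.
use std::collections::HashMap;

type ReplicaId = u16;

#[derive(Clone, Debug)]
struct Selection {
    start: u32,
    end: u32,
}

fn group_by_replica(
    selections: impl IntoIterator<Item = (ReplicaId, Selection)>,
) -> HashMap<ReplicaId, Vec<Selection>> {
    let mut grouped: HashMap<ReplicaId, Vec<Selection>> = HashMap::new();
    for (replica_id, selection) in selections {
        grouped.entry(replica_id).or_default().push(selection);
    }
    grouped
}

fn main() {
    let grouped = group_by_replica([
        (0, Selection { start: 0, end: 3 }),
        (1, Selection { start: 4, end: 4 }),
        (0, Selection { start: 8, end: 10 }),
    ]);
    assert_eq!(grouped[&0].len(), 2);
    assert_eq!(grouped[&1].len(), 1);
}
```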

crates/editor/src/items.rs 🔗

@@ -1,28 +1,27 @@
-use crate::{Editor, EditorSettings, Event};
+use crate::{Editor, Event};
+use crate::{MultiBuffer, ToPoint as _};
 use anyhow::Result;
 use gpui::{
-    elements::*, fonts::TextStyle, AppContext, Entity, ModelContext, ModelHandle,
-    MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
-    WeakModelHandle,
+    elements::*, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, RenderContext,
+    Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle,
 };
-use language::{Buffer, Diagnostic, File as _};
+use language::{Diagnostic, File as _};
 use postage::watch;
 use project::{ProjectPath, Worktree};
 use std::fmt::Write;
 use std::path::Path;
-use text::{Point, Selection, ToPoint};
+use text::{Point, Selection};
 use workspace::{
-    settings, EntryOpener, ItemHandle, ItemView, ItemViewHandle, Settings, StatusItemView,
-    WeakItemHandle,
+    EntryOpener, ItemHandle, ItemView, ItemViewHandle, Settings, StatusItemView, WeakItemHandle,
 };
 
 pub struct BufferOpener;
 
 #[derive(Clone)]
-pub struct BufferItemHandle(pub ModelHandle<Buffer>);
+pub struct BufferItemHandle(pub ModelHandle<MultiBuffer>);
 
 #[derive(Clone)]
-struct WeakBufferItemHandle(WeakModelHandle<Buffer>);
+struct WeakBufferItemHandle(WeakModelHandle<MultiBuffer>);
 
 impl EntryOpener for BufferOpener {
     fn open(
@@ -32,10 +31,10 @@ impl EntryOpener for BufferOpener {
         cx: &mut ModelContext<Worktree>,
     ) -> Option<Task<Result<Box<dyn ItemHandle>>>> {
         let buffer = worktree.open_buffer(project_path.path, cx);
-        let task = cx.spawn(|_, _| async move {
-            buffer
-                .await
-                .map(|buffer| Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
+        let task = cx.spawn(|_, mut cx| async move {
+            let buffer = buffer.await?;
+            let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+            Ok(Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
         });
         Some(task)
     }
@@ -52,42 +51,7 @@ impl ItemHandle for BufferItemHandle {
         Box::new(cx.add_view(window_id, |cx| {
             Editor::for_buffer(
                 self.0.clone(),
-                move |cx| {
-                    let settings = settings.borrow();
-                    let font_cache = cx.font_cache();
-                    let font_family_id = settings.buffer_font_family;
-                    let font_family_name = cx.font_cache().family_name(font_family_id).unwrap();
-                    let font_properties = Default::default();
-                    let font_id = font_cache
-                        .select_font(font_family_id, &font_properties)
-                        .unwrap();
-                    let font_size = settings.buffer_font_size;
-
-                    let mut theme = settings.theme.editor.clone();
-                    theme.text = TextStyle {
-                        color: theme.text.color,
-                        font_family_name,
-                        font_family_id,
-                        font_id,
-                        font_size,
-                        font_properties,
-                        underline: None,
-                    };
-                    let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language());
-                    let soft_wrap = match settings.soft_wrap(language) {
-                        settings::SoftWrap::None => crate::SoftWrap::None,
-                        settings::SoftWrap::EditorWidth => crate::SoftWrap::EditorWidth,
-                        settings::SoftWrap::PreferredLineLength => crate::SoftWrap::Column(
-                            settings.preferred_line_length(language).saturating_sub(1),
-                        ),
-                    };
-
-                    EditorSettings {
-                        tab_size: settings.tab_size,
-                        soft_wrap,
-                        style: theme,
-                    }
-                },
+                crate::settings_builder(buffer, settings),
                 cx,
             )
         }))
@@ -102,7 +66,7 @@ impl ItemHandle for BufferItemHandle {
     }
 
     fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
-        self.0.read(cx).file().map(|f| ProjectPath {
+        self.0.read(cx).file(cx).map(|f| ProjectPath {
             worktree_id: f.worktree_id(),
             path: f.path().clone(),
         })
@@ -137,7 +101,7 @@ impl ItemView for Editor {
         let filename = self
             .buffer()
             .read(cx)
-            .file()
+            .file(cx)
             .and_then(|file| file.file_name());
         if let Some(name) = filename {
             name.to_string_lossy().into()
@@ -147,7 +111,7 @@ impl ItemView for Editor {
     }
 
     fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
-        self.buffer().read(cx).file().map(|file| ProjectPath {
+        self.buffer().read(cx).file(cx).map(|file| ProjectPath {
             worktree_id: file.worktree_id(),
             path: file.path().clone(),
         })
@@ -174,7 +138,14 @@ impl ItemView for Editor {
         path: &Path,
         cx: &mut ViewContext<Self>,
     ) -> Task<Result<()>> {
-        self.buffer().update(cx, |buffer, cx| {
+        let buffer = self
+            .buffer()
+            .read(cx)
+            .as_singleton()
+            .expect("cannot call save_as on an excerpt list")
+            .clone();
+
+        buffer.update(cx, |buffer, cx| {
             let handle = cx.handle();
             let text = buffer.as_rope().clone();
             let version = buffer.version();
@@ -191,12 +162,12 @@ impl ItemView for Editor {
                     let (language, language_server) = worktree.update(&mut cx, |worktree, cx| {
                         let worktree = worktree.as_local_mut().unwrap();
                         let language = worktree
-                            .languages()
+                            .language_registry()
                             .select_language(new_file.full_path())
                             .cloned();
                         let language_server = language
                             .as_ref()
-                            .and_then(|language| worktree.ensure_language_server(language, cx));
+                            .and_then(|language| worktree.register_language(language, cx));
                         (language, language_server.clone())
                     });
 
@@ -210,11 +181,19 @@ impl ItemView for Editor {
     }
 
     fn is_dirty(&self, cx: &AppContext) -> bool {
-        self.buffer().read(cx).is_dirty()
+        self.buffer().read(cx).read(cx).is_dirty()
     }
 
     fn has_conflict(&self, cx: &AppContext) -> bool {
-        self.buffer().read(cx).has_conflict()
+        self.buffer().read(cx).read(cx).has_conflict()
+    }
+
+    fn can_save(&self, cx: &AppContext) -> bool {
+        self.project_path(cx).is_some()
+    }
+
+    fn can_save_as(&self, _: &AppContext) -> bool {
+        true
     }
 }
 
@@ -237,11 +216,11 @@ impl CursorPosition {
 
     fn update_position(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
         let editor = editor.read(cx);
-        let buffer = editor.buffer().read(cx);
+        let buffer = editor.buffer().read(cx).snapshot(cx);
 
         self.selected_count = 0;
         let mut last_selection: Option<Selection<usize>> = None;
-        for selection in editor.selections::<usize>(cx) {
+        for selection in editor.local_selections::<usize>(cx) {
             self.selected_count += selection.end - selection.start;
             if last_selection
                 .as_ref()
@@ -250,7 +229,7 @@ impl CursorPosition {
                 last_selection = Some(selection);
             }
         }
-        self.position = last_selection.map(|s| s.head().to_point(buffer));
+        self.position = last_selection.map(|s| s.head().to_point(&buffer));
 
         cx.notify();
     }
@@ -314,14 +293,14 @@ impl DiagnosticMessage {
 
     fn update(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
         let editor = editor.read(cx);
-        let cursor_position = editor.newest_selection(cx).head();
-        let new_diagnostic = editor
-            .buffer()
+        let buffer = editor.buffer().read(cx);
+        let cursor_position = editor.newest_selection::<usize>(&buffer.read(cx)).head();
+        let new_diagnostic = buffer
             .read(cx)
-            .diagnostics_in_range::<usize, usize>(cursor_position..cursor_position)
-            .filter(|(range, _)| !range.is_empty())
-            .min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len()))
-            .map(|(_, diagnostic)| diagnostic.clone());
+            .diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
+            .filter(|(_, entry)| !entry.range.is_empty())
+            .min_by_key(|(_, entry)| (entry.diagnostic.severity, entry.range.len()))
+            .map(|(_, entry)| entry.diagnostic);
         if new_diagnostic != self.diagnostic {
             self.diagnostic = new_diagnostic;
             cx.notify();
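
Aside (not part of the diff): the `DiagnosticMessage` change above picks a single diagnostic at the cursor by filtering out empty ranges and taking the minimum of (severity, range length). A self-contained sketch of that selection rule follows; `Severity` and `DiagnosticEntry` here are simplified stand-ins, not the crate's actual types.

```rust
// Illustrative sketch only: prefer the most severe diagnostic under the
// cursor, breaking ties with the narrowest (most specific) range.
use std::ops::Range;

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Severity {
    Error,   // lowest value sorts first, i.e. wins min_by_key
    Warning,
    Hint,
}

#[derive(Clone, Debug)]
struct DiagnosticEntry {
    range: Range<usize>,
    severity: Severity,
    message: String,
}

fn primary_diagnostic(entries: &[DiagnosticEntry]) -> Option<&DiagnosticEntry> {
    entries
        .iter()
        .filter(|entry| !entry.range.is_empty())
        .min_by_key(|entry| (entry.severity, entry.range.len()))
}

fn main() {
    let entries = vec![
        DiagnosticEntry { range: 0..20, severity: Severity::Warning, message: "unused".into() },
        DiagnosticEntry { range: 5..9, severity: Severity::Error, message: "type mismatch".into() },
    ];
    assert_eq!(primary_diagnostic(&entries).unwrap().message, "type mismatch");
}
```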

crates/editor/src/movement.rs 🔗

@@ -1,9 +1,9 @@
-use super::{Bias, DisplayMapSnapshot, DisplayPoint, SelectionGoal, ToDisplayPoint};
+use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
+use crate::ToPoint;
 use anyhow::Result;
 use std::{cmp, ops::Range};
-use text::ToPoint;
 
-pub fn left(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
+pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
     if point.column() > 0 {
         *point.column_mut() -= 1;
     } else if point.row() > 0 {
@@ -13,7 +13,7 @@ pub fn left(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<Display
     Ok(map.clip_point(point, Bias::Left))
 }
 
-pub fn right(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
+pub fn right(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result<DisplayPoint> {
     let max_column = map.line_len(point.row());
     if point.column() < max_column {
         *point.column_mut() += 1;
@@ -25,27 +25,26 @@ pub fn right(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result<Displa
 }
 
 pub fn up(
-    map: &DisplayMapSnapshot,
-    mut point: DisplayPoint,
+    map: &DisplaySnapshot,
+    start: DisplayPoint,
     goal: SelectionGoal,
 ) -> Result<(DisplayPoint, SelectionGoal)> {
-    let goal_column = if let SelectionGoal::Column(column) = goal {
+    let mut goal_column = if let SelectionGoal::Column(column) = goal {
         column
     } else {
-        map.column_to_chars(point.row(), point.column())
+        map.column_to_chars(start.row(), start.column())
     };
 
-    loop {
-        if point.row() > 0 {
-            *point.row_mut() -= 1;
-            *point.column_mut() = map.column_from_chars(point.row(), goal_column);
-            if !map.is_block_line(point.row()) {
-                break;
-            }
-        } else {
-            point = DisplayPoint::new(0, 0);
-            break;
-        }
+    let prev_row = start.row().saturating_sub(1);
+    let mut point = map.clip_point(
+        DisplayPoint::new(prev_row, map.line_len(prev_row)),
+        Bias::Left,
+    );
+    if point.row() < start.row() {
+        *point.column_mut() = map.column_from_chars(point.row(), goal_column);
+    } else {
+        point = DisplayPoint::new(0, 0);
+        goal_column = 0;
     }
 
     let clip_bias = if point.column() == map.line_len(point.row()) {
@@ -61,28 +60,23 @@ pub fn up(
 }
 
 pub fn down(
-    map: &DisplayMapSnapshot,
-    mut point: DisplayPoint,
+    map: &DisplaySnapshot,
+    start: DisplayPoint,
     goal: SelectionGoal,
 ) -> Result<(DisplayPoint, SelectionGoal)> {
-    let max_point = map.max_point();
-    let goal_column = if let SelectionGoal::Column(column) = goal {
+    let mut goal_column = if let SelectionGoal::Column(column) = goal {
         column
     } else {
-        map.column_to_chars(point.row(), point.column())
+        map.column_to_chars(start.row(), start.column())
     };
 
-    loop {
-        if point.row() < max_point.row() {
-            *point.row_mut() += 1;
-            *point.column_mut() = map.column_from_chars(point.row(), goal_column);
-            if !map.is_block_line(point.row()) {
-                break;
-            }
-        } else {
-            point = max_point;
-            break;
-        }
+    let next_row = start.row() + 1;
+    let mut point = map.clip_point(DisplayPoint::new(next_row, 0), Bias::Right);
+    if point.row() > start.row() {
+        *point.column_mut() = map.column_from_chars(point.row(), goal_column);
+    } else {
+        point = map.max_point();
+        goal_column = map.column_to_chars(point.row(), point.column())
     }
 
     let clip_bias = if point.column() == map.line_len(point.row()) {
@@ -98,7 +92,7 @@ pub fn down(
 }
 
 pub fn line_beginning(
-    map: &DisplayMapSnapshot,
+    map: &DisplaySnapshot,
     point: DisplayPoint,
     toggle_indent: bool,
 ) -> DisplayPoint {
@@ -110,12 +104,12 @@ pub fn line_beginning(
     }
 }
 
-pub fn line_end(map: &DisplayMapSnapshot, point: DisplayPoint) -> DisplayPoint {
+pub fn line_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
     let line_end = DisplayPoint::new(point.row(), map.line_len(point.row()));
     map.clip_point(line_end, Bias::Left)
 }
 
-pub fn prev_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> DisplayPoint {
+pub fn prev_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
     let mut line_start = 0;
     if point.row() > 0 {
         if let Some(indent) = map.soft_wrap_indent(point.row() - 1) {
@@ -154,7 +148,7 @@ pub fn prev_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) ->
     boundary
 }
 
-pub fn next_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> DisplayPoint {
+pub fn next_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
     let mut prev_char_kind = None;
     for c in map.chars_at(point) {
         let char_kind = char_kind(c);
@@ -181,7 +175,7 @@ pub fn next_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) ->
     point
 }
 
-pub fn is_inside_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> bool {
+pub fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
     let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
     let text = &map.buffer_snapshot;
     let next_char_kind = text.chars_at(ix).next().map(char_kind);
@@ -189,7 +183,7 @@ pub fn is_inside_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> bool {
     prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word))
 }
 
-pub fn surrounding_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> Range<DisplayPoint> {
+pub fn surrounding_word(map: &DisplaySnapshot, point: DisplayPoint) -> Range<DisplayPoint> {
     let mut start = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
     let mut end = start;
 
@@ -244,7 +238,120 @@ fn char_kind(c: char) -> CharKind {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::{display_map::DisplayMap, Buffer};
+    use crate::{
+        display_map::{BlockDisposition, BlockProperties},
+        Buffer, DisplayMap, ExcerptProperties, MultiBuffer,
+    };
+    use gpui::{elements::Empty, Element};
+    use language::Point;
+    use std::sync::Arc;
+
+    #[gpui::test]
+    fn test_move_up_and_down_with_excerpts(cx: &mut gpui::MutableAppContext) {
+        let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap();
+        let font_id = cx
+            .font_cache()
+            .select_font(family_id, &Default::default())
+            .unwrap();
+
+        let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx));
+        let mut excerpt1_header_position = None;
+        let mut excerpt2_header_position = None;
+        let multibuffer = cx.add_model(|cx| {
+            let mut multibuffer = MultiBuffer::new(0);
+            let excerpt1_id = multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer,
+                    range: Point::new(0, 0)..Point::new(1, 4),
+                },
+                cx,
+            );
+            let excerpt2_id = multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer,
+                    range: Point::new(2, 0)..Point::new(3, 2),
+                },
+                cx,
+            );
+
+            excerpt1_header_position = Some(
+                multibuffer
+                    .read(cx)
+                    .anchor_in_excerpt(excerpt1_id, language::Anchor::min()),
+            );
+            excerpt2_header_position = Some(
+                multibuffer
+                    .read(cx)
+                    .anchor_in_excerpt(excerpt2_id, language::Anchor::min()),
+            );
+            multibuffer
+        });
+
+        let display_map =
+            cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, cx));
+        display_map.update(cx, |display_map, cx| {
+            display_map.insert_blocks(
+                [
+                    BlockProperties {
+                        position: excerpt1_header_position.unwrap(),
+                        height: 2,
+                        render: Arc::new(|_| Empty::new().boxed()),
+                        disposition: BlockDisposition::Above,
+                    },
+                    BlockProperties {
+                        position: excerpt2_header_position.unwrap(),
+                        height: 3,
+                        render: Arc::new(|_| Empty::new().boxed()),
+                        disposition: BlockDisposition::Above,
+                    },
+                ],
+                cx,
+            )
+        });
+
+        let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
+        assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\n\nhijkl\nmn");
+
+        // Can't move up into the first excerpt's header
+        assert_eq!(
+            up(&snapshot, DisplayPoint::new(2, 2), SelectionGoal::Column(2)).unwrap(),
+            (DisplayPoint::new(2, 0), SelectionGoal::Column(0)),
+        );
+        assert_eq!(
+            up(&snapshot, DisplayPoint::new(2, 0), SelectionGoal::None).unwrap(),
+            (DisplayPoint::new(2, 0), SelectionGoal::Column(0)),
+        );
+
+        // Move up and down within first excerpt
+        assert_eq!(
+            up(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(4)).unwrap(),
+            (DisplayPoint::new(2, 3), SelectionGoal::Column(4)),
+        );
+        assert_eq!(
+            down(&snapshot, DisplayPoint::new(2, 3), SelectionGoal::Column(4)).unwrap(),
+            (DisplayPoint::new(3, 4), SelectionGoal::Column(4)),
+        );
+
+        // Move up and down across second excerpt's header
+        assert_eq!(
+            up(&snapshot, DisplayPoint::new(7, 5), SelectionGoal::Column(5)).unwrap(),
+            (DisplayPoint::new(3, 4), SelectionGoal::Column(5)),
+        );
+        assert_eq!(
+            down(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(5)).unwrap(),
+            (DisplayPoint::new(7, 5), SelectionGoal::Column(5)),
+        );
+
+        // Can't move down off the end
+        assert_eq!(
+            down(&snapshot, DisplayPoint::new(8, 0), SelectionGoal::Column(0)).unwrap(),
+            (DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
+        );
+        assert_eq!(
+            down(&snapshot, DisplayPoint::new(8, 2), SelectionGoal::Column(2)).unwrap(),
+            (DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
+        );
+    }
 
     #[gpui::test]
     fn test_prev_next_word_boundary_multibyte(cx: &mut gpui::MutableAppContext) {
@@ -256,7 +363,7 @@ mod tests {
             .unwrap();
         let font_size = 14.0;
 
-        let buffer = cx.add_model(|cx| Buffer::new(0, "a bcΔ defγ hi—jk", cx));
+        let buffer = MultiBuffer::build_simple("a bcΔ defγ hi—jk", cx);
         let display_map =
             cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
         let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -312,7 +419,7 @@ mod tests {
             .select_font(family_id, &Default::default())
             .unwrap();
         let font_size = 14.0;
-        let buffer = cx.add_model(|cx| Buffer::new(0, "lorem ipsum   dolor\n    sit", cx));
+        let buffer = MultiBuffer::build_simple("lorem ipsum   dolor\n    sit", cx);
         let display_map =
             cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
         let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
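
Aside (not part of the diff): the rewritten `up`/`down` movement clamps to the adjacent row first and only applies the goal column if the row actually changed, instead of looping over block lines. A much-simplified sketch of that clamp-then-check idea, using a plain slice of lines in place of a `DisplaySnapshot` (so it ignores blocks, wrapping, and excerpt headers), is:

```rust
// Illustrative sketch only: move the cursor up one row, clamping the goal
// column to the previous line's length and collapsing to (0, 0) at the top.
// Returns (row, column, goal_column).
fn move_up(lines: &[&str], row: usize, column: usize, goal_column: Option<usize>) -> (usize, usize, usize) {
    let goal = goal_column.unwrap_or(column);
    if row == 0 {
        // Already at the top: collapse to the start of the buffer.
        return (0, 0, 0);
    }
    let prev_row = row - 1;
    // Clamp the remembered goal column to the previous line's length.
    let new_column = goal.min(lines[prev_row].len());
    (prev_row, new_column, goal)
}

fn main() {
    let lines = ["abc", "defg", "hijkl"];
    // Moving up from a long line remembers the goal column...
    assert_eq!(move_up(&lines, 2, 5, None), (1, 4, 5));
    // ...so a later move can restore it when a long enough line is reached.
    assert_eq!(move_up(&lines, 1, 4, Some(5)), (0, 3, 5));
}
```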

crates/editor/src/multi_buffer.rs 🔗

@@ -0,0 +1,2754 @@
+mod anchor;
+
+pub use anchor::{Anchor, AnchorRangeExt};
+use anyhow::Result;
+use clock::ReplicaId;
+use collections::{HashMap, HashSet};
+use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
+use language::{
+    Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection,
+    ToOffset as _, ToPoint as _, TransactionId,
+};
+use std::{
+    cell::{Ref, RefCell},
+    cmp, fmt, io,
+    iter::{self, FromIterator},
+    ops::{Range, Sub},
+    str,
+    sync::Arc,
+    time::{Duration, Instant},
+};
+use sum_tree::{Bias, Cursor, SumTree};
+use text::{
+    locator::Locator,
+    rope::TextDimension,
+    subscription::{Subscription, Topic},
+    AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary,
+};
+use theme::SyntaxTheme;
+use util::post_inc;
+
+const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
+
+pub type ExcerptId = Locator;
+
+pub struct MultiBuffer {
+    snapshot: RefCell<MultiBufferSnapshot>,
+    buffers: RefCell<HashMap<usize, BufferState>>,
+    subscriptions: Topic,
+    singleton: bool,
+    replica_id: ReplicaId,
+    history: History,
+}
+
+struct History {
+    next_transaction_id: usize,
+    undo_stack: Vec<Transaction>,
+    redo_stack: Vec<Transaction>,
+    transaction_depth: usize,
+    group_interval: Duration,
+}
+
+struct Transaction {
+    id: usize,
+    buffer_transactions: HashSet<(usize, text::TransactionId)>,
+    first_edit_at: Instant,
+    last_edit_at: Instant,
+}
+
+pub trait ToOffset: 'static + fmt::Debug {
+    fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize;
+}
+
+pub trait ToPoint: 'static + fmt::Debug {
+    fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point;
+}
+
+struct BufferState {
+    buffer: ModelHandle<Buffer>,
+    last_version: clock::Global,
+    last_parse_count: usize,
+    last_diagnostics_update_count: usize,
+    excerpts: Vec<ExcerptId>,
+    _subscriptions: [gpui::Subscription; 2],
+}
+
+#[derive(Clone, Default)]
+pub struct MultiBufferSnapshot {
+    excerpts: SumTree<Excerpt>,
+    parse_count: usize,
+    diagnostics_update_count: usize,
+    is_dirty: bool,
+    has_conflict: bool,
+}
+
+pub struct ExcerptProperties<'a, T> {
+    pub buffer: &'a ModelHandle<Buffer>,
+    pub range: Range<T>,
+}
+
+#[derive(Clone)]
+struct Excerpt {
+    id: ExcerptId,
+    buffer_id: usize,
+    buffer: BufferSnapshot,
+    range: Range<text::Anchor>,
+    max_buffer_row: u32,
+    text_summary: TextSummary,
+    has_trailing_newline: bool,
+}
+
+#[derive(Clone, Debug, Default)]
+struct ExcerptSummary {
+    excerpt_id: ExcerptId,
+    max_buffer_row: u32,
+    text: TextSummary,
+}
+
+pub struct MultiBufferRows<'a> {
+    buffer_row_range: Range<u32>,
+    excerpts: Cursor<'a, Excerpt, Point>,
+}
+
+pub struct MultiBufferChunks<'a> {
+    range: Range<usize>,
+    excerpts: Cursor<'a, Excerpt, usize>,
+    excerpt_chunks: Option<ExcerptChunks<'a>>,
+    theme: Option<&'a SyntaxTheme>,
+}
+
+pub struct MultiBufferBytes<'a> {
+    range: Range<usize>,
+    excerpts: Cursor<'a, Excerpt, usize>,
+    excerpt_bytes: Option<ExcerptBytes<'a>>,
+    chunk: &'a [u8],
+}
+
+struct ExcerptChunks<'a> {
+    content_chunks: BufferChunks<'a>,
+    footer_height: usize,
+}
+
+struct ExcerptBytes<'a> {
+    content_bytes: language::rope::Bytes<'a>,
+    footer_height: usize,
+}
+
+impl MultiBuffer {
+    pub fn new(replica_id: ReplicaId) -> Self {
+        Self {
+            snapshot: Default::default(),
+            buffers: Default::default(),
+            subscriptions: Default::default(),
+            singleton: false,
+            replica_id,
+            history: History {
+                next_transaction_id: Default::default(),
+                undo_stack: Default::default(),
+                redo_stack: Default::default(),
+                transaction_depth: 0,
+                group_interval: Duration::from_millis(300),
+            },
+        }
+    }
+
+    pub fn singleton(buffer: ModelHandle<Buffer>, cx: &mut ModelContext<Self>) -> Self {
+        let mut this = Self::new(buffer.read(cx).replica_id());
+        this.singleton = true;
+        this.push_excerpt(
+            ExcerptProperties {
+                buffer: &buffer,
+                range: text::Anchor::min()..text::Anchor::max(),
+            },
+            cx,
+        );
+        this
+    }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn build_simple(text: &str, cx: &mut gpui::MutableAppContext) -> ModelHandle<Self> {
+        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+        cx.add_model(|cx| Self::singleton(buffer, cx))
+    }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn build_random(
+        mut rng: &mut impl rand::Rng,
+        cx: &mut gpui::MutableAppContext,
+    ) -> ModelHandle<Self> {
+        use rand::prelude::*;
+        use std::env;
+        use text::RandomCharIter;
+
+        let max_excerpts = env::var("MAX_EXCERPTS")
+            .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable"))
+            .unwrap_or(5);
+        let excerpts = rng.gen_range(1..=max_excerpts);
+
+        cx.add_model(|cx| {
+            let mut multibuffer = MultiBuffer::new(0);
+            let mut buffers = Vec::new();
+            for _ in 0..excerpts {
+                let buffer_handle = if rng.gen() || buffers.is_empty() {
+                    let text = RandomCharIter::new(&mut rng).take(10).collect::<String>();
+                    buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx)));
+                    let buffer = buffers.last().unwrap();
+                    log::info!(
+                        "Creating new buffer {} with text: {:?}",
+                        buffer.id(),
+                        buffer.read(cx).text()
+                    );
+                    buffers.last().unwrap()
+                } else {
+                    buffers.choose(rng).unwrap()
+                };
+
+                let buffer = buffer_handle.read(cx);
+                let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right);
+                let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+                let header_height = rng.gen_range(0..=5);
+                log::info!(
+                    "Inserting excerpt from buffer {} with header height {} and range {:?}: {:?}",
+                    buffer_handle.id(),
+                    header_height,
+                    start_ix..end_ix,
+                    &buffer.text()[start_ix..end_ix]
+                );
+
+                multibuffer.push_excerpt(
+                    ExcerptProperties {
+                        buffer: buffer_handle,
+                        range: start_ix..end_ix,
+                    },
+                    cx,
+                );
+            }
+            multibuffer
+        })
+    }
+
+    pub fn replica_id(&self) -> ReplicaId {
+        self.replica_id
+    }
+
+    pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot {
+        self.sync(cx);
+        self.snapshot.borrow().clone()
+    }
+
+    pub fn read(&self, cx: &AppContext) -> Ref<MultiBufferSnapshot> {
+        self.sync(cx);
+        self.snapshot.borrow()
+    }
+
+    pub fn as_singleton(&self) -> Option<ModelHandle<Buffer>> {
+        if self.singleton {
+            return Some(
+                self.buffers
+                    .borrow()
+                    .values()
+                    .next()
+                    .unwrap()
+                    .buffer
+                    .clone(),
+            );
+        } else {
+            None
+        }
+    }
+
+    pub fn subscribe(&mut self) -> Subscription {
+        self.subscriptions.subscribe()
+    }
+
+    pub fn edit<I, S, T>(&mut self, ranges: I, new_text: T, cx: &mut ModelContext<Self>)
+    where
+        I: IntoIterator<Item = Range<S>>,
+        S: ToOffset,
+        T: Into<String>,
+    {
+        self.edit_internal(ranges, new_text, false, cx)
+    }
+
+    pub fn edit_with_autoindent<I, S, T>(
+        &mut self,
+        ranges: I,
+        new_text: T,
+        cx: &mut ModelContext<Self>,
+    ) where
+        I: IntoIterator<Item = Range<S>>,
+        S: ToOffset,
+        T: Into<String>,
+    {
+        self.edit_internal(ranges, new_text, true, cx)
+    }
+
+    pub fn edit_internal<I, S, T>(
+        &mut self,
+        ranges_iter: I,
+        new_text: T,
+        autoindent: bool,
+        cx: &mut ModelContext<Self>,
+    ) where
+        I: IntoIterator<Item = Range<S>>,
+        S: ToOffset,
+        T: Into<String>,
+    {
+        if let Some(buffer) = self.as_singleton() {
+            let snapshot = self.read(cx);
+            let ranges = ranges_iter
+                .into_iter()
+                .map(|range| range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot));
+            return buffer.update(cx, |buffer, cx| {
+                if autoindent {
+                    buffer.edit_with_autoindent(ranges, new_text, cx)
+                } else {
+                    buffer.edit(ranges, new_text, cx)
+                }
+            });
+        }
+
+        let snapshot = self.read(cx);
+        let mut buffer_edits: HashMap<usize, Vec<(Range<usize>, bool)>> = Default::default();
+        let mut cursor = snapshot.excerpts.cursor::<usize>();
+        for range in ranges_iter {
+            let start = range.start.to_offset(&snapshot);
+            let end = range.end.to_offset(&snapshot);
+            cursor.seek(&start, Bias::Right, &());
+            if cursor.item().is_none() && start == *cursor.start() {
+                cursor.prev(&());
+            }
+            let start_excerpt = cursor.item().expect("start offset out of bounds");
+            let start_overshoot = start - cursor.start();
+            let buffer_start =
+                start_excerpt.range.start.to_offset(&start_excerpt.buffer) + start_overshoot;
+
+            cursor.seek(&end, Bias::Right, &());
+            if cursor.item().is_none() && end == *cursor.start() {
+                cursor.prev(&());
+            }
+            let end_excerpt = cursor.item().expect("end offset out of bounds");
+            let end_overshoot = end - cursor.start();
+            let buffer_end = end_excerpt.range.start.to_offset(&end_excerpt.buffer) + end_overshoot;
+
+            if start_excerpt.id == end_excerpt.id {
+                buffer_edits
+                    .entry(start_excerpt.buffer_id)
+                    .or_insert(Vec::new())
+                    .push((buffer_start..buffer_end, true));
+            } else {
+                let start_excerpt_range =
+                    buffer_start..start_excerpt.range.end.to_offset(&start_excerpt.buffer);
+                let end_excerpt_range =
+                    end_excerpt.range.start.to_offset(&end_excerpt.buffer)..buffer_end;
+                buffer_edits
+                    .entry(start_excerpt.buffer_id)
+                    .or_insert(Vec::new())
+                    .push((start_excerpt_range, true));
+                buffer_edits
+                    .entry(end_excerpt.buffer_id)
+                    .or_insert(Vec::new())
+                    .push((end_excerpt_range, false));
+
+                cursor.seek(&start, Bias::Right, &());
+                cursor.next(&());
+                while let Some(excerpt) = cursor.item() {
+                    if excerpt.id == end_excerpt.id {
+                        break;
+                    }
+                    buffer_edits
+                        .entry(excerpt.buffer_id)
+                        .or_insert(Vec::new())
+                        .push((excerpt.range.to_offset(&excerpt.buffer), false));
+                    cursor.next(&());
+                }
+            }
+        }
+
+        let new_text = new_text.into();
+        for (buffer_id, mut edits) in buffer_edits {
+            edits.sort_unstable_by_key(|(range, _)| range.start);
+            self.buffers.borrow()[&buffer_id]
+                .buffer
+                .update(cx, |buffer, cx| {
+                    let mut edits = edits.into_iter().peekable();
+                    let mut insertions = Vec::new();
+                    let mut deletions = Vec::new();
+                    while let Some((mut range, mut is_insertion)) = edits.next() {
+                        while let Some((next_range, next_is_insertion)) = edits.peek() {
+                            if range.end >= next_range.start {
+                                range.end = cmp::max(next_range.end, range.end);
+                                is_insertion |= *next_is_insertion;
+                                edits.next();
+                            } else {
+                                break;
+                            }
+                        }
+
+                        if is_insertion {
+                            insertions.push(
+                                buffer.anchor_before(range.start)..buffer.anchor_before(range.end),
+                            );
+                        } else if !range.is_empty() {
+                            deletions.push(
+                                buffer.anchor_before(range.start)..buffer.anchor_before(range.end),
+                            );
+                        }
+                    }
+
+                    if autoindent {
+                        buffer.edit_with_autoindent(deletions, "", cx);
+                        buffer.edit_with_autoindent(insertions, new_text.clone(), cx);
+                    } else {
+                        buffer.edit(deletions, "", cx);
+                        buffer.edit(insertions, new_text.clone(), cx);
+                    }
+                })
+        }
+    }
+
+    pub fn start_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+        self.start_transaction_at(Instant::now(), cx)
+    }
+
+    pub(crate) fn start_transaction_at(
+        &mut self,
+        now: Instant,
+        cx: &mut ModelContext<Self>,
+    ) -> Option<TransactionId> {
+        if let Some(buffer) = self.as_singleton() {
+            return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now));
+        }
+
+        for BufferState { buffer, .. } in self.buffers.borrow().values() {
+            buffer.update(cx, |buffer, _| buffer.start_transaction_at(now));
+        }
+        self.history.start_transaction(now)
+    }
+
+    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+        self.end_transaction_at(Instant::now(), cx)
+    }
+
+    pub(crate) fn end_transaction_at(
+        &mut self,
+        now: Instant,
+        cx: &mut ModelContext<Self>,
+    ) -> Option<TransactionId> {
+        if let Some(buffer) = self.as_singleton() {
+            return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx));
+        }
+
+        let mut buffer_transactions = HashSet::default();
+        for BufferState { buffer, .. } in self.buffers.borrow().values() {
+            if let Some(transaction_id) =
+                buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx))
+            {
+                buffer_transactions.insert((buffer.id(), transaction_id));
+            }
+        }
+
+        if self.history.end_transaction(now, buffer_transactions) {
+            let transaction_id = self.history.group().unwrap();
+            Some(transaction_id)
+        } else {
+            None
+        }
+    }
+
+    pub fn set_active_selections(
+        &mut self,
+        selections: &[Selection<Anchor>],
+        cx: &mut ModelContext<Self>,
+    ) {
+        let mut selections_by_buffer: HashMap<usize, Vec<Selection<text::Anchor>>> =
+            Default::default();
+        let snapshot = self.read(cx);
+        let mut cursor = snapshot.excerpts.cursor::<Option<&ExcerptId>>();
+        for selection in selections {
+            cursor.seek(&Some(&selection.start.excerpt_id), Bias::Left, &());
+            while let Some(excerpt) = cursor.item() {
+                if excerpt.id > selection.end.excerpt_id {
+                    break;
+                }
+
+                let mut start = excerpt.range.start.clone();
+                let mut end = excerpt.range.end.clone();
+                if excerpt.id == selection.start.excerpt_id {
+                    start = selection.start.text_anchor.clone();
+                }
+                if excerpt.id == selection.end.excerpt_id {
+                    end = selection.end.text_anchor.clone();
+                }
+                selections_by_buffer
+                    .entry(excerpt.buffer_id)
+                    .or_default()
+                    .push(Selection {
+                        id: selection.id,
+                        start,
+                        end,
+                        reversed: selection.reversed,
+                        goal: selection.goal,
+                    });
+
+                cursor.next(&());
+            }
+        }
+
+        for (buffer_id, mut selections) in selections_by_buffer {
+            self.buffers.borrow()[&buffer_id]
+                .buffer
+                .update(cx, |buffer, cx| {
+                    selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer).unwrap());
+                    let mut selections = selections.into_iter().peekable();
+                    let merged_selections = Arc::from_iter(iter::from_fn(|| {
+                        let mut selection = selections.next()?;
+                        while let Some(next_selection) = selections.peek() {
+                            if selection
+                                .end
+                                .cmp(&next_selection.start, buffer)
+                                .unwrap()
+                                .is_ge()
+                            {
+                                let next_selection = selections.next().unwrap();
+                                if next_selection
+                                    .end
+                                    .cmp(&selection.end, buffer)
+                                    .unwrap()
+                                    .is_ge()
+                                {
+                                    selection.end = next_selection.end;
+                                }
+                            } else {
+                                break;
+                            }
+                        }
+                        Some(selection)
+                    }));
+                    buffer.set_active_selections(merged_selections, cx);
+                });
+        }
+    }
+
+    pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
+        for buffer in self.buffers.borrow().values() {
+            buffer
+                .buffer
+                .update(cx, |buffer, cx| buffer.remove_active_selections(cx));
+        }
+    }
+
+    pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+        if let Some(buffer) = self.as_singleton() {
+            return buffer.update(cx, |buffer, cx| buffer.undo(cx));
+        }
+
+        while let Some(transaction) = self.history.pop_undo() {
+            let mut undone = false;
+            for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions {
+                if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) {
+                    undone |= buffer.update(cx, |buf, cx| {
+                        buf.undo_transaction(*buffer_transaction_id, cx)
+                    });
+                }
+            }
+
+            if undone {
+                return Some(transaction.id);
+            }
+        }
+
+        None
+    }
+
+    pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+        if let Some(buffer) = self.as_singleton() {
+            return buffer.update(cx, |buffer, cx| buffer.redo(cx));
+        }
+
+        while let Some(transaction) = self.history.pop_redo() {
+            let mut redone = false;
+            for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions {
+                if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) {
+                    redone |= buffer.update(cx, |buf, cx| {
+                        buf.redo_transaction(*buffer_transaction_id, cx)
+                    });
+                }
+            }
+
+            if redone {
+                return Some(transaction.id);
+            }
+        }
+
+        None
+    }
+
+    pub fn push_excerpt<O>(
+        &mut self,
+        props: ExcerptProperties<O>,
+        cx: &mut ModelContext<Self>,
+    ) -> ExcerptId
+    where
+        O: text::ToOffset,
+    {
+        self.insert_excerpt_after(&ExcerptId::max(), props, cx)
+    }
+
+    pub fn insert_excerpt_after<O>(
+        &mut self,
+        prev_excerpt_id: &ExcerptId,
+        props: ExcerptProperties<O>,
+        cx: &mut ModelContext<Self>,
+    ) -> ExcerptId
+    where
+        O: text::ToOffset,
+    {
+        assert_eq!(self.history.transaction_depth, 0);
+        self.sync(cx);
+
+        let buffer_snapshot = props.buffer.read(cx).snapshot();
+        let range = buffer_snapshot.anchor_before(&props.range.start)
+            ..buffer_snapshot.anchor_after(&props.range.end);
+        let mut snapshot = self.snapshot.borrow_mut();
+        let mut cursor = snapshot.excerpts.cursor::<Option<&ExcerptId>>();
+        let mut new_excerpts = cursor.slice(&Some(prev_excerpt_id), Bias::Right, &());
+
+        let mut prev_id = ExcerptId::min();
+        let edit_start = new_excerpts.summary().text.bytes;
+        new_excerpts.update_last(
+            |excerpt| {
+                excerpt.has_trailing_newline = true;
+                prev_id = excerpt.id.clone();
+            },
+            &(),
+        );
+
+        let mut next_id = ExcerptId::max();
+        if let Some(next_excerpt) = cursor.item() {
+            next_id = next_excerpt.id.clone();
+        }
+
+        let id = ExcerptId::between(&prev_id, &next_id);
+
+        let mut buffers = self.buffers.borrow_mut();
+        let buffer_state = buffers
+            .entry(props.buffer.id())
+            .or_insert_with(|| BufferState {
+                last_version: buffer_snapshot.version().clone(),
+                last_parse_count: buffer_snapshot.parse_count(),
+                last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(),
+                excerpts: Default::default(),
+                _subscriptions: [
+                    cx.observe(&props.buffer, |_, _, cx| cx.notify()),
+                    cx.subscribe(&props.buffer, Self::on_buffer_event),
+                ],
+                buffer: props.buffer.clone(),
+            });
+        if let Err(ix) = buffer_state.excerpts.binary_search(&id) {
+            buffer_state.excerpts.insert(ix, id.clone());
+        }
+
+        let excerpt = Excerpt::new(
+            id.clone(),
+            props.buffer.id(),
+            buffer_snapshot,
+            range,
+            cursor.item().is_some(),
+        );
+        new_excerpts.push(excerpt, &());
+        let edit_end = new_excerpts.summary().text.bytes;
+
+        new_excerpts.push_tree(cursor.suffix(&()), &());
+        drop(cursor);
+        snapshot.excerpts = new_excerpts;
+
+        self.subscriptions.publish_mut([Edit {
+            old: edit_start..edit_start,
+            new: edit_start..edit_end,
+        }]);
+
+        cx.notify();
+        id
+    }
+
+    pub fn excerpt_ids_for_buffer(&self, buffer: &ModelHandle<Buffer>) -> Vec<ExcerptId> {
+        self.buffers
+            .borrow()
+            .get(&buffer.id())
+            .map_or(Vec::new(), |state| state.excerpts.clone())
+    }
+
+    pub fn remove_excerpts<'a>(
+        &mut self,
+        excerpt_ids: impl IntoIterator<Item = &'a ExcerptId>,
+        cx: &mut ModelContext<Self>,
+    ) {
+        let mut buffers = self.buffers.borrow_mut();
+        let mut snapshot = self.snapshot.borrow_mut();
+        let mut new_excerpts = SumTree::new();
+        let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>();
+        let mut edits = Vec::new();
+        let mut excerpt_ids = excerpt_ids.into_iter().peekable();
+
+        while let Some(mut excerpt_id) = excerpt_ids.next() {
+            // Seek to the next excerpt to remove, preserving any preceding excerpts.
+            new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &());
+            if let Some(mut excerpt) = cursor.item() {
+                if excerpt.id != *excerpt_id {
+                    continue;
+                }
+                let mut old_start = cursor.start().1;
+
+                // Skip over the removed excerpt.
+                loop {
+                    if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) {
+                        buffer_state.excerpts.retain(|id| id != excerpt_id);
+                        if buffer_state.excerpts.is_empty() {
+                            buffers.remove(&excerpt.buffer_id);
+                        }
+                    }
+                    cursor.next(&());
+
+                    // Skip over any subsequent excerpts that are also removed.
+                    if let Some(&next_excerpt_id) = excerpt_ids.peek() {
+                        if let Some(next_excerpt) = cursor.item() {
+                            if next_excerpt.id == *next_excerpt_id {
+                                excerpt = next_excerpt;
+                                excerpt_id = excerpt_ids.next().unwrap();
+                                continue;
+                            }
+                        }
+                    }
+
+                    break;
+                }
+
+                // When removing the last excerpt, remove the trailing newline from
+                // the previous excerpt.
+                if cursor.item().is_none() && old_start > 0 {
+                    old_start -= 1;
+                    new_excerpts.update_last(|e| e.has_trailing_newline = false, &());
+                }
+
+                // Push an edit for the removal of this run of excerpts.
+                let old_end = cursor.start().1;
+                let new_start = new_excerpts.summary().text.bytes;
+                edits.push(Edit {
+                    old: old_start..old_end,
+                    new: new_start..new_start,
+                });
+            }
+        }
+        new_excerpts.push_tree(cursor.suffix(&()), &());
+        drop(cursor);
+        snapshot.excerpts = new_excerpts;
+        self.subscriptions.publish_mut(edits);
+        cx.notify();
+    }
+
+    fn on_buffer_event(
+        &mut self,
+        _: ModelHandle<Buffer>,
+        event: &Event,
+        cx: &mut ModelContext<Self>,
+    ) {
+        cx.emit(event.clone());
+    }
+
+    pub fn save(&mut self, cx: &mut ModelContext<Self>) -> Result<Task<Result<()>>> {
+        let mut save_tasks = Vec::new();
+        for BufferState { buffer, .. } in self.buffers.borrow().values() {
+            save_tasks.push(buffer.update(cx, |buffer, cx| buffer.save(cx))?);
+        }
+
+        Ok(cx.spawn(|_, _| async move {
+            for save in save_tasks {
+                save.await?;
+            }
+            Ok(())
+        }))
+    }
+
+    pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc<Language>> {
+        self.buffers
+            .borrow()
+            .values()
+            .next()
+            .and_then(|state| state.buffer.read(cx).language())
+    }
+
+    pub fn file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn File> {
+        self.as_singleton()?.read(cx).file()
+    }
+
+    #[cfg(test)]
+    pub fn is_parsing(&self, cx: &AppContext) -> bool {
+        self.as_singleton().unwrap().read(cx).is_parsing()
+    }
+
+    fn sync(&self, cx: &AppContext) {
+        let mut snapshot = self.snapshot.borrow_mut();
+        let mut excerpts_to_edit = Vec::new();
+        let mut reparsed = false;
+        let mut diagnostics_updated = false;
+        let mut is_dirty = false;
+        let mut has_conflict = false;
+        let mut buffers = self.buffers.borrow_mut();
+        for buffer_state in buffers.values_mut() {
+            let buffer = buffer_state.buffer.read(cx);
+            let version = buffer.version();
+            let parse_count = buffer.parse_count();
+            let diagnostics_update_count = buffer.diagnostics_update_count();
+
+            let buffer_edited = version.gt(&buffer_state.last_version);
+            let buffer_reparsed = parse_count > buffer_state.last_parse_count;
+            let buffer_diagnostics_updated =
+                diagnostics_update_count > buffer_state.last_diagnostics_update_count;
+            if buffer_edited || buffer_reparsed || buffer_diagnostics_updated {
+                buffer_state.last_version = version;
+                buffer_state.last_parse_count = parse_count;
+                buffer_state.last_diagnostics_update_count = diagnostics_update_count;
+                excerpts_to_edit.extend(
+                    buffer_state
+                        .excerpts
+                        .iter()
+                        .map(|excerpt_id| (excerpt_id, buffer_state.buffer.clone(), buffer_edited)),
+                );
+            }
+
+            reparsed |= buffer_reparsed;
+            diagnostics_updated |= buffer_diagnostics_updated;
+            is_dirty |= buffer.is_dirty();
+            has_conflict |= buffer.has_conflict();
+        }
+        if reparsed {
+            snapshot.parse_count += 1;
+        }
+        if diagnostics_updated {
+            snapshot.diagnostics_update_count += 1;
+        }
+        snapshot.is_dirty = is_dirty;
+        snapshot.has_conflict = has_conflict;
+
+        excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _, _)| *excerpt_id);
+
+        let mut edits = Vec::new();
+        let mut new_excerpts = SumTree::new();
+        let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>();
+
+        for (id, buffer, buffer_edited) in excerpts_to_edit {
+            new_excerpts.push_tree(cursor.slice(&Some(id), Bias::Left, &()), &());
+            let old_excerpt = cursor.item().unwrap();
+            let buffer_id = buffer.id();
+            let buffer = buffer.read(cx);
+
+            let mut new_excerpt;
+            if buffer_edited {
+                edits.extend(
+                    buffer
+                        .edits_since_in_range::<usize>(
+                            old_excerpt.buffer.version(),
+                            old_excerpt.range.clone(),
+                        )
+                        .map(|mut edit| {
+                            let excerpt_old_start = cursor.start().1;
+                            let excerpt_new_start = new_excerpts.summary().text.bytes;
+                            edit.old.start += excerpt_old_start;
+                            edit.old.end += excerpt_old_start;
+                            edit.new.start += excerpt_new_start;
+                            edit.new.end += excerpt_new_start;
+                            edit
+                        }),
+                );
+
+                new_excerpt = Excerpt::new(
+                    id.clone(),
+                    buffer_id,
+                    buffer.snapshot(),
+                    old_excerpt.range.clone(),
+                    old_excerpt.has_trailing_newline,
+                );
+            } else {
+                new_excerpt = old_excerpt.clone();
+                new_excerpt.buffer = buffer.snapshot();
+            }
+
+            new_excerpts.push(new_excerpt, &());
+            cursor.next(&());
+        }
+        new_excerpts.push_tree(cursor.suffix(&()), &());
+
+        drop(cursor);
+        snapshot.excerpts = new_excerpts;
+
+        self.subscriptions.publish(edits);
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl MultiBuffer {
+    pub fn randomly_edit(
+        &mut self,
+        rng: &mut impl rand::Rng,
+        count: usize,
+        cx: &mut ModelContext<Self>,
+    ) {
+        use text::RandomCharIter;
+
+        let snapshot = self.read(cx);
+        let mut old_ranges: Vec<Range<usize>> = Vec::new();
+        for _ in 0..count {
+            let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
+            if last_end > snapshot.len() {
+                break;
+            }
+            let end_ix = snapshot.clip_offset(rng.gen_range(0..=last_end), Bias::Right);
+            let start_ix = snapshot.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+            old_ranges.push(start_ix..end_ix);
+        }
+        let new_text_len = rng.gen_range(0..10);
+        let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
+        log::info!("mutating multi-buffer at {:?}: {:?}", old_ranges, new_text);
+        drop(snapshot);
+
+        self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
+    }
+}
+
+impl Entity for MultiBuffer {
+    type Event = language::Event;
+}
+
+impl MultiBufferSnapshot {
+    pub fn text(&self) -> String {
+        self.chunks(0..self.len(), None)
+            .map(|chunk| chunk.text)
+            .collect()
+    }
+
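+    // Iterate characters in reverse, starting at `position`, crossing excerpt
+    // boundaries and emitting each excerpt's synthetic trailing newline.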
+    pub fn reversed_chars_at<'a, T: ToOffset>(
+        &'a self,
+        position: T,
+    ) -> impl Iterator<Item = char> + 'a {
+        let mut offset = position.to_offset(self);
+        let mut cursor = self.excerpts.cursor::<usize>();
+        cursor.seek(&offset, Bias::Left, &());
+        let mut excerpt_chunks = cursor.item().map(|excerpt| {
+            let end_before_footer = cursor.start() + excerpt.text_summary.bytes;
+            let start = excerpt.range.start.to_offset(&excerpt.buffer);
+            let end = start + (cmp::min(offset, end_before_footer) - cursor.start());
+            excerpt.buffer.reversed_chunks_in_range(start..end)
+        });
+        iter::from_fn(move || {
+            if offset == *cursor.start() {
+                cursor.prev(&());
+                let excerpt = cursor.item()?;
+                excerpt_chunks = Some(
+                    excerpt
+                        .buffer
+                        .reversed_chunks_in_range(excerpt.range.clone()),
+                );
+            }
+
+            let excerpt = cursor.item().unwrap();
+            if offset == cursor.end(&()) && excerpt.has_trailing_newline {
+                offset -= 1;
+                Some("\n")
+            } else {
+                let chunk = excerpt_chunks.as_mut().unwrap().next().unwrap();
+                offset -= chunk.len();
+                Some(chunk)
+            }
+        })
+        .flat_map(|c| c.chars().rev())
+    }
+
+    pub fn chars_at<'a, T: ToOffset>(&'a self, position: T) -> impl Iterator<Item = char> + 'a {
+        let offset = position.to_offset(self);
+        self.text_for_range(offset..self.len())
+            .flat_map(|chunk| chunk.chars())
+    }
+
+    pub fn text_for_range<'a, T: ToOffset>(
+        &'a self,
+        range: Range<T>,
+    ) -> impl Iterator<Item = &'a str> {
+        self.chunks(range, None).map(|chunk| chunk.text)
+    }
+
+    pub fn is_line_blank(&self, row: u32) -> bool {
+        self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
+            .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
+    }
+
+    pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
+    where
+        T: ToOffset,
+    {
+        let position = position.to_offset(self);
+        position == self.clip_offset(position, Bias::Left)
+            && self
+                .bytes_in_range(position..self.len())
+                .flatten()
+                .copied()
+                .take(needle.len())
+                .eq(needle.bytes())
+    }
+
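+    // Returns the underlying buffer snapshot when this multi-buffer consists
+    // of exactly one excerpt.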
+    fn as_singleton(&self) -> Option<&BufferSnapshot> {
+        let mut excerpts = self.excerpts.iter();
+        let buffer = excerpts.next().map(|excerpt| &excerpt.buffer);
+        if excerpts.next().is_none() {
+            buffer
+        } else {
+            None
+        }
+    }
+
+    pub fn len(&self) -> usize {
+        self.excerpts.summary().text.bytes
+    }
+
+    pub fn max_buffer_row(&self) -> u32 {
+        self.excerpts.summary().max_buffer_row
+    }
+
+    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
+        let mut cursor = self.excerpts.cursor::<usize>();
+        cursor.seek(&offset, Bias::Right, &());
+        let overshoot = if let Some(excerpt) = cursor.item() {
+            let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer);
+            let buffer_offset = excerpt
+                .buffer
+                .clip_offset(excerpt_start + (offset - cursor.start()), bias);
+            buffer_offset.saturating_sub(excerpt_start)
+        } else {
+            0
+        };
+        cursor.start() + overshoot
+    }
+
+    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
+        let mut cursor = self.excerpts.cursor::<Point>();
+        cursor.seek(&point, Bias::Right, &());
+        let overshoot = if let Some(excerpt) = cursor.item() {
+            let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer);
+            let buffer_point = excerpt
+                .buffer
+                .clip_point(excerpt_start + (point - cursor.start()), bias);
+            buffer_point.saturating_sub(excerpt_start)
+        } else {
+            Point::zero()
+        };
+        *cursor.start() + overshoot
+    }
+
+    pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+        let mut cursor = self.excerpts.cursor::<PointUtf16>();
+        cursor.seek(&point, Bias::Right, &());
+        let overshoot = if let Some(excerpt) = cursor.item() {
+            let excerpt_start = excerpt
+                .buffer
+                .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer));
+            let buffer_point = excerpt
+                .buffer
+                .clip_point_utf16(excerpt_start + (point - cursor.start()), bias);
+            buffer_point.saturating_sub(excerpt_start)
+        } else {
+            PointUtf16::zero()
+        };
+        *cursor.start() + overshoot
+    }
+
+    pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range<T>) -> MultiBufferBytes<'a> {
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        let mut excerpts = self.excerpts.cursor::<usize>();
+        excerpts.seek(&range.start, Bias::Right, &());
+
+        let mut chunk = &[][..];
+        let excerpt_bytes = if let Some(excerpt) = excerpts.item() {
+            let mut excerpt_bytes = excerpt
+                .bytes_in_range(range.start - excerpts.start()..range.end - excerpts.start());
+            chunk = excerpt_bytes.next().unwrap_or(&[][..]);
+            Some(excerpt_bytes)
+        } else {
+            None
+        };
+
+        MultiBufferBytes {
+            range,
+            excerpts,
+            excerpt_bytes,
+            chunk,
+        }
+    }
+
+    pub fn buffer_rows<'a>(&'a self, start_row: u32) -> MultiBufferRows<'a> {
+        let mut result = MultiBufferRows {
+            buffer_row_range: 0..0,
+            excerpts: self.excerpts.cursor(),
+        };
+        result.seek(start_row);
+        result
+    }
+
+    pub fn chunks<'a, T: ToOffset>(
+        &'a self,
+        range: Range<T>,
+        theme: Option<&'a SyntaxTheme>,
+    ) -> MultiBufferChunks<'a> {
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        let mut chunks = MultiBufferChunks {
+            range: range.clone(),
+            excerpts: self.excerpts.cursor(),
+            excerpt_chunks: None,
+            theme,
+        };
+        chunks.seek(range.start);
+        chunks
+    }
+
+    pub fn offset_to_point(&self, offset: usize) -> Point {
+        let mut cursor = self.excerpts.cursor::<(usize, Point)>();
+        cursor.seek(&offset, Bias::Right, &());
+        if let Some(excerpt) = cursor.item() {
+            let (start_offset, start_point) = cursor.start();
+            let overshoot = offset - start_offset;
+            let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer);
+            let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer);
+            let buffer_point = excerpt
+                .buffer
+                .offset_to_point(excerpt_start_offset + overshoot);
+            *start_point + (buffer_point - excerpt_start_point)
+        } else {
+            self.excerpts.summary().text.lines
+        }
+    }
+
+    pub fn point_to_offset(&self, point: Point) -> usize {
+        let mut cursor = self.excerpts.cursor::<(Point, usize)>();
+        cursor.seek(&point, Bias::Right, &());
+        if let Some(excerpt) = cursor.item() {
+            let (start_point, start_offset) = cursor.start();
+            let overshoot = point - start_point;
+            let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer);
+            let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer);
+            let buffer_offset = excerpt
+                .buffer
+                .point_to_offset(excerpt_start_point + overshoot);
+            *start_offset + buffer_offset - excerpt_start_offset
+        } else {
+            self.excerpts.summary().text.bytes
+        }
+    }
+
+    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+        let mut cursor = self.excerpts.cursor::<(PointUtf16, usize)>();
+        cursor.seek(&point, Bias::Right, &());
+        if let Some(excerpt) = cursor.item() {
+            let (start_point, start_offset) = cursor.start();
+            let overshoot = point - start_point;
+            let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer);
+            let excerpt_start_point = excerpt
+                .buffer
+                .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer));
+            let buffer_offset = excerpt
+                .buffer
+                .point_utf16_to_offset(excerpt_start_point + overshoot);
+            *start_offset + (buffer_offset - excerpt_start_offset)
+        } else {
+            self.excerpts.summary().text.bytes
+        }
+    }
+
+    pub fn indent_column_for_line(&self, row: u32) -> u32 {
+        if let Some((buffer, range)) = self.buffer_line_for_row(row) {
+            buffer
+                .indent_column_for_line(range.start.row)
+                .min(range.end.column)
+                .saturating_sub(range.start.column)
+        } else {
+            0
+        }
+    }
+
+    pub fn line_len(&self, row: u32) -> u32 {
+        if let Some((_, range)) = self.buffer_line_for_row(row) {
+            range.end.column - range.start.column
+        } else {
+            0
+        }
+    }
+
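+    // Resolve a multi-buffer row to its underlying buffer and the portion of
+    // that buffer's line covered by the excerpt.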
+    fn buffer_line_for_row(&self, row: u32) -> Option<(&BufferSnapshot, Range<Point>)> {
+        let mut cursor = self.excerpts.cursor::<Point>();
+        cursor.seek(&Point::new(row, 0), Bias::Right, &());
+        if let Some(excerpt) = cursor.item() {
+            let overshoot = row - cursor.start().row;
+            let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer);
+            let excerpt_end = excerpt.range.end.to_point(&excerpt.buffer);
+            let buffer_row = excerpt_start.row + overshoot;
+            let line_start = Point::new(buffer_row, 0);
+            let line_end = Point::new(buffer_row, excerpt.buffer.line_len(buffer_row));
+            return Some((
+                &excerpt.buffer,
+                line_start.max(excerpt_start)..line_end.min(excerpt_end),
+            ));
+        }
+        None
+    }
+
+    pub fn max_point(&self) -> Point {
+        self.text_summary().lines
+    }
+
+    pub fn text_summary(&self) -> TextSummary {
+        self.excerpts.summary().text
+    }
+
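+    // Summarize the text in `range`, which may span multiple excerpts,
+    // accounting for the synthetic newline appended after each excerpt.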
+    pub fn text_summary_for_range<'a, D, O>(&'a self, range: Range<O>) -> D
+    where
+        D: TextDimension,
+        O: ToOffset,
+    {
+        let mut summary = D::default();
+        let mut range = range.start.to_offset(self)..range.end.to_offset(self);
+        let mut cursor = self.excerpts.cursor::<usize>();
+        cursor.seek(&range.start, Bias::Right, &());
+        if let Some(excerpt) = cursor.item() {
+            let mut end_before_newline = cursor.end(&());
+            if excerpt.has_trailing_newline {
+                end_before_newline -= 1;
+            }
+
+            let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer);
+            let start_in_excerpt = excerpt_start + (range.start - cursor.start());
+            let end_in_excerpt =
+                excerpt_start + (cmp::min(end_before_newline, range.end) - cursor.start());
+            summary.add_assign(
+                &excerpt
+                    .buffer
+                    .text_summary_for_range(start_in_excerpt..end_in_excerpt),
+            );
+
+            if range.end > end_before_newline {
+                summary.add_assign(&D::from_text_summary(&TextSummary {
+                    bytes: 1,
+                    lines: Point::new(1, 0),
+                    lines_utf16: PointUtf16::new(1, 0),
+                    first_line_chars: 0,
+                    last_line_chars: 0,
+                    longest_row: 0,
+                    longest_row_chars: 0,
+                }));
+            }
+
+            cursor.next(&());
+        }
+
+        if range.end > *cursor.start() {
+            summary.add_assign(&D::from_text_summary(&cursor.summary::<_, TextSummary>(
+                &range.end,
+                Bias::Right,
+                &(),
+            )));
+            if let Some(excerpt) = cursor.item() {
+                range.end = cmp::max(*cursor.start(), range.end);
+
+                let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer);
+                let end_in_excerpt = excerpt_start + (range.end - cursor.start());
+                summary.add_assign(
+                    &excerpt
+                        .buffer
+                        .text_summary_for_range(excerpt_start..end_in_excerpt),
+                );
+            }
+        }
+
+        summary
+    }
+
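+    // Resolve an anchor to the requested text dimension by locating its
+    // excerpt and adding the anchor's position within that excerpt's buffer.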
+    pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
+    where
+        D: TextDimension + Ord + Sub<D, Output = D>,
+    {
+        let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
+        cursor.seek(&Some(&anchor.excerpt_id), Bias::Left, &());
+        if cursor.item().is_none() {
+            cursor.next(&());
+        }
+
+        let mut position = D::from_text_summary(&cursor.start().text);
+        if let Some(excerpt) = cursor.item() {
+            if excerpt.id == anchor.excerpt_id {
+                let excerpt_buffer_start = excerpt.range.start.summary::<D>(&excerpt.buffer);
+                let buffer_position = anchor.text_anchor.summary::<D>(&excerpt.buffer);
+                if buffer_position > excerpt_buffer_start {
+                    position.add_assign(&(buffer_position - excerpt_buffer_start));
+                }
+            }
+        }
+        position
+    }
+
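+    // Resolve a batch of anchors, assumed to be ordered by excerpt, in a
+    // single forward pass over the excerpts.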
+    pub fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec<D>
+    where
+        D: TextDimension + Ord + Sub<D, Output = D>,
+        I: 'a + IntoIterator<Item = &'a Anchor>,
+    {
+        let mut anchors = anchors.into_iter().peekable();
+        let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
+        let mut summaries = Vec::new();
+        while let Some(anchor) = anchors.peek() {
+            let excerpt_id = &anchor.excerpt_id;
+            let excerpt_anchors = iter::from_fn(|| {
+                let anchor = anchors.peek()?;
+                if anchor.excerpt_id == *excerpt_id {
+                    Some(&anchors.next().unwrap().text_anchor)
+                } else {
+                    None
+                }
+            });
+
+            cursor.seek_forward(&Some(excerpt_id), Bias::Left, &());
+            if cursor.item().is_none() {
+                cursor.next(&());
+            }
+
+            let position = D::from_text_summary(&cursor.start().text);
+            if let Some(excerpt) = cursor.item() {
+                if excerpt.id == *excerpt_id {
+                    let excerpt_buffer_start = excerpt.range.start.summary::<D>(&excerpt.buffer);
+                    summaries.extend(
+                        excerpt
+                            .buffer
+                            .summaries_for_anchors::<D, _>(excerpt_anchors)
+                            .map(move |summary| {
+                                let mut position = position.clone();
+                                let excerpt_buffer_start = excerpt_buffer_start.clone();
+                                if summary > excerpt_buffer_start {
+                                    position.add_assign(&(summary - excerpt_buffer_start));
+                                }
+                                position
+                            }),
+                    );
+                    continue;
+                }
+            }
+
+            summaries.extend(excerpt_anchors.map(|_| position.clone()));
+        }
+
+        summaries
+    }
+
+    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
+        self.anchor_at(position, Bias::Left)
+    }
+
+    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
+        self.anchor_at(position, Bias::Right)
+    }
+
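+    // Create an anchor at `position`, tied to the containing excerpt's
+    // underlying buffer. Positions on an excerpt's trailing newline are
+    // clipped to the end of the excerpt's content.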
+    pub fn anchor_at<T: ToOffset>(&self, position: T, mut bias: Bias) -> Anchor {
+        let offset = position.to_offset(self);
+        let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>();
+        cursor.seek(&offset, Bias::Right, &());
+        if cursor.item().is_none() && offset == cursor.start().0 && bias == Bias::Left {
+            cursor.prev(&());
+        }
+        if let Some(excerpt) = cursor.item() {
+            let mut overshoot = offset.saturating_sub(cursor.start().0);
+            if excerpt.has_trailing_newline && offset == cursor.end(&()).0 {
+                overshoot -= 1;
+                bias = Bias::Right;
+            }
+
+            let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer);
+            let text_anchor =
+                excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias));
+            Anchor {
+                excerpt_id: excerpt.id.clone(),
+                text_anchor,
+            }
+        } else if offset == 0 && bias == Bias::Left {
+            Anchor::min()
+        } else {
+            Anchor::max()
+        }
+    }
+
+    pub fn anchor_in_excerpt(&self, excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Anchor {
+        let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
+        cursor.seek(&Some(&excerpt_id), Bias::Left, &());
+        if let Some(excerpt) = cursor.item() {
+            if excerpt.id == excerpt_id {
+                let text_anchor = excerpt.clip_anchor(text_anchor);
+                drop(cursor);
+                return Anchor {
+                    excerpt_id,
+                    text_anchor,
+                };
+            }
+        }
+        panic!("excerpt not found");
+    }
+
+    pub fn parse_count(&self) -> usize {
+        self.parse_count
+    }
+
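+    // Find the ranges of the enclosing bracket pair, but only when both ends
+    // of `range` fall within the same excerpt.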
+    pub fn enclosing_bracket_ranges<T: ToOffset>(
+        &self,
+        range: Range<T>,
+    ) -> Option<(Range<usize>, Range<usize>)> {
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+
+        let mut cursor = self.excerpts.cursor::<usize>();
+        cursor.seek(&range.start, Bias::Right, &());
+        let start_excerpt = cursor.item();
+
+        cursor.seek(&range.end, Bias::Right, &());
+        let end_excerpt = cursor.item();
+
+        start_excerpt
+            .zip(end_excerpt)
+            .and_then(|(start_excerpt, end_excerpt)| {
+                if start_excerpt.id != end_excerpt.id {
+                    return None;
+                }
+
+                let excerpt_buffer_start =
+                    start_excerpt.range.start.to_offset(&start_excerpt.buffer);
+                let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes;
+
+                let start_in_buffer =
+                    excerpt_buffer_start + range.start.saturating_sub(*cursor.start());
+                let end_in_buffer =
+                    excerpt_buffer_start + range.end.saturating_sub(*cursor.start());
+                let (mut start_bracket_range, mut end_bracket_range) = start_excerpt
+                    .buffer
+                    .enclosing_bracket_ranges(start_in_buffer..end_in_buffer)?;
+
+                if start_bracket_range.start >= excerpt_buffer_start
+                    && end_bracket_range.end < excerpt_buffer_end
+                {
+                    start_bracket_range.start =
+                        cursor.start() + (start_bracket_range.start - excerpt_buffer_start);
+                    start_bracket_range.end =
+                        cursor.start() + (start_bracket_range.end - excerpt_buffer_start);
+                    end_bracket_range.start =
+                        cursor.start() + (end_bracket_range.start - excerpt_buffer_start);
+                    end_bracket_range.end =
+                        cursor.start() + (end_bracket_range.end - excerpt_buffer_start);
+                    Some((start_bracket_range, end_bracket_range))
+                } else {
+                    None
+                }
+            })
+    }
+
+    pub fn diagnostics_update_count(&self) -> usize {
+        self.diagnostics_update_count
+    }
+
+    pub fn language(&self) -> Option<&Arc<Language>> {
+        self.excerpts
+            .iter()
+            .next()
+            .and_then(|excerpt| excerpt.buffer.language())
+    }
+
+    pub fn is_dirty(&self) -> bool {
+        self.is_dirty
+    }
+
+    pub fn has_conflict(&self) -> bool {
+        self.has_conflict
+    }
+
+    pub fn diagnostic_group<'a, O>(
+        &'a self,
+        provider_name: &'a str,
+        group_id: usize,
+    ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
+    where
+        O: text::FromAnchor + 'a,
+    {
+        self.as_singleton()
+            .into_iter()
+            .flat_map(move |buffer| buffer.diagnostic_group(provider_name, group_id))
+    }
+
+    pub fn diagnostics_in_range<'a, T, O>(
+        &'a self,
+        range: Range<T>,
+    ) -> impl Iterator<Item = (&'a str, DiagnosticEntry<O>)> + 'a
+    where
+        T: 'a + ToOffset,
+        O: 'a + text::FromAnchor,
+    {
+        self.as_singleton().into_iter().flat_map(move |buffer| {
+            buffer.diagnostics_in_range(range.start.to_offset(self)..range.end.to_offset(self))
+        })
+    }
+
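+    // Find the range of the enclosing syntax ancestor node, clamped to the
+    // containing excerpt. Returns None when `range` spans excerpts.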
+    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+
+        let mut cursor = self.excerpts.cursor::<usize>();
+        cursor.seek(&range.start, Bias::Right, &());
+        let start_excerpt = cursor.item();
+
+        cursor.seek(&range.end, Bias::Right, &());
+        let end_excerpt = cursor.item();
+
+        start_excerpt
+            .zip(end_excerpt)
+            .and_then(|(start_excerpt, end_excerpt)| {
+                if start_excerpt.id != end_excerpt.id {
+                    return None;
+                }
+
+                let excerpt_buffer_start =
+                    start_excerpt.range.start.to_offset(&start_excerpt.buffer);
+                let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes;
+
+                let start_in_buffer =
+                    excerpt_buffer_start + range.start.saturating_sub(*cursor.start());
+                let end_in_buffer =
+                    excerpt_buffer_start + range.end.saturating_sub(*cursor.start());
+                let mut ancestor_buffer_range = start_excerpt
+                    .buffer
+                    .range_for_syntax_ancestor(start_in_buffer..end_in_buffer)?;
+                ancestor_buffer_range.start =
+                    cmp::max(ancestor_buffer_range.start, excerpt_buffer_start);
+                ancestor_buffer_range.end = cmp::min(ancestor_buffer_range.end, excerpt_buffer_end);
+
+                let start = cursor.start() + (ancestor_buffer_range.start - excerpt_buffer_start);
+                let end = cursor.start() + (ancestor_buffer_range.end - excerpt_buffer_start);
+                Some(start..end)
+            })
+    }
+
+    fn buffer_snapshot_for_excerpt<'a>(
+        &'a self,
+        excerpt_id: &'a ExcerptId,
+    ) -> Option<&'a BufferSnapshot> {
+        let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
+        cursor.seek(&Some(excerpt_id), Bias::Left, &());
+        if let Some(excerpt) = cursor.item() {
+            if excerpt.id == *excerpt_id {
+                return Some(&excerpt.buffer);
+            }
+        }
+        None
+    }
+
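+    // Yield collaborators' selections from every excerpt intersecting the
+    // given anchor range, clipping each selection to that range.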
+    pub fn remote_selections_in_range<'a>(
+        &'a self,
+        range: &'a Range<Anchor>,
+    ) -> impl 'a + Iterator<Item = (ReplicaId, Selection<Anchor>)> {
+        let mut cursor = self.excerpts.cursor::<Option<&ExcerptId>>();
+        cursor.seek(&Some(&range.start.excerpt_id), Bias::Left, &());
+        cursor
+            .take_while(move |excerpt| excerpt.id <= range.end.excerpt_id)
+            .flat_map(move |excerpt| {
+                let mut query_range = excerpt.range.start.clone()..excerpt.range.end.clone();
+                if excerpt.id == range.start.excerpt_id {
+                    query_range.start = range.start.text_anchor.clone();
+                }
+                if excerpt.id == range.end.excerpt_id {
+                    query_range.end = range.end.text_anchor.clone();
+                }
+
+                excerpt
+                    .buffer
+                    .remote_selections_in_range(query_range)
+                    .flat_map(move |(replica_id, selections)| {
+                        selections.map(move |selection| {
+                            let mut start = Anchor {
+                                excerpt_id: excerpt.id.clone(),
+                                text_anchor: selection.start.clone(),
+                            };
+                            let mut end = Anchor {
+                                excerpt_id: excerpt.id.clone(),
+                                text_anchor: selection.end.clone(),
+                            };
+                            if range.start.cmp(&start, self).unwrap().is_gt() {
+                                start = range.start.clone();
+                            }
+                            if range.end.cmp(&end, self).unwrap().is_lt() {
+                                end = range.end.clone();
+                            }
+
+                            (
+                                replica_id,
+                                Selection {
+                                    id: selection.id,
+                                    start,
+                                    end,
+                                    reversed: selection.reversed,
+                                    goal: selection.goal,
+                                },
+                            )
+                        })
+                    })
+            })
+    }
+}
+
+impl History {
+    fn start_transaction(&mut self, now: Instant) -> Option<TransactionId> {
+        self.transaction_depth += 1;
+        if self.transaction_depth == 1 {
+            let id = post_inc(&mut self.next_transaction_id);
+            self.undo_stack.push(Transaction {
+                id,
+                buffer_transactions: Default::default(),
+                first_edit_at: now,
+                last_edit_at: now,
+            });
+            Some(id)
+        } else {
+            None
+        }
+    }
+
+    fn end_transaction(
+        &mut self,
+        now: Instant,
+        buffer_transactions: HashSet<(usize, TransactionId)>,
+    ) -> bool {
+        assert_ne!(self.transaction_depth, 0);
+        self.transaction_depth -= 1;
+        if self.transaction_depth == 0 {
+            if buffer_transactions.is_empty() {
+                self.undo_stack.pop();
+                false
+            } else {
+                let transaction = self.undo_stack.last_mut().unwrap();
+                transaction.last_edit_at = now;
+                transaction.buffer_transactions.extend(buffer_transactions);
+                true
+            }
+        } else {
+            false
+        }
+    }
+
+    fn pop_undo(&mut self) -> Option<&Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(transaction) = self.undo_stack.pop() {
+            self.redo_stack.push(transaction);
+            self.redo_stack.last()
+        } else {
+            None
+        }
+    }
+
+    fn pop_redo(&mut self) -> Option<&Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(transaction) = self.redo_stack.pop() {
+            self.undo_stack.push(transaction);
+            self.undo_stack.last()
+        } else {
+            None
+        }
+    }
+
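+    // Group transactions on the undo stack whose edits occurred within
+    // `group_interval` of one another, returning the id of the last
+    // remaining transaction.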
+    fn group(&mut self) -> Option<TransactionId> {
+        let mut new_len = self.undo_stack.len();
+        let mut transactions = self.undo_stack.iter_mut();
+
+        if let Some(mut transaction) = transactions.next_back() {
+            while let Some(prev_transaction) = transactions.next_back() {
+                if transaction.first_edit_at - prev_transaction.last_edit_at <= self.group_interval
+                {
+                    transaction = prev_transaction;
+                    new_len -= 1;
+                } else {
+                    break;
+                }
+            }
+        }
+
+        let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len);
+        if let Some(last_transaction) = transactions_to_keep.last_mut() {
+            if let Some(transaction) = transactions_to_merge.last() {
+                last_transaction.last_edit_at = transaction.last_edit_at;
+            }
+        }
+
+        self.undo_stack.truncate(new_len);
+        self.undo_stack.last().map(|t| t.id)
+    }
+}
+
+impl Excerpt {
+    fn new(
+        id: ExcerptId,
+        buffer_id: usize,
+        buffer: BufferSnapshot,
+        range: Range<text::Anchor>,
+        has_trailing_newline: bool,
+    ) -> Self {
+        Excerpt {
+            id,
+            max_buffer_row: range.end.to_point(&buffer).row,
+            text_summary: buffer.text_summary_for_range::<TextSummary, _>(range.to_offset(&buffer)),
+            buffer_id,
+            buffer,
+            range,
+            has_trailing_newline,
+        }
+    }
+
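+    // Return highlighted chunks for a sub-range of this excerpt, emitting the
+    // trailing newline as a footer when the range extends past the excerpt's
+    // content.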
+    fn chunks_in_range<'a>(
+        &'a self,
+        range: Range<usize>,
+        theme: Option<&'a SyntaxTheme>,
+    ) -> ExcerptChunks<'a> {
+        let content_start = self.range.start.to_offset(&self.buffer);
+        let chunks_start = content_start + range.start;
+        let chunks_end = content_start + cmp::min(range.end, self.text_summary.bytes);
+
+        let footer_height = if self.has_trailing_newline
+            && range.start <= self.text_summary.bytes
+            && range.end > self.text_summary.bytes
+        {
+            1
+        } else {
+            0
+        };
+
+        let content_chunks = self.buffer.chunks(chunks_start..chunks_end, theme);
+
+        ExcerptChunks {
+            content_chunks,
+            footer_height,
+        }
+    }
+
+    fn bytes_in_range(&self, range: Range<usize>) -> ExcerptBytes {
+        let content_start = self.range.start.to_offset(&self.buffer);
+        let bytes_start = content_start + range.start;
+        let bytes_end = content_start + cmp::min(range.end, self.text_summary.bytes);
+        let footer_height = if self.has_trailing_newline
+            && range.start <= self.text_summary.bytes
+            && range.end > self.text_summary.bytes
+        {
+            1
+        } else {
+            0
+        };
+        let content_bytes = self.buffer.bytes_in_range(bytes_start..bytes_end);
+
+        ExcerptBytes {
+            content_bytes,
+            footer_height,
+        }
+    }
+
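+    // Clamp a buffer anchor so that it falls within this excerpt's range.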
+    fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor {
+        if text_anchor
+            .cmp(&self.range.start, &self.buffer)
+            .unwrap()
+            .is_lt()
+        {
+            self.range.start.clone()
+        } else if text_anchor
+            .cmp(&self.range.end, &self.buffer)
+            .unwrap()
+            .is_gt()
+        {
+            self.range.end.clone()
+        } else {
+            text_anchor
+        }
+    }
+}
+
+impl fmt::Debug for Excerpt {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Excerpt")
+            .field("id", &self.id)
+            .field("buffer_id", &self.buffer_id)
+            .field("range", &self.range)
+            .field("text_summary", &self.text_summary)
+            .field("has_trailing_newline", &self.has_trailing_newline)
+            .finish()
+    }
+}
+
+impl sum_tree::Item for Excerpt {
+    type Summary = ExcerptSummary;
+
+    fn summary(&self) -> Self::Summary {
+        let mut text = self.text_summary.clone();
+        if self.has_trailing_newline {
+            text += TextSummary::from("\n");
+        }
+        ExcerptSummary {
+            excerpt_id: self.id.clone(),
+            max_buffer_row: self.max_buffer_row,
+            text,
+        }
+    }
+}
+
+impl sum_tree::Summary for ExcerptSummary {
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &()) {
+        debug_assert!(summary.excerpt_id > self.excerpt_id);
+        self.excerpt_id = summary.excerpt_id.clone();
+        self.text.add_summary(&summary.text, &());
+        self.max_buffer_row = cmp::max(self.max_buffer_row, summary.max_buffer_row);
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for TextSummary {
+    fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+        *self += &summary.text;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for usize {
+    fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+        *self += summary.text.bytes;
+    }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for usize {
+    fn cmp(&self, cursor_location: &ExcerptSummary, _: &()) -> cmp::Ordering {
+        Ord::cmp(self, &cursor_location.text.bytes)
+    }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Option<&'a ExcerptId> {
+    fn cmp(&self, cursor_location: &ExcerptSummary, _: &()) -> cmp::Ordering {
+        Ord::cmp(self, &Some(&cursor_location.excerpt_id))
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Point {
+    fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+        *self += summary.text.lines;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for PointUtf16 {
+    fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+        *self += summary.text.lines_utf16
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> {
+    fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+        *self = Some(&summary.excerpt_id);
+    }
+}
+
+impl<'a> MultiBufferRows<'a> {
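+    // Position the iterator so that it yields buffer rows starting at the
+    // given multi-buffer row.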
+    pub fn seek(&mut self, row: u32) {
+        self.buffer_row_range = 0..0;
+
+        self.excerpts
+            .seek_forward(&Point::new(row, 0), Bias::Right, &());
+        if self.excerpts.item().is_none() {
+            self.excerpts.prev(&());
+
+            if self.excerpts.item().is_none() && row == 0 {
+                self.buffer_row_range = 0..1;
+                return;
+            }
+        }
+
+        if let Some(excerpt) = self.excerpts.item() {
+            let overshoot = row - self.excerpts.start().row;
+            let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer).row;
+            self.buffer_row_range.start = excerpt_start + overshoot;
+            self.buffer_row_range.end = excerpt_start + excerpt.text_summary.lines.row + 1;
+        }
+    }
+}
+
+impl<'a> Iterator for MultiBufferRows<'a> {
+    type Item = Option<u32>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        loop {
+            if !self.buffer_row_range.is_empty() {
+                let row = Some(self.buffer_row_range.start);
+                self.buffer_row_range.start += 1;
+                return Some(row);
+            }
+            self.excerpts.item()?;
+            self.excerpts.next(&());
+            let excerpt = self.excerpts.item()?;
+            self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row;
+            self.buffer_row_range.end =
+                self.buffer_row_range.start + excerpt.text_summary.lines.row + 1;
+        }
+    }
+}
+
+impl<'a> MultiBufferChunks<'a> {
+    pub fn offset(&self) -> usize {
+        self.range.start
+    }
+
+    pub fn seek(&mut self, offset: usize) {
+        self.range.start = offset;
+        self.excerpts.seek(&offset, Bias::Right, &());
+        if let Some(excerpt) = self.excerpts.item() {
+            self.excerpt_chunks = Some(excerpt.chunks_in_range(
+                self.range.start - self.excerpts.start()..self.range.end - self.excerpts.start(),
+                self.theme,
+            ));
+        } else {
+            self.excerpt_chunks = None;
+        }
+    }
+}
+
+impl<'a> Iterator for MultiBufferChunks<'a> {
+    type Item = Chunk<'a>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.range.is_empty() {
+            None
+        } else if let Some(chunk) = self.excerpt_chunks.as_mut()?.next() {
+            self.range.start += chunk.text.len();
+            Some(chunk)
+        } else {
+            self.excerpts.next(&());
+            let excerpt = self.excerpts.item()?;
+            self.excerpt_chunks = Some(
+                excerpt.chunks_in_range(0..self.range.end - self.excerpts.start(), self.theme),
+            );
+            self.next()
+        }
+    }
+}
+
+impl<'a> MultiBufferBytes<'a> {
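+    // Advance past `len` bytes, refilling the current chunk from the current
+    // excerpt or moving to the next excerpt once it is exhausted.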
+    fn consume(&mut self, len: usize) {
+        self.range.start += len;
+        self.chunk = &self.chunk[len..];
+
+        if !self.range.is_empty() && self.chunk.is_empty() {
+            if let Some(chunk) = self.excerpt_bytes.as_mut().and_then(|bytes| bytes.next()) {
+                self.chunk = chunk;
+            } else {
+                self.excerpts.next(&());
+                if let Some(excerpt) = self.excerpts.item() {
+                    let mut excerpt_bytes =
+                        excerpt.bytes_in_range(0..self.range.end - self.excerpts.start());
+                    self.chunk = excerpt_bytes.next().unwrap();
+                    self.excerpt_bytes = Some(excerpt_bytes);
+                }
+            }
+        }
+    }
+}
+
+impl<'a> Iterator for MultiBufferBytes<'a> {
+    type Item = &'a [u8];
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let chunk = self.chunk;
+        if chunk.is_empty() {
+            None
+        } else {
+            self.consume(chunk.len());
+            Some(chunk)
+        }
+    }
+}
+
+impl<'a> io::Read for MultiBufferBytes<'a> {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        let len = cmp::min(buf.len(), self.chunk.len());
+        buf[..len].copy_from_slice(&self.chunk[..len]);
+        if len > 0 {
+            self.consume(len);
+        }
+        Ok(len)
+    }
+}
+
+impl<'a> Iterator for ExcerptBytes<'a> {
+    type Item = &'a [u8];
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if let Some(chunk) = self.content_bytes.next() {
+            if !chunk.is_empty() {
+                return Some(chunk);
+            }
+        }
+
+        if self.footer_height > 0 {
+            let result = &NEWLINES[..self.footer_height];
+            self.footer_height = 0;
+            return Some(result);
+        }
+
+        None
+    }
+}
+
+impl<'a> Iterator for ExcerptChunks<'a> {
+    type Item = Chunk<'a>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if let Some(chunk) = self.content_chunks.next() {
+            return Some(chunk);
+        }
+
+        if self.footer_height > 0 {
+            let text = unsafe { str::from_utf8_unchecked(&NEWLINES[..self.footer_height]) };
+            self.footer_height = 0;
+            return Some(Chunk {
+                text,
+                ..Default::default()
+            });
+        }
+
+        None
+    }
+}
+
+impl ToOffset for Point {
+    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
+        snapshot.point_to_offset(*self)
+    }
+}
+
+impl ToOffset for PointUtf16 {
+    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
+        snapshot.point_utf16_to_offset(*self)
+    }
+}
+
+impl ToOffset for usize {
+    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
+        assert!(*self <= snapshot.len(), "offset is out of range");
+        *self
+    }
+}
+
+impl ToPoint for usize {
+    fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
+        snapshot.offset_to_point(*self)
+    }
+}
+
+impl ToPoint for Point {
+    fn to_point<'a>(&self, _: &MultiBufferSnapshot) -> Point {
+        *self
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use gpui::MutableAppContext;
+    use language::{Buffer, Rope};
+    use rand::prelude::*;
+    use std::env;
+    use text::{Point, RandomCharIter};
+    use util::test::sample_text;
+
+    #[gpui::test]
+    fn test_singleton_multibuffer(cx: &mut MutableAppContext) {
+        let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx));
+        let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
+
+        let snapshot = multibuffer.read(cx).snapshot(cx);
+        assert_eq!(snapshot.text(), buffer.read(cx).text());
+
+        assert_eq!(
+            snapshot.buffer_rows(0).collect::<Vec<_>>(),
+            (0..buffer.read(cx).row_count())
+                .map(Some)
+                .collect::<Vec<_>>()
+        );
+
+        buffer.update(cx, |buffer, cx| buffer.edit([1..3], "XXX\n", cx));
+        let snapshot = multibuffer.read(cx).snapshot(cx);
+
+        assert_eq!(snapshot.text(), buffer.read(cx).text());
+        assert_eq!(
+            snapshot.buffer_rows(0).collect::<Vec<_>>(),
+            (0..buffer.read(cx).row_count())
+                .map(Some)
+                .collect::<Vec<_>>()
+        );
+    }
+
+    #[gpui::test]
+    fn test_excerpt_buffer(cx: &mut MutableAppContext) {
+        let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx));
+        let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx));
+        let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
+
+        let subscription = multibuffer.update(cx, |multibuffer, cx| {
+            let subscription = multibuffer.subscribe();
+            multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer_1,
+                    range: Point::new(1, 2)..Point::new(2, 5),
+                },
+                cx,
+            );
+            assert_eq!(
+                subscription.consume().into_inner(),
+                [Edit {
+                    old: 0..0,
+                    new: 0..10
+                }]
+            );
+
+            multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer_1,
+                    range: Point::new(3, 3)..Point::new(4, 4),
+                },
+                cx,
+            );
+            multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer_2,
+                    range: Point::new(3, 1)..Point::new(3, 3),
+                },
+                cx,
+            );
+            assert_eq!(
+                subscription.consume().into_inner(),
+                [Edit {
+                    old: 10..10,
+                    new: 10..22
+                }]
+            );
+
+            subscription
+        });
+
+        let snapshot = multibuffer.read(cx).snapshot(cx);
+        assert_eq!(
+            snapshot.text(),
+            concat!(
+                "bbbb\n",  // Preserve newlines
+                "ccccc\n", //
+                "ddd\n",   //
+                "eeee\n",  //
+                "jj"       //
+            )
+        );
+        assert_eq!(
+            snapshot.buffer_rows(0).collect::<Vec<_>>(),
+            [Some(1), Some(2), Some(3), Some(4), Some(3)]
+        );
+        assert_eq!(
+            snapshot.buffer_rows(2).collect::<Vec<_>>(),
+            [Some(3), Some(4), Some(3)]
+        );
+        assert_eq!(snapshot.buffer_rows(4).collect::<Vec<_>>(), [Some(3)]);
+        assert_eq!(snapshot.buffer_rows(5).collect::<Vec<_>>(), []);
+
+        buffer_1.update(cx, |buffer, cx| {
+            buffer.edit(
+                [
+                    Point::new(0, 0)..Point::new(0, 0),
+                    Point::new(2, 1)..Point::new(2, 3),
+                ],
+                "\n",
+                cx,
+            );
+        });
+
+        let snapshot = multibuffer.read(cx).snapshot(cx);
+        assert_eq!(
+            snapshot.text(),
+            concat!(
+                "bbbb\n", // Preserve newlines
+                "c\n",    //
+                "cc\n",   //
+                "ddd\n",  //
+                "eeee\n", //
+                "jj"      //
+            )
+        );
+
+        assert_eq!(
+            subscription.consume().into_inner(),
+            [Edit {
+                old: 6..8,
+                new: 6..7
+            }]
+        );
+
+        let snapshot = multibuffer.read(cx).snapshot(cx);
+        assert_eq!(
+            snapshot.clip_point(Point::new(0, 5), Bias::Left),
+            Point::new(0, 4)
+        );
+        assert_eq!(
+            snapshot.clip_point(Point::new(0, 5), Bias::Right),
+            Point::new(0, 4)
+        );
+        assert_eq!(
+            snapshot.clip_point(Point::new(5, 1), Bias::Right),
+            Point::new(5, 1)
+        );
+        assert_eq!(
+            snapshot.clip_point(Point::new(5, 2), Bias::Right),
+            Point::new(5, 2)
+        );
+        assert_eq!(
+            snapshot.clip_point(Point::new(5, 3), Bias::Right),
+            Point::new(5, 2)
+        );
+
+        let snapshot = multibuffer.update(cx, |multibuffer, cx| {
+            let buffer_2_excerpt_id = multibuffer.excerpt_ids_for_buffer(&buffer_2)[0].clone();
+            multibuffer.remove_excerpts(&[buffer_2_excerpt_id], cx);
+            multibuffer.snapshot(cx)
+        });
+
+        assert_eq!(
+            snapshot.text(),
+            concat!(
+                "bbbb\n", // Preserve newlines
+                "c\n",    //
+                "cc\n",   //
+                "ddd\n",  //
+                "eeee",   //
+            )
+        );
+    }
+
+    #[gpui::test]
+    fn test_empty_excerpt_buffer(cx: &mut MutableAppContext) {
+        let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
+
+        let snapshot = multibuffer.read(cx).snapshot(cx);
+        assert_eq!(snapshot.text(), "");
+        assert_eq!(snapshot.buffer_rows(0).collect::<Vec<_>>(), &[Some(0)]);
+        assert_eq!(snapshot.buffer_rows(1).collect::<Vec<_>>(), &[]);
+    }
+
+    #[gpui::test]
+    fn test_singleton_multibuffer_anchors(cx: &mut MutableAppContext) {
+        let buffer = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
+        let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
+        let old_snapshot = multibuffer.read(cx).snapshot(cx);
+        buffer.update(cx, |buffer, cx| {
+            buffer.edit([0..0], "X", cx);
+            buffer.edit([5..5], "Y", cx);
+        });
+        let new_snapshot = multibuffer.read(cx).snapshot(cx);
+
+        assert_eq!(old_snapshot.text(), "abcd");
+        assert_eq!(new_snapshot.text(), "XabcdY");
+
+        assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0);
+        assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1);
+        assert_eq!(old_snapshot.anchor_before(4).to_offset(&new_snapshot), 5);
+        assert_eq!(old_snapshot.anchor_after(4).to_offset(&new_snapshot), 6);
+    }
+
+    #[gpui::test]
+    fn test_multibuffer_anchors(cx: &mut MutableAppContext) {
+        let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx));
+        let buffer_2 = cx.add_model(|cx| Buffer::new(0, "efghi", cx));
+        let multibuffer = cx.add_model(|cx| {
+            let mut multibuffer = MultiBuffer::new(0);
+            multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer_1,
+                    range: 0..4,
+                },
+                cx,
+            );
+            multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer_2,
+                    range: 0..5,
+                },
+                cx,
+            );
+            multibuffer
+        });
+        let old_snapshot = multibuffer.read(cx).snapshot(cx);
+
+        assert_eq!(old_snapshot.anchor_before(0).to_offset(&old_snapshot), 0);
+        assert_eq!(old_snapshot.anchor_after(0).to_offset(&old_snapshot), 0);
+        assert_eq!(Anchor::min().to_offset(&old_snapshot), 0);
+        assert_eq!(Anchor::max().to_offset(&old_snapshot), 10);
+
+        buffer_1.update(cx, |buffer, cx| {
+            buffer.edit([0..0], "W", cx);
+            buffer.edit([5..5], "X", cx);
+        });
+        buffer_2.update(cx, |buffer, cx| {
+            buffer.edit([0..0], "Y", cx);
+            buffer.edit([6..6], "Z", cx);
+        });
+        let new_snapshot = multibuffer.read(cx).snapshot(cx);
+
+        assert_eq!(old_snapshot.text(), "abcd\nefghi");
+        assert_eq!(new_snapshot.text(), "WabcdX\nYefghiZ");
+
+        assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0);
+        assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1);
+        assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 2);
+        assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 2);
+        assert_eq!(old_snapshot.anchor_before(2).to_offset(&new_snapshot), 3);
+        assert_eq!(old_snapshot.anchor_after(2).to_offset(&new_snapshot), 3);
+        assert_eq!(old_snapshot.anchor_before(5).to_offset(&new_snapshot), 7);
+        assert_eq!(old_snapshot.anchor_after(5).to_offset(&new_snapshot), 8);
+        assert_eq!(old_snapshot.anchor_before(10).to_offset(&new_snapshot), 13);
+        assert_eq!(old_snapshot.anchor_after(10).to_offset(&new_snapshot), 14);
+    }
+
+    #[gpui::test(iterations = 100)]
+    fn test_random_excerpts(cx: &mut MutableAppContext, mut rng: StdRng) {
+        let operations = env::var("OPERATIONS")
+            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+            .unwrap_or(10);
+
+        let mut buffers: Vec<ModelHandle<Buffer>> = Vec::new();
+        let list = cx.add_model(|_| MultiBuffer::new(0));
+        let mut excerpt_ids = Vec::new();
+        let mut expected_excerpts = Vec::<(ModelHandle<Buffer>, Range<text::Anchor>)>::new();
+        let mut old_versions = Vec::new();
+
+        for _ in 0..operations {
+            match rng.gen_range(0..100) {
+                0..=19 if !buffers.is_empty() => {
+                    let buffer = buffers.choose(&mut rng).unwrap();
+                    buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx));
+                }
+                20..=29 if !expected_excerpts.is_empty() => {
+                    let mut ids_to_remove = vec![];
+                    for _ in 0..rng.gen_range(1..=3) {
+                        if expected_excerpts.is_empty() {
+                            break;
+                        }
+
+                        let ix = rng.gen_range(0..expected_excerpts.len());
+                        ids_to_remove.push(excerpt_ids.remove(ix));
+                        let (buffer, range) = expected_excerpts.remove(ix);
+                        let buffer = buffer.read(cx);
+                        log::info!(
+                            "Removing excerpt {}: {:?}",
+                            ix,
+                            buffer
+                                .text_for_range(range.to_offset(&buffer))
+                                .collect::<String>(),
+                        );
+                    }
+                    ids_to_remove.sort_unstable();
+                    list.update(cx, |list, cx| list.remove_excerpts(&ids_to_remove, cx));
+                }
+                _ => {
+                    let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) {
+                        let base_text = RandomCharIter::new(&mut rng).take(10).collect::<String>();
+                        buffers.push(cx.add_model(|cx| Buffer::new(0, base_text, cx)));
+                        buffers.last().unwrap()
+                    } else {
+                        buffers.choose(&mut rng).unwrap()
+                    };
+
+                    let buffer = buffer_handle.read(cx);
+                    let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right);
+                    let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+                    let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix);
+                    let prev_excerpt_ix = rng.gen_range(0..=expected_excerpts.len());
+                    let prev_excerpt_id = excerpt_ids
+                        .get(prev_excerpt_ix)
+                        .cloned()
+                        .unwrap_or(ExcerptId::max());
+                    let excerpt_ix = (prev_excerpt_ix + 1).min(expected_excerpts.len());
+
+                    log::info!(
+                        "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}",
+                        excerpt_ix,
+                        expected_excerpts.len(),
+                        buffer_handle.id(),
+                        buffer.text(),
+                        start_ix..end_ix,
+                        &buffer.text()[start_ix..end_ix]
+                    );
+
+                    let excerpt_id = list.update(cx, |list, cx| {
+                        list.insert_excerpt_after(
+                            &prev_excerpt_id,
+                            ExcerptProperties {
+                                buffer: &buffer_handle,
+                                range: start_ix..end_ix,
+                            },
+                            cx,
+                        )
+                    });
+
+                    excerpt_ids.insert(excerpt_ix, excerpt_id);
+                    expected_excerpts.insert(excerpt_ix, (buffer_handle.clone(), anchor_range));
+                }
+            }
+
+            if rng.gen_bool(0.3) {
+                list.update(cx, |list, cx| {
+                    old_versions.push((list.snapshot(cx), list.subscribe()));
+                })
+            }
+
+            let snapshot = list.read(cx).snapshot(cx);
+
+            let mut excerpt_starts = Vec::new();
+            let mut expected_text = String::new();
+            let mut expected_buffer_rows = Vec::new();
+            for (buffer, range) in &expected_excerpts {
+                let buffer = buffer.read(cx);
+                let buffer_range = range.to_offset(buffer);
+
+                excerpt_starts.push(TextSummary::from(expected_text.as_str()));
+                expected_text.extend(buffer.text_for_range(buffer_range.clone()));
+                expected_text.push('\n');
+
+                let buffer_row_range = buffer.offset_to_point(buffer_range.start).row
+                    ..=buffer.offset_to_point(buffer_range.end).row;
+                for row in buffer_row_range {
+                    expected_buffer_rows.push(Some(row));
+                }
+            }
+            // Remove final trailing newline.
+            if !expected_excerpts.is_empty() {
+                expected_text.pop();
+            }
+
+            // An empty multi-buffer still reports a single buffer row.
+            if expected_buffer_rows.is_empty() {
+                expected_buffer_rows.push(Some(0));
+            }
+
+            assert_eq!(snapshot.text(), expected_text);
+            log::info!("MultiBuffer text: {:?}", expected_text);
+
+            assert_eq!(
+                snapshot.buffer_rows(0).collect::<Vec<_>>(),
+                expected_buffer_rows,
+            );
+
+            for _ in 0..5 {
+                let start_row = rng.gen_range(0..=expected_buffer_rows.len());
+                assert_eq!(
+                    snapshot.buffer_rows(start_row as u32).collect::<Vec<_>>(),
+                    &expected_buffer_rows[start_row..],
+                    "buffer_rows({})",
+                    start_row
+                );
+            }
+
+            assert_eq!(
+                snapshot.max_buffer_row(),
+                expected_buffer_rows
+                    .into_iter()
+                    .filter_map(|r| r)
+                    .max()
+                    .unwrap()
+            );
+
+            let mut excerpt_starts = excerpt_starts.into_iter();
+            for (buffer, range) in &expected_excerpts {
+                let buffer_id = buffer.id();
+                let buffer = buffer.read(cx);
+                let buffer_range = range.to_offset(buffer);
+                let buffer_start_point = buffer.offset_to_point(buffer_range.start);
+                let buffer_start_point_utf16 =
+                    buffer.text_summary_for_range::<PointUtf16, _>(0..buffer_range.start);
+
+                let excerpt_start = excerpt_starts.next().unwrap();
+                let mut offset = excerpt_start.bytes;
+                let mut buffer_offset = buffer_range.start;
+                let mut point = excerpt_start.lines;
+                let mut buffer_point = buffer_start_point;
+                let mut point_utf16 = excerpt_start.lines_utf16;
+                let mut buffer_point_utf16 = buffer_start_point_utf16;
+                for ch in buffer
+                    .snapshot()
+                    .chunks(buffer_range.clone(), None)
+                    .flat_map(|c| c.text.chars())
+                {
+                    for _ in 0..ch.len_utf8() {
+                        let left_offset = snapshot.clip_offset(offset, Bias::Left);
+                        let right_offset = snapshot.clip_offset(offset, Bias::Right);
+                        let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left);
+                        let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right);
+                        assert_eq!(
+                            left_offset,
+                            excerpt_start.bytes + (buffer_left_offset - buffer_range.start),
+                            "clip_offset({:?}, Left). buffer: {:?}, buffer offset: {:?}",
+                            offset,
+                            buffer_id,
+                            buffer_offset,
+                        );
+                        assert_eq!(
+                            right_offset,
+                            excerpt_start.bytes + (buffer_right_offset - buffer_range.start),
+                            "clip_offset({:?}, Right). buffer: {:?}, buffer offset: {:?}",
+                            offset,
+                            buffer_id,
+                            buffer_offset,
+                        );
+
+                        let left_point = snapshot.clip_point(point, Bias::Left);
+                        let right_point = snapshot.clip_point(point, Bias::Right);
+                        let buffer_left_point = buffer.clip_point(buffer_point, Bias::Left);
+                        let buffer_right_point = buffer.clip_point(buffer_point, Bias::Right);
+                        assert_eq!(
+                            left_point,
+                            excerpt_start.lines + (buffer_left_point - buffer_start_point),
+                            "clip_point({:?}, Left). buffer: {:?}, buffer point: {:?}",
+                            point,
+                            buffer_id,
+                            buffer_point,
+                        );
+                        assert_eq!(
+                            right_point,
+                            excerpt_start.lines + (buffer_right_point - buffer_start_point),
+                            "clip_point({:?}, Right). buffer: {:?}, buffer point: {:?}",
+                            point,
+                            buffer_id,
+                            buffer_point,
+                        );
+
+                        assert_eq!(
+                            snapshot.point_to_offset(left_point),
+                            left_offset,
+                            "point_to_offset({:?})",
+                            left_point,
+                        );
+                        assert_eq!(
+                            snapshot.offset_to_point(left_offset),
+                            left_point,
+                            "offset_to_point({:?})",
+                            left_offset,
+                        );
+
+                        offset += 1;
+                        buffer_offset += 1;
+                        if ch == '\n' {
+                            point += Point::new(1, 0);
+                            buffer_point += Point::new(1, 0);
+                        } else {
+                            point += Point::new(0, 1);
+                            buffer_point += Point::new(0, 1);
+                        }
+                    }
+
+                    for _ in 0..ch.len_utf16() {
+                        let left_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Left);
+                        let right_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Right);
+                        let buffer_left_point_utf16 =
+                            buffer.clip_point_utf16(buffer_point_utf16, Bias::Left);
+                        let buffer_right_point_utf16 =
+                            buffer.clip_point_utf16(buffer_point_utf16, Bias::Right);
+                        assert_eq!(
+                            left_point_utf16,
+                            excerpt_start.lines_utf16
+                                + (buffer_left_point_utf16 - buffer_start_point_utf16),
+                            "clip_point_utf16({:?}, Left). buffer: {:?}, buffer point_utf16: {:?}",
+                            point_utf16,
+                            buffer_id,
+                            buffer_point_utf16,
+                        );
+                        assert_eq!(
+                            right_point_utf16,
+                            excerpt_start.lines_utf16
+                                + (buffer_right_point_utf16 - buffer_start_point_utf16),
+                            "clip_point_utf16({:?}, Right). buffer: {:?}, buffer point_utf16: {:?}",
+                            point_utf16,
+                            buffer_id,
+                            buffer_point_utf16,
+                        );
+
+                        if ch == '\n' {
+                            point_utf16 += PointUtf16::new(1, 0);
+                            buffer_point_utf16 += PointUtf16::new(1, 0);
+                        } else {
+                            point_utf16 += PointUtf16::new(0, 1);
+                            buffer_point_utf16 += PointUtf16::new(0, 1);
+                        }
+                    }
+                }
+            }
+
+            for (row, line) in expected_text.split('\n').enumerate() {
+                assert_eq!(
+                    snapshot.line_len(row as u32),
+                    line.len() as u32,
+                    "line_len({}).",
+                    row
+                );
+            }
+
+            let text_rope = Rope::from(expected_text.as_str());
+            for _ in 0..10 {
+                let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right);
+                let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+
+                assert_eq!(
+                    snapshot
+                        .text_for_range(start_ix..end_ix)
+                        .collect::<String>(),
+                    &expected_text[start_ix..end_ix],
+                    "incorrect text for range {:?}",
+                    start_ix..end_ix
+                );
+
+                let expected_summary = TextSummary::from(&expected_text[start_ix..end_ix]);
+                assert_eq!(
+                    snapshot.text_summary_for_range::<TextSummary, _>(start_ix..end_ix),
+                    expected_summary,
+                    "incorrect summary for range {:?}",
+                    start_ix..end_ix
+                );
+            }
+
+            for _ in 0..10 {
+                let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right);
+                assert_eq!(
+                    snapshot.reversed_chars_at(end_ix).collect::<String>(),
+                    expected_text[..end_ix].chars().rev().collect::<String>(),
+                );
+            }
+
+            for _ in 0..10 {
+                let end_ix = rng.gen_range(0..=text_rope.len());
+                let start_ix = rng.gen_range(0..=end_ix);
+                assert_eq!(
+                    snapshot
+                        .bytes_in_range(start_ix..end_ix)
+                        .flatten()
+                        .copied()
+                        .collect::<Vec<_>>(),
+                    expected_text.as_bytes()[start_ix..end_ix].to_vec(),
+                    "bytes_in_range({:?})",
+                    start_ix..end_ix,
+                );
+            }
+        }
+
+        let snapshot = list.read(cx).snapshot(cx);
+        for (old_snapshot, subscription) in old_versions {
+            let edits = subscription.consume().into_inner();
+
+            log::info!(
+                "applying subscription edits to old text: {:?}: {:?}",
+                old_snapshot.text(),
+                edits,
+            );
+
+            let mut text = old_snapshot.text();
+            for edit in edits {
+                let new_text: String = snapshot.text_for_range(edit.new.clone()).collect();
+                text.replace_range(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+            }
+            assert_eq!(text.to_string(), snapshot.text());
+        }
+    }
+
+    #[gpui::test]
+    fn test_history(cx: &mut MutableAppContext) {
+        let buffer_1 = cx.add_model(|cx| Buffer::new(0, "1234", cx));
+        let buffer_2 = cx.add_model(|cx| Buffer::new(0, "5678", cx));
+        let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
+        let group_interval = multibuffer.read(cx).history.group_interval;
+        multibuffer.update(cx, |multibuffer, cx| {
+            multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer_1,
+                    range: 0..buffer_1.read(cx).len(),
+                },
+                cx,
+            );
+            multibuffer.push_excerpt(
+                ExcerptProperties {
+                    buffer: &buffer_2,
+                    range: 0..buffer_2.read(cx).len(),
+                },
+                cx,
+            );
+        });
+
+        let mut now = Instant::now();
+
+        multibuffer.update(cx, |multibuffer, cx| {
+            multibuffer.start_transaction_at(now, cx);
+            multibuffer.edit(
+                [
+                    Point::new(0, 0)..Point::new(0, 0),
+                    Point::new(1, 0)..Point::new(1, 0),
+                ],
+                "A",
+                cx,
+            );
+            multibuffer.edit(
+                [
+                    Point::new(0, 1)..Point::new(0, 1),
+                    Point::new(1, 1)..Point::new(1, 1),
+                ],
+                "B",
+                cx,
+            );
+            multibuffer.end_transaction_at(now, cx);
+            assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+            now += 2 * group_interval;
+            multibuffer.start_transaction_at(now, cx);
+            multibuffer.edit([2..2], "C", cx);
+            multibuffer.end_transaction_at(now, cx);
+            assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678");
+
+            multibuffer.undo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+            multibuffer.undo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "1234\n5678");
+
+            multibuffer.redo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+            multibuffer.redo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678");
+
+            buffer_1.update(cx, |buffer_1, cx| buffer_1.undo(cx));
+            assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+            multibuffer.undo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "1234\n5678");
+
+            multibuffer.redo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+            multibuffer.redo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678");
+
+            multibuffer.undo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+            buffer_1.update(cx, |buffer_1, cx| buffer_1.redo(cx));
+            assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678");
+
+            multibuffer.undo(cx);
+            assert_eq!(multibuffer.read(cx).text(), "C1234\n5678");
+        });
+    }
+}

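The tests above exercise the new excerpt API end to end. For orientation, here is a condensed sketch of the same flow, assuming a gpui `MutableAppContext` named `cx` as in those tests (buffer contents are illustrative):

    let buffer_1 = cx.add_model(|cx| Buffer::new(0, "fn main() {}", cx));
    let buffer_2 = cx.add_model(|cx| Buffer::new(0, "struct Foo;", cx));
    let multibuffer = cx.add_model(|_| MultiBuffer::new(0));

    multibuffer.update(cx, |multibuffer, cx| {
        // Each excerpt clips a range out of an underlying buffer; excerpts are
        // joined with newlines in the combined text.
        multibuffer.push_excerpt(
            ExcerptProperties {
                buffer: &buffer_1,
                range: 0..buffer_1.read(cx).len(),
            },
            cx,
        );
        multibuffer.push_excerpt(
            ExcerptProperties {
                buffer: &buffer_2,
                range: 0..buffer_2.read(cx).len(),
            },
            cx,
        );
    });

    let snapshot = multibuffer.read(cx).snapshot(cx);
    assert_eq!(snapshot.text(), "fn main() {}\nstruct Foo;");
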
crates/editor/src/multi_buffer/anchor.rs 🔗

@@ -0,0 +1,118 @@
+use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint};
+use anyhow::{anyhow, Result};
+use std::{
+    cmp::Ordering,
+    ops::{Range, Sub},
+};
+use sum_tree::Bias;
+use text::{rope::TextDimension, Point};
+
+#[derive(Clone, Eq, PartialEq, Debug, Hash)]
+pub struct Anchor {
+    pub(crate) excerpt_id: ExcerptId,
+    pub(crate) text_anchor: text::Anchor,
+}
+
+impl Anchor {
+    pub fn min() -> Self {
+        Self {
+            excerpt_id: ExcerptId::min(),
+            text_anchor: text::Anchor::min(),
+        }
+    }
+
+    pub fn max() -> Self {
+        Self {
+            excerpt_id: ExcerptId::max(),
+            text_anchor: text::Anchor::max(),
+        }
+    }
+
+    pub fn excerpt_id(&self) -> &ExcerptId {
+        &self.excerpt_id
+    }
+
+    pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Result<Ordering> {
+        let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id);
+        if excerpt_id_cmp.is_eq() {
+            if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
+                Ok(Ordering::Equal)
+            } else {
+                self.text_anchor.cmp(
+                    &other.text_anchor,
+                    snapshot
+                        .buffer_snapshot_for_excerpt(&self.excerpt_id)
+                        .ok_or_else(|| anyhow!("excerpt {:?} not found", self.excerpt_id))?,
+                )
+            }
+        } else {
+            Ok(excerpt_id_cmp)
+        }
+    }
+
+    pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
+        if self.text_anchor.bias != Bias::Left {
+            if let Some(buffer_snapshot) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) {
+                return Self {
+                    excerpt_id: self.excerpt_id.clone(),
+                    text_anchor: self.text_anchor.bias_left(buffer_snapshot),
+                };
+            }
+        }
+        self.clone()
+    }
+
+    pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
+        if self.text_anchor.bias != Bias::Right {
+            if let Some(buffer_snapshot) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) {
+                return Self {
+                    excerpt_id: self.excerpt_id.clone(),
+                    text_anchor: self.text_anchor.bias_right(buffer_snapshot),
+                };
+            }
+        }
+        self.clone()
+    }
+
+    pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
+    where
+        D: TextDimension + Ord + Sub<D, Output = D>,
+    {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToOffset for Anchor {
+    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
+        self.summary(snapshot)
+    }
+}
+
+impl ToPoint for Anchor {
+    fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
+        self.summary(snapshot)
+    }
+}
+
+pub trait AnchorRangeExt {
+    fn cmp(&self, b: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Result<Ordering>;
+    fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize>;
+    fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point>;
+}
+
+impl AnchorRangeExt for Range<Anchor> {
+    fn cmp(&self, other: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Result<Ordering> {
+        Ok(match self.start.cmp(&other.start, buffer)? {
+            Ordering::Equal => other.end.cmp(&self.end, buffer)?,
+            ord => ord,
+        })
+    }
+
+    fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize> {
+        self.start.to_offset(&content)..self.end.to_offset(&content)
+    }
+
+    fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point> {
+        self.start.to_point(&content)..self.end.to_point(&content)
+    }
+}

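In the module above, a multi-buffer `Anchor` pairs an `ExcerptId` with a `text::Anchor` into that excerpt's underlying buffer, so anchors keep tracking their text as buffers are edited, and comparisons order by excerpt first, then by position within the excerpt. A small sketch of that behavior, continuing the hypothetical multibuffer from the earlier sketch (the `AnchorRangeExt` import path is indicative only):

    use crate::multi_buffer::AnchorRangeExt;

    // Anchors are created against a snapshot and remember their bias.
    let snapshot = multibuffer.read(cx).snapshot(cx);
    let range = snapshot.anchor_before(3)..snapshot.anchor_after(7);

    // Edit the multibuffer (which edits the excerpted buffers underneath)...
    multibuffer.update(cx, |multibuffer, cx| multibuffer.edit([0..0], "xy", cx));

    // ...then resolve the same anchors against a fresh snapshot: the range has
    // shifted along with the text it was attached to.
    let snapshot = multibuffer.read(cx).snapshot(cx);
    assert_eq!(range.to_offset(&snapshot), 5..9);
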
crates/editor/src/test.rs 🔗

@@ -9,19 +9,6 @@ fn init_logger() {
     env_logger::init();
 }
 
-pub fn sample_text(rows: usize, cols: usize) -> String {
-    let mut text = String::new();
-    for row in 0..rows {
-        let c: char = ('a' as u32 + row as u32) as u8 as char;
-        let mut line = c.to_string().repeat(cols);
-        if row < rows - 1 {
-            line.push('\n');
-        }
-        text += &line;
-    }
-    text
-}
-
 pub struct Observer<T>(PhantomData<T>);
 
 impl<T: 'static> Entity for Observer<T> {

crates/file_finder/src/file_finder.rs 🔗

@@ -270,14 +270,14 @@ impl FileFinder {
             Editor::single_line(
                 {
                     let settings = settings.clone();
-                    move |_| {
+                    Arc::new(move |_| {
                         let settings = settings.borrow();
                         EditorSettings {
                             style: settings.theme.selector.input_editor.as_editor(),
                             tab_size: settings.tab_size,
                             soft_wrap: editor::SoftWrap::None,
                         }
-                    }
+                    })
                 },
                 cx,
             )

crates/go_to_line/src/go_to_line.rs 🔗

@@ -1,10 +1,11 @@
-use text::{Bias, Point, Selection};
 use editor::{display_map::ToDisplayPoint, Autoscroll, Editor, EditorSettings};
 use gpui::{
     action, elements::*, geometry::vector::Vector2F, keymap::Binding, Axis, Entity,
     MutableAppContext, RenderContext, View, ViewContext, ViewHandle,
 };
 use postage::watch;
+use std::sync::Arc;
+use text::{Bias, Point, Selection};
 use workspace::{Settings, Workspace};
 
 action!(Toggle);
@@ -49,14 +50,14 @@ impl GoToLine {
             Editor::single_line(
                 {
                     let settings = settings.clone();
-                    move |_| {
+                    Arc::new(move |_| {
                         let settings = settings.borrow();
                         EditorSettings {
                             tab_size: settings.tab_size,
                             style: settings.theme.selector.input_editor.as_editor(),
                             soft_wrap: editor::SoftWrap::None,
                         }
-                    }
+                    })
                 },
                 cx,
             )
@@ -67,13 +68,14 @@ impl GoToLine {
         let (restore_state, cursor_point, max_point) = active_editor.update(cx, |editor, cx| {
             let restore_state = Some(RestoreState {
                 scroll_position: editor.scroll_position(cx),
-                selections: editor.selections::<usize>(cx).collect(),
+                selections: editor.local_selections::<usize>(cx),
             });
 
+            let buffer = editor.buffer().read(cx).read(cx);
             (
                 restore_state,
-                editor.newest_selection(cx).head(),
-                editor.buffer().read(cx).max_point(),
+                editor.newest_selection(&buffer).head(),
+                buffer.max_point(),
             )
         });
 
@@ -127,7 +129,7 @@ impl GoToLine {
         match event {
             editor::Event::Blurred => cx.emit(Event::Dismissed),
             editor::Event::Edited => {
-                let line_editor = self.line_editor.read(cx).buffer().read(cx).text();
+                let line_editor = self.line_editor.read(cx).buffer().read(cx).read(cx).text();
                 let mut components = line_editor.trim().split(&[',', ':'][..]);
                 let row = components.next().and_then(|row| row.parse::<u32>().ok());
                 let column = components.next().and_then(|row| row.parse::<u32>().ok());
@@ -143,7 +145,7 @@ impl GoToLine {
                         let display_point = point.to_display_point(&snapshot);
                         active_editor.select_ranges([point..point], Some(Autoscroll::Center), cx);
                         active_editor.set_highlighted_row(Some(display_point.row()));
-                        Some(active_editor.newest_selection(cx))
+                        Some(active_editor.newest_selection(&snapshot.buffer_snapshot))
                     });
                     cx.notify();
                 }
@@ -162,7 +164,9 @@ impl Entity for GoToLine {
         self.active_editor.update(cx, |editor, cx| {
             editor.set_highlighted_row(None);
             if let Some((line_selection, restore_state)) = line_selection.zip(restore_state) {
-                if line_selection.id == editor.newest_selection::<usize>(cx).id {
+                let newest_selection =
+                    editor.newest_selection::<usize>(&editor.buffer().read(cx).read(cx));
+                if line_selection.id == newest_selection.id {
                     editor.set_scroll_position(restore_state.scroll_position, cx);
                     editor.update_selections(restore_state.selections, None, cx);
                 }

crates/gpui/src/executor.rs 🔗

@@ -54,6 +54,7 @@ type AnyLocalTask = async_task::Task<Box<dyn Any + 'static>>;
 
 #[must_use]
 pub enum Task<T> {
+    Ready(Option<T>),
     Local {
         any_task: AnyLocalTask,
         result_type: PhantomData<T>,
@@ -594,6 +595,10 @@ pub fn deterministic(seed: u64) -> (Rc<Foreground>, Arc<Background>) {
 }
 
 impl<T> Task<T> {
+    pub fn ready(value: T) -> Self {
+        Self::Ready(Some(value))
+    }
+
     fn local(any_task: AnyLocalTask) -> Self {
         Self::Local {
             any_task,
@@ -603,6 +608,7 @@ impl<T> Task<T> {
 
     pub fn detach(self) {
         match self {
+            Task::Ready(_) => {}
             Task::Local { any_task, .. } => any_task.detach(),
             Task::Send { any_task, .. } => any_task.detach(),
         }
@@ -621,6 +627,7 @@ impl<T: Send> Task<T> {
 impl<T: fmt::Debug> fmt::Debug for Task<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
+            Task::Ready(value) => value.fmt(f),
             Task::Local { any_task, .. } => any_task.fmt(f),
             Task::Send { any_task, .. } => any_task.fmt(f),
         }
@@ -632,6 +639,7 @@ impl<T: 'static> Future for Task<T> {
 
     fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
         match unsafe { self.get_unchecked_mut() } {
+            Task::Ready(value) => Poll::Ready(value.take().unwrap()),
             Task::Local { any_task, .. } => {
                 any_task.poll(cx).map(|value| *value.downcast().unwrap())
             }

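The new `Task::Ready` variant lets synchronous code paths hand back an already-completed task without going through an executor. A minimal sketch of the intent:

    // Task::Ready resolves as soon as it is polled, so no executor is involved.
    // Task<T> implements Future, so any block_on works for demonstration.
    let task: Task<i32> = Task::ready(42);
    assert_eq!(smol::block_on(task), 42);

    // Fire-and-forget callers can still detach it like any other task.
    Task::ready(()).detach();
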
crates/journal/src/journal.rs 🔗

@@ -57,7 +57,7 @@ pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
             if let Some(Some(Ok(item))) = opened.first() {
                 if let Some(editor) = item.to_any().downcast::<Editor>() {
                     editor.update(&mut cx, |editor, cx| {
-                        let len = editor.buffer().read(cx).len();
+                        let len = editor.buffer().read(cx).read(cx).len();
                         editor.select_ranges([len..len], Some(Autoscroll::Center), cx);
                         if len > 0 {
                             editor.insert("\n\n", cx);

crates/language/Cargo.toml 🔗

@@ -1,7 +1,7 @@
 [package]
 name = "language"
 version = "0.1.0"
-edition = "2018"
+edition = "2021"
 
 [lib]
 path = "src/language.rs"
@@ -9,20 +9,25 @@ path = "src/language.rs"
 [features]
 test-support = [
     "rand",
-    "text/test-support",
+    "collections/test-support",
     "lsp/test-support",
+    "text/test-support",
     "tree-sitter-rust",
+    "util/test-support",
 ]
 
 [dependencies]
-text = { path = "../text" }
 clock = { path = "../clock" }
+collections = { path = "../collections" }
 gpui = { path = "../gpui" }
 lsp = { path = "../lsp" }
 rpc = { path = "../rpc" }
+sum_tree = { path = "../sum_tree" }
+text = { path = "../text" }
 theme = { path = "../theme" }
 util = { path = "../util" }
 anyhow = "1.0.38"
+async-trait = "0.1"
 futures = "0.3"
 lazy_static = "1.4"
 log = "0.4"
@@ -31,14 +36,19 @@ postage = { version = "0.4.1", features = ["futures-traits"] }
 rand = { version = "0.8.3", optional = true }
 serde = { version = "1", features = ["derive"] }
 similar = "1.3"
+smallvec = { version = "1.6", features = ["union"] }
 smol = "1.2"
 tree-sitter = "0.20.0"
 tree-sitter-rust = { version = "0.20.0", optional = true }
 
 [dev-dependencies]
-text = { path = "../text", features = ["test-support"] }
+collections = { path = "../collections", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
 lsp = { path = "../lsp", features = ["test-support"] }
+text = { path = "../text", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+ctor = "0.1"
+env_logger = "0.8"
 rand = "0.8.3"
 tree-sitter-rust = "0.20.0"
 unindent = "0.1.7"

crates/language/src/buffer.rs 🔗

@@ -1,4 +1,6 @@
+use crate::diagnostic_set::{DiagnosticEntry, DiagnosticGroup};
 pub use crate::{
+    diagnostic_set::DiagnosticSet,
     highlight_map::{HighlightId, HighlightMap},
     proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
     PLAIN_TEXT,
@@ -16,18 +18,20 @@ use smol::future::yield_now;
 use std::{
     any::Any,
     cell::RefCell,
-    cmp,
-    collections::{BTreeMap, HashMap, HashSet},
+    cmp::{self, Ordering},
+    collections::{BTreeMap, HashMap},
     ffi::OsString,
     future::Future,
     iter::{Iterator, Peekable},
-    ops::{Deref, DerefMut, Range},
+    ops::{Deref, DerefMut, Range, Sub},
     path::{Path, PathBuf},
     str,
     sync::Arc,
     time::{Duration, Instant, SystemTime, UNIX_EPOCH},
     vec,
 };
+use sum_tree::TreeMap;
+use text::{operation_queue::OperationQueue, rope::TextDimension};
 pub use text::{Buffer as TextBuffer, Operation as _, *};
 use theme::SyntaxTheme;
 use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
@@ -61,29 +65,41 @@ pub struct Buffer {
     syntax_tree: Mutex<Option<SyntaxTree>>,
     parsing_in_background: bool,
     parse_count: usize,
-    diagnostics: AnchorRangeMultimap<Diagnostic>,
+    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
+    diagnostic_sets: Vec<DiagnosticSet>,
     diagnostics_update_count: usize,
     language_server: Option<LanguageServerState>,
+    deferred_ops: OperationQueue<Operation>,
     #[cfg(test)]
     pub(crate) operations: Vec<Operation>,
 }
 
-pub struct Snapshot {
-    text: text::Snapshot,
+pub struct BufferSnapshot {
+    text: text::BufferSnapshot,
     tree: Option<Tree>,
-    diagnostics: AnchorRangeMultimap<Diagnostic>,
+    diagnostic_sets: Vec<DiagnosticSet>,
+    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
     diagnostics_update_count: usize,
     is_parsing: bool,
     language: Option<Arc<Language>>,
     parse_count: usize,
 }
 
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct GroupId {
+    source: Arc<str>,
+    id: usize,
+}
+
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct Diagnostic {
+    pub code: Option<String>,
     pub severity: DiagnosticSeverity,
     pub message: String,
     pub group_id: usize,
+    pub is_valid: bool,
     pub is_primary: bool,
+    pub is_disk_based: bool,
 }
 
 struct LanguageServerState {
@@ -96,15 +112,28 @@ struct LanguageServerState {
 
 #[derive(Clone)]
 struct LanguageServerSnapshot {
-    buffer_snapshot: text::Snapshot,
+    buffer_snapshot: text::BufferSnapshot,
     version: usize,
     path: Arc<Path>,
 }
 
-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub enum Operation {
     Buffer(text::Operation),
-    UpdateDiagnostics(AnchorRangeMultimap<Diagnostic>),
+    UpdateDiagnostics {
+        provider_name: String,
+        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
+        lamport_timestamp: clock::Lamport,
+    },
+    UpdateSelections {
+        replica_id: ReplicaId,
+        selections: Arc<[Selection<Anchor>]>,
+        lamport_timestamp: clock::Lamport,
+    },
+    RemoveSelections {
+        replica_id: ReplicaId,
+        lamport_timestamp: clock::Lamport,
+    },
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
@@ -171,10 +200,9 @@ struct SyntaxTree {
 
 #[derive(Clone)]
 struct AutoindentRequest {
-    selection_set_ids: HashSet<SelectionSetId>,
-    before_edit: Snapshot,
-    edited: AnchorSet,
-    inserted: Option<AnchorRangeSet>,
+    before_edit: BufferSnapshot,
+    edited: Vec<Anchor>,
+    inserted: Option<Vec<Range<Anchor>>>,
 }
 
 #[derive(Debug)]
@@ -185,7 +213,7 @@ struct IndentSuggestion {
 
 struct TextProvider<'a>(&'a Rope);
 
-struct Highlights<'a> {
+struct BufferChunkHighlights<'a> {
     captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
     next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
     stack: Vec<(usize, HighlightId)>,
@@ -194,7 +222,7 @@ struct Highlights<'a> {
     _query_cursor: QueryCursorHandle,
 }
 
-pub struct Chunks<'a> {
+pub struct BufferChunks<'a> {
     range: Range<usize>,
     chunks: rope::Chunks<'a>,
     diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
@@ -202,7 +230,7 @@ pub struct Chunks<'a> {
     warning_depth: usize,
     information_depth: usize,
     hint_depth: usize,
-    highlights: Option<Highlights<'a>>,
+    highlights: Option<BufferChunkHighlights<'a>>,
 }
 
 #[derive(Clone, Copy, Debug, Default)]
@@ -270,14 +298,26 @@ impl Buffer {
             .into_iter()
             .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
         buffer.apply_ops(ops)?;
-        for set in message.selections {
-            let set = proto::deserialize_selection_set(set);
-            buffer.add_raw_selection_set(set.id, set);
-        }
         let mut this = Self::build(buffer, file);
-        if let Some(diagnostics) = message.diagnostics {
-            this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx);
+        for selection_set in message.selections {
+            this.remote_selections.insert(
+                selection_set.replica_id as ReplicaId,
+                proto::deserialize_selections(selection_set.selections),
+            );
+        }
+        let snapshot = this.snapshot();
+        for diagnostic_set in message.diagnostic_sets {
+            let (provider_name, entries) = proto::deserialize_diagnostic_set(diagnostic_set);
+            this.apply_diagnostic_update(
+                DiagnosticSet::from_sorted_entries(
+                    provider_name,
+                    entries.into_iter().cloned(),
+                    &snapshot,
+                ),
+                cx,
+            );
         }
+
         Ok(this)
     }
 
@@ -291,10 +331,20 @@ impl Buffer {
                 .map(proto::serialize_edit_operation)
                 .collect(),
             selections: self
-                .selection_sets()
-                .map(|(_, set)| proto::serialize_selection_set(set))
+                .remote_selections
+                .iter()
+                .map(|(replica_id, selections)| proto::SelectionSet {
+                    replica_id: *replica_id as u32,
+                    selections: proto::serialize_selections(selections),
+                })
+                .collect(),
+            diagnostic_sets: self
+                .diagnostic_sets
+                .iter()
+                .map(|set| {
+                    proto::serialize_diagnostic_set(set.provider_name().to_string(), set.iter())
+                })
                 .collect(),
-            diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)),
         }
     }
 
@@ -328,19 +378,22 @@ impl Buffer {
             autoindent_requests: Default::default(),
             pending_autoindent: Default::default(),
             language: None,
-            diagnostics: Default::default(),
+            remote_selections: Default::default(),
+            diagnostic_sets: Default::default(),
             diagnostics_update_count: 0,
             language_server: None,
+            deferred_ops: OperationQueue::new(),
             #[cfg(test)]
             operations: Default::default(),
         }
     }
 
-    pub fn snapshot(&self) -> Snapshot {
-        Snapshot {
+    pub fn snapshot(&self) -> BufferSnapshot {
+        BufferSnapshot {
             text: self.text.snapshot(),
             tree: self.syntax_tree(),
-            diagnostics: self.diagnostics.clone(),
+            remote_selections: self.remote_selections.clone(),
+            diagnostic_sets: self.diagnostic_sets.clone(),
             diagnostics_update_count: self.diagnostics_update_count,
             is_parsing: self.parsing_in_background,
             language: self.language.clone(),
@@ -570,6 +623,10 @@ impl Buffer {
         self.parse_count
     }
 
+    pub fn diagnostics_update_count(&self) -> usize {
+        self.diagnostics_update_count
+    }
+
     pub(crate) fn syntax_tree(&self) -> Option<Tree> {
         if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
             self.interpolate_tree(syntax_tree);
@@ -684,15 +741,30 @@ impl Buffer {
         cx.notify();
     }
 
-    pub fn update_diagnostics(
+    pub fn update_diagnostics<T>(
         &mut self,
+        provider_name: Arc<str>,
         version: Option<i32>,
-        mut diagnostics: Vec<lsp::Diagnostic>,
+        mut diagnostics: Vec<DiagnosticEntry<T>>,
         cx: &mut ModelContext<Self>,
-    ) -> Result<Operation> {
+    ) -> Result<Operation>
+    where
+        T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
+    {
+        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
+            Ordering::Equal
+                .then_with(|| b.is_primary.cmp(&a.is_primary))
+                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
+                .then_with(|| a.severity.cmp(&b.severity))
+                .then_with(|| a.message.cmp(&b.message))
+        }
+
         let version = version.map(|version| version as usize);
         let content = if let Some(version) = version {
             let language_server = self.language_server.as_mut().unwrap();
+            language_server
+                .pending_snapshots
+                .retain(|&v, _| v >= version);
             let snapshot = language_server
                 .pending_snapshots
                 .get(&version)
@@ -701,146 +773,73 @@ impl Buffer {
         } else {
             self.deref()
         };
-        let abs_path = self.file.as_ref().and_then(|f| f.abs_path());
 
-        let empty_set = HashSet::new();
-        let disk_based_sources = self
-            .language
-            .as_ref()
-            .and_then(|language| language.disk_based_diagnostic_sources())
-            .unwrap_or(&empty_set);
-
-        diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
-        self.diagnostics = {
-            let mut edits_since_save = content
-                .edits_since::<PointUtf16>(&self.saved_version)
-                .peekable();
-            let mut last_edit_old_end = PointUtf16::zero();
-            let mut last_edit_new_end = PointUtf16::zero();
-            let mut group_ids_by_diagnostic_range = HashMap::new();
-            let mut diagnostics_by_group_id = HashMap::new();
-            let mut next_group_id = 0;
-            'outer: for diagnostic in &diagnostics {
-                let mut start = diagnostic.range.start.to_point_utf16();
-                let mut end = diagnostic.range.end.to_point_utf16();
-                let source = diagnostic.source.as_ref();
-                let code = diagnostic.code.as_ref();
-                let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
-                    .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
-                    .copied()
-                    .unwrap_or_else(|| {
-                        let group_id = post_inc(&mut next_group_id);
-                        for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
-                            group_ids_by_diagnostic_range.insert((source, code, range), group_id);
-                        }
-                        group_id
-                    });
-
-                if diagnostic
-                    .source
-                    .as_ref()
-                    .map_or(false, |source| disk_based_sources.contains(source))
-                {
-                    while let Some(edit) = edits_since_save.peek() {
-                        if edit.old.end <= start {
-                            last_edit_old_end = edit.old.end;
-                            last_edit_new_end = edit.new.end;
-                            edits_since_save.next();
-                        } else if edit.old.start <= end && edit.old.end >= start {
-                            continue 'outer;
-                        } else {
-                            break;
-                        }
+        diagnostics.sort_unstable_by(|a, b| {
+            Ordering::Equal
+                .then_with(|| a.range.start.cmp(&b.range.start))
+                .then_with(|| b.range.end.cmp(&a.range.end))
+                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
+        });
+
+        let mut sanitized_diagnostics = Vec::new();
+        let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
+        let mut last_edit_old_end = T::default();
+        let mut last_edit_new_end = T::default();
+        'outer: for entry in diagnostics {
+            let mut start = entry.range.start;
+            let mut end = entry.range.end;
+
+            // Some diagnostics are based on files on disk instead of buffers'
+            // current contents. Adjust these diagnostics' ranges to reflect
+            // any unsaved edits.
+            if entry.diagnostic.is_disk_based {
+                while let Some(edit) = edits_since_save.peek() {
+                    if edit.old.end <= start {
+                        last_edit_old_end = edit.old.end;
+                        last_edit_new_end = edit.new.end;
+                        edits_since_save.next();
+                    } else if edit.old.start <= end && edit.old.end >= start {
+                        continue 'outer;
+                    } else {
+                        break;
                     }
-
-                    start = last_edit_new_end + (start - last_edit_old_end);
-                    end = last_edit_new_end + (end - last_edit_old_end);
                 }
 
-                let mut range = content.clip_point_utf16(start, Bias::Left)
-                    ..content.clip_point_utf16(end, Bias::Right);
-                if range.start == range.end {
-                    range.end.column += 1;
-                    range.end = content.clip_point_utf16(range.end, Bias::Right);
-                    if range.start == range.end && range.end.column > 0 {
-                        range.start.column -= 1;
-                        range.start = content.clip_point_utf16(range.start, Bias::Left);
-                    }
-                }
+                let start_overshoot = start - last_edit_old_end;
+                start = last_edit_new_end;
+                start.add_assign(&start_overshoot);
 
-                diagnostics_by_group_id
-                    .entry(group_id)
-                    .or_insert(Vec::new())
-                    .push((
-                        range,
-                        Diagnostic {
-                            severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
-                            message: diagnostic.message.clone(),
-                            group_id,
-                            is_primary: false,
-                        },
-                    ));
+                let end_overshoot = end - last_edit_old_end;
+                end = last_edit_new_end;
+                end.add_assign(&end_overshoot);
             }
 
-            content.anchor_range_multimap(
-                Bias::Left,
-                Bias::Right,
-                diagnostics_by_group_id
-                    .into_values()
-                    .flat_map(|mut diagnostics| {
-                        let primary_diagnostic =
-                            diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap();
-                        primary_diagnostic.1.is_primary = true;
-                        diagnostics
-                    }),
-            )
-        };
-
-        if let Some(version) = version {
-            let language_server = self.language_server.as_mut().unwrap();
-            let versions_to_delete = language_server
-                .pending_snapshots
-                .range(..version)
-                .map(|(v, _)| *v)
-                .collect::<Vec<_>>();
-            for version in versions_to_delete {
-                language_server.pending_snapshots.remove(&version);
+            let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
+            let mut range = range.start.to_point(content)..range.end.to_point(content);
+            // Expand empty ranges by one character
+            if range.start == range.end {
+                range.end.column += 1;
+                range.end = content.clip_point(range.end, Bias::Right);
+                if range.start == range.end && range.end.column > 0 {
+                    range.start.column -= 1;
+                    range.start = content.clip_point(range.start, Bias::Left);
+                }
             }
-        }
-
-        self.diagnostics_update_count += 1;
-        cx.notify();
-        cx.emit(Event::DiagnosticsUpdated);
-        Ok(Operation::UpdateDiagnostics(self.diagnostics.clone()))
-    }
-
-    pub fn diagnostics_in_range<'a, T, O>(
-        &'a self,
-        search_range: Range<T>,
-    ) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
-    where
-        T: 'a + ToOffset,
-        O: 'a + FromAnchor,
-    {
-        self.diagnostics
-            .intersecting_ranges(search_range, self, true)
-            .map(move |(_, range, diagnostic)| (range, diagnostic))
-    }
-
-    pub fn diagnostic_group<'a, O>(
-        &'a self,
-        group_id: usize,
-    ) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
-    where
-        O: 'a + FromAnchor,
-    {
-        self.diagnostics
-            .filter(self, move |diagnostic| diagnostic.group_id == group_id)
-            .map(move |(_, range, diagnostic)| (range, diagnostic))
-    }
 
-    pub fn diagnostics_update_count(&self) -> usize {
-        self.diagnostics_update_count
+            sanitized_diagnostics.push(DiagnosticEntry {
+                range,
+                diagnostic: entry.diagnostic,
+            });
+        }
+        drop(edits_since_save);
+
+        let set = DiagnosticSet::new(provider_name, sanitized_diagnostics, content);
+        self.apply_diagnostic_update(set.clone(), cx);
+        Ok(Operation::UpdateDiagnostics {
+            provider_name: set.provider_name().to_string(),
+            diagnostics: set.iter().cloned().collect(),
+            lamport_timestamp: self.text.lamport_clock.tick(),
+        })
     }
 
     fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
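
The disk-based branch above translates each diagnostic's range through the edits made since the buffer was last saved, and drops diagnostics whose ranges have been edited over. A standalone sketch of that translation using plain byte offsets (the function and its name are illustrative, not part of the crate):

    use std::ops::Range;

    /// `edits_since_save` holds (old_range, new_range) pairs, sorted by position.
    fn translate_range(
        range: Range<usize>,
        edits_since_save: &[(Range<usize>, Range<usize>)],
    ) -> Option<Range<usize>> {
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        for (old, new) in edits_since_save {
            if old.end <= range.start {
                // Edit lies entirely before the diagnostic: record how far it shifted things.
                last_old_end = old.end;
                last_new_end = new.end;
            } else if old.start <= range.end {
                // Edit overlaps the stale range: the diagnostic no longer applies.
                return None;
            } else {
                // Edits are sorted, so everything further on starts past the diagnostic.
                break;
            }
        }
        Some(
            last_new_end + (range.start - last_old_end)
                ..last_new_end + (range.end - last_old_end),
        )
    }

    // e.g. inserting 3 bytes at offset 0 shifts a 10..12 diagnostic to 13..15:
    // translate_range(10..12, &[(0..0, 0..3)]) == Some(13..15)
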
@@ -879,13 +878,13 @@ impl Buffer {
             for request in autoindent_requests {
                 let old_to_new_rows = request
                     .edited
-                    .iter::<Point>(&request.before_edit)
-                    .map(|point| point.row)
+                    .iter()
+                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                     .zip(
                         request
                             .edited
-                            .iter::<Point>(&snapshot)
-                            .map(|point| point.row),
+                            .iter()
+                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                     )
                     .collect::<BTreeMap<u32, u32>>();
 
@@ -947,7 +946,8 @@ impl Buffer {
                 if let Some(inserted) = request.inserted.as_ref() {
                     let inserted_row_ranges = contiguous_ranges(
                         inserted
-                            .ranges::<Point>(&snapshot)
+                            .iter()
+                            .map(|range| range.to_point(&snapshot))
                             .flat_map(|range| range.start.row..range.end.row + 1),
                         max_rows_between_yields,
                     );
@@ -980,51 +980,11 @@ impl Buffer {
         indent_columns: BTreeMap<u32, u32>,
         cx: &mut ModelContext<Self>,
     ) {
-        let selection_set_ids = self
-            .autoindent_requests
-            .drain(..)
-            .flat_map(|req| req.selection_set_ids.clone())
-            .collect::<HashSet<_>>();
-
-        self.start_transaction(selection_set_ids.iter().copied())
-            .unwrap();
+        self.start_transaction();
         for (row, indent_column) in &indent_columns {
             self.set_indent_column_for_line(*row, *indent_column, cx);
         }
-
-        for selection_set_id in &selection_set_ids {
-            if let Ok(set) = self.selection_set(*selection_set_id) {
-                let new_selections = set
-                    .selections::<Point>(&*self)
-                    .map(|selection| {
-                        if selection.start.column == 0 {
-                            let delta = Point::new(
-                                0,
-                                indent_columns
-                                    .get(&selection.start.row)
-                                    .copied()
-                                    .unwrap_or(0),
-                            );
-                            if delta.column > 0 {
-                                return Selection {
-                                    id: selection.id,
-                                    goal: selection.goal,
-                                    reversed: selection.reversed,
-                                    start: selection.start + delta,
-                                    end: selection.end + delta,
-                                };
-                            }
-                        }
-                        selection
-                    })
-                    .collect::<Vec<_>>();
-                self.update_selection_set(*selection_set_id, &new_selections, cx)
-                    .unwrap();
-            }
-        }
-
-        self.end_transaction(selection_set_ids.iter().copied(), cx)
-            .unwrap();
+        self.end_transaction(cx);
     }
 
     fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
@@ -1045,47 +1005,6 @@ impl Buffer {
         }
     }
 
-    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
-        if let Some(tree) = self.syntax_tree() {
-            let root = tree.root_node();
-            let range = range.start.to_offset(self)..range.end.to_offset(self);
-            let mut node = root.descendant_for_byte_range(range.start, range.end);
-            while node.map_or(false, |n| n.byte_range() == range) {
-                node = node.unwrap().parent();
-            }
-            node.map(|n| n.byte_range())
-        } else {
-            None
-        }
-    }
-
-    pub fn enclosing_bracket_ranges<T: ToOffset>(
-        &self,
-        range: Range<T>,
-    ) -> Option<(Range<usize>, Range<usize>)> {
-        let (grammar, tree) = self.grammar().zip(self.syntax_tree())?;
-        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
-        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
-
-        // Find bracket pairs that *inclusively* contain the given range.
-        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
-        let mut cursor = QueryCursorHandle::new();
-        let matches = cursor.set_byte_range(range).matches(
-            &grammar.brackets_query,
-            tree.root_node(),
-            TextProvider(self.as_rope()),
-        );
-
-        // Get the ranges of the innermost pair of brackets.
-        matches
-            .filter_map(|mat| {
-                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
-                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
-                Some((open.byte_range(), close.byte_range()))
-            })
-            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
-    }
-
     pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
         // TODO: it would be nice to not allocate here.
         let old_text = self.text();
@@ -1105,7 +1024,7 @@ impl Buffer {
 
     pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
         if self.version == diff.base_version {
-            self.start_transaction(None).unwrap();
+            self.start_transaction();
             let mut offset = 0;
             for (tag, len) in diff.changes {
                 let range = offset..(offset + len);
@@ -1118,7 +1037,7 @@ impl Buffer {
                     }
                 }
             }
-            self.end_transaction(None, cx).unwrap();
+            self.end_transaction(cx);
             true
         } else {
             false
@@ -1142,40 +1061,59 @@ impl Buffer {
         self.text.subscribe()
     }
 
-    pub fn start_transaction(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-    ) -> Result<()> {
-        self.start_transaction_at(selection_set_ids, Instant::now())
+    pub fn start_transaction(&mut self) -> Option<TransactionId> {
+        self.start_transaction_at(Instant::now())
     }
 
-    pub(crate) fn start_transaction_at(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-        now: Instant,
-    ) -> Result<()> {
-        self.text.start_transaction_at(selection_set_ids, now)
+    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
+        self.text.start_transaction_at(now)
     }
 
-    pub fn end_transaction(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        self.end_transaction_at(selection_set_ids, Instant::now(), cx)
+    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+        self.end_transaction_at(Instant::now(), cx)
     }
 
-    pub(crate) fn end_transaction_at(
+    pub fn end_transaction_at(
         &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
         now: Instant,
         cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) {
+    ) -> Option<TransactionId> {
+        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
             let was_dirty = start_version != self.saved_version;
             self.did_edit(&start_version, was_dirty, cx);
+            Some(transaction_id)
+        } else {
+            None
         }
-        Ok(())
+    }
+
+    pub fn set_active_selections(
+        &mut self,
+        selections: Arc<[Selection<Anchor>]>,
+        cx: &mut ModelContext<Self>,
+    ) {
+        let lamport_timestamp = self.text.lamport_clock.tick();
+        self.remote_selections
+            .insert(self.text.replica_id(), selections.clone());
+        self.send_operation(
+            Operation::UpdateSelections {
+                replica_id: self.text.replica_id(),
+                selections,
+                lamport_timestamp,
+            },
+            cx,
+        );
+    }
+
+    pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
+        let lamport_timestamp = self.text.lamport_clock.tick();
+        self.send_operation(
+            Operation::RemoveSelections {
+                replica_id: self.text.replica_id(),
+                lamport_timestamp,
+            },
+            cx,
+        );
     }
 
     fn update_language_server(&mut self) {
@@ -1260,21 +1198,21 @@ impl Buffer {
             return;
         }
 
-        self.start_transaction(None).unwrap();
+        self.start_transaction();
         self.pending_autoindent.take();
         let autoindent_request = if autoindent && self.language.is_some() {
             let before_edit = self.snapshot();
-            let edited = self.anchor_set(
-                Bias::Left,
-                ranges.iter().filter_map(|range| {
+            let edited = ranges
+                .iter()
+                .filter_map(|range| {
                     let start = range.start.to_point(self);
                     if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                         None
                     } else {
-                        Some(range.start)
+                        Some(self.anchor_before(range.start))
                     }
-                }),
-            );
+                })
+                .collect();
             Some((before_edit, edited))
         } else {
             None
@@ -1289,34 +1227,29 @@ impl Buffer {
             let mut inserted = None;
             if let Some(first_newline_ix) = first_newline_ix {
                 let mut delta = 0isize;
-                inserted = Some(self.anchor_range_set(
-                    Bias::Left,
-                    Bias::Right,
-                    ranges.iter().map(|range| {
-                        let start = (delta + range.start as isize) as usize + first_newline_ix + 1;
-                        let end = (delta + range.start as isize) as usize + new_text_len;
-                        delta +=
-                            (range.end as isize - range.start as isize) + new_text_len as isize;
-                        start..end
-                    }),
-                ));
+                inserted = Some(
+                    ranges
+                        .iter()
+                        .map(|range| {
+                            let start =
+                                (delta + range.start as isize) as usize + first_newline_ix + 1;
+                            let end = (delta + range.start as isize) as usize + new_text_len;
+                            delta +=
+                                (range.end as isize - range.start as isize) + new_text_len as isize;
+                            self.anchor_before(start)..self.anchor_after(end)
+                        })
+                        .collect(),
+                );
             }
 
-            let selection_set_ids = self
-                .text
-                .peek_undo_stack()
-                .unwrap()
-                .starting_selection_set_ids()
-                .collect();
             self.autoindent_requests.push(Arc::new(AutoindentRequest {
-                selection_set_ids,
                 before_edit,
                 edited,
                 inserted,
             }));
         }
 
-        self.end_transaction(None, cx).unwrap();
+        self.end_transaction(cx);
         self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
     }
 
@@ -1344,55 +1277,6 @@ impl Buffer {
         self.language.as_ref().and_then(|l| l.grammar.as_ref())
     }
 
-    pub fn add_selection_set<T: ToOffset>(
-        &mut self,
-        selections: &[Selection<T>],
-        cx: &mut ModelContext<Self>,
-    ) -> SelectionSetId {
-        let operation = self.text.add_selection_set(selections);
-        if let text::Operation::UpdateSelections { set_id, .. } = &operation {
-            let set_id = *set_id;
-            cx.notify();
-            self.send_operation(Operation::Buffer(operation), cx);
-            set_id
-        } else {
-            unreachable!()
-        }
-    }
-
-    pub fn update_selection_set<T: ToOffset>(
-        &mut self,
-        set_id: SelectionSetId,
-        selections: &[Selection<T>],
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let operation = self.text.update_selection_set(set_id, selections)?;
-        cx.notify();
-        self.send_operation(Operation::Buffer(operation), cx);
-        Ok(())
-    }
-
-    pub fn set_active_selection_set(
-        &mut self,
-        set_id: Option<SelectionSetId>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let operation = self.text.set_active_selection_set(set_id)?;
-        self.send_operation(Operation::Buffer(operation), cx);
-        Ok(())
-    }
-
-    pub fn remove_selection_set(
-        &mut self,
-        set_id: SelectionSetId,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let operation = self.text.remove_selection_set(set_id)?;
-        cx.notify();
-        self.send_operation(Operation::Buffer(operation), cx);
-        Ok(())
-    }
-
     pub fn apply_ops<I: IntoIterator<Item = Operation>>(
         &mut self,
         ops: I,
@@ -1401,17 +1285,23 @@ impl Buffer {
         self.pending_autoindent.take();
         let was_dirty = self.is_dirty();
         let old_version = self.version.clone();
+        let mut deferred_ops = Vec::new();
         let buffer_ops = ops
             .into_iter()
             .filter_map(|op| match op {
                 Operation::Buffer(op) => Some(op),
-                Operation::UpdateDiagnostics(diagnostics) => {
-                    self.apply_diagnostic_update(diagnostics, cx);
+                _ => {
+                    if self.can_apply_op(&op) {
+                        self.apply_op(op, cx);
+                    } else {
+                        deferred_ops.push(op);
+                    }
                     None
                 }
             })
             .collect::<Vec<_>>();
         self.text.apply_ops(buffer_ops)?;
+        self.flush_deferred_ops(cx);
         self.did_edit(&old_version, was_dirty, cx);
         // Notify independently of whether the buffer was edited as the operations could include a
         // selection update.
@@ -1419,14 +1309,87 @@ impl Buffer {
         Ok(())
     }
 
-    fn apply_diagnostic_update(
-        &mut self,
-        diagnostics: AnchorRangeMultimap<Diagnostic>,
-        cx: &mut ModelContext<Self>,
-    ) {
-        self.diagnostics = diagnostics;
+    fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
+        let mut deferred_ops = Vec::new();
+        for op in self.deferred_ops.drain().iter().cloned() {
+            if self.can_apply_op(&op) {
+                self.apply_op(op, cx);
+            } else {
+                deferred_ops.push(op);
+            }
+        }
+        self.deferred_ops.insert(deferred_ops);
+    }
+
+    fn can_apply_op(&self, operation: &Operation) -> bool {
+        match operation {
+            Operation::Buffer(_) => {
+                unreachable!("buffer operations should never be applied at this layer")
+            }
+            Operation::UpdateDiagnostics {
+                diagnostics: diagnostic_set,
+                ..
+            } => diagnostic_set.iter().all(|diagnostic| {
+                self.text.can_resolve(&diagnostic.range.start)
+                    && self.text.can_resolve(&diagnostic.range.end)
+            }),
+            Operation::UpdateSelections { selections, .. } => selections
+                .iter()
+                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
+            Operation::RemoveSelections { .. } => true,
+        }
+    }
+
+    fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
+        match operation {
+            Operation::Buffer(_) => {
+                unreachable!("buffer operations should never be applied at this layer")
+            }
+            Operation::UpdateDiagnostics {
+                provider_name,
+                diagnostics: diagnostic_set,
+                ..
+            } => {
+                let snapshot = self.snapshot();
+                self.apply_diagnostic_update(
+                    DiagnosticSet::from_sorted_entries(
+                        provider_name,
+                        diagnostic_set.iter().cloned(),
+                        &snapshot,
+                    ),
+                    cx,
+                );
+            }
+            Operation::UpdateSelections {
+                replica_id,
+                selections,
+                lamport_timestamp,
+            } => {
+                self.remote_selections.insert(replica_id, selections);
+                self.text.lamport_clock.observe(lamport_timestamp);
+            }
+            Operation::RemoveSelections {
+                replica_id,
+                lamport_timestamp,
+            } => {
+                self.remote_selections.remove(&replica_id);
+                self.text.lamport_clock.observe(lamport_timestamp);
+            }
+        }
+    }
+
+    fn apply_diagnostic_update(&mut self, set: DiagnosticSet, cx: &mut ModelContext<Self>) {
+        match self
+            .diagnostic_sets
+            .binary_search_by_key(&set.provider_name(), |set| set.provider_name())
+        {
+            Ok(ix) => self.diagnostic_sets[ix] = set.clone(),
+            Err(ix) => self.diagnostic_sets.insert(ix, set.clone()),
+        }
+
         self.diagnostics_update_count += 1;
         cx.notify();
+        cx.emit(Event::DiagnosticsUpdated);
     }
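
Together, can_apply_op, apply_op, and flush_deferred_ops form a small defer-until-resolvable loop: selection and diagnostic operations carry anchors, so they are parked until the buffer operations that created those anchors have arrived, then retried. A stripped-down sketch of the same pattern, with invented names and no Buffer types:

    // Sketch only: operations carry a dependency id; an op applies only once
    // its dependency has been observed, otherwise it is re-queued.
    use std::collections::HashSet;

    struct DeferredOps {
        seen: HashSet<u64>,
        deferred: Vec<(u64, String)>, // (dependency id, payload)
    }

    impl DeferredOps {
        fn apply(&mut self, dep: u64, payload: String) {
            if self.seen.contains(&dep) {
                println!("applied: {}", payload);
            } else {
                self.deferred.push((dep, payload));
            }
        }

        fn observe(&mut self, id: u64) {
            self.seen.insert(id);
            // Retry everything that was deferred; anything still blocked
            // is pushed back by `apply`.
            for (dep, payload) in std::mem::take(&mut self.deferred) {
                self.apply(dep, payload);
            }
        }
    }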
 
     #[cfg(not(test))]
@@ -1442,30 +1405,68 @@ impl Buffer {
     }
 
     pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
-        self.text.remove_peer(replica_id);
+        self.remote_selections.remove(&replica_id);
         cx.notify();
     }
 
-    pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
+    pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
         let was_dirty = self.is_dirty();
         let old_version = self.version.clone();
 
-        for operation in self.text.undo() {
+        if let Some((transaction_id, operation)) = self.text.undo() {
             self.send_operation(Operation::Buffer(operation), cx);
+            self.did_edit(&old_version, was_dirty, cx);
+            Some(transaction_id)
+        } else {
+            None
         }
+    }
 
-        self.did_edit(&old_version, was_dirty, cx);
+    pub fn undo_transaction(
+        &mut self,
+        transaction_id: TransactionId,
+        cx: &mut ModelContext<Self>,
+    ) -> bool {
+        let was_dirty = self.is_dirty();
+        let old_version = self.version.clone();
+
+        if let Some(operation) = self.text.undo_transaction(transaction_id) {
+            self.send_operation(Operation::Buffer(operation), cx);
+            self.did_edit(&old_version, was_dirty, cx);
+            true
+        } else {
+            false
+        }
     }
 
-    pub fn redo(&mut self, cx: &mut ModelContext<Self>) {
+    pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
         let was_dirty = self.is_dirty();
         let old_version = self.version.clone();
 
-        for operation in self.text.redo() {
+        if let Some((transaction_id, operation)) = self.text.redo() {
             self.send_operation(Operation::Buffer(operation), cx);
+            self.did_edit(&old_version, was_dirty, cx);
+            Some(transaction_id)
+        } else {
+            None
         }
+    }
 
-        self.did_edit(&old_version, was_dirty, cx);
+    pub fn redo_transaction(
+        &mut self,
+        transaction_id: TransactionId,
+        cx: &mut ModelContext<Self>,
+    ) -> bool {
+        let was_dirty = self.is_dirty();
+        let old_version = self.version.clone();
+
+        if let Some(operation) = self.text.redo_transaction(transaction_id) {
+            self.send_operation(Operation::Buffer(operation), cx);
+            self.did_edit(&old_version, was_dirty, cx);
+            true
+        } else {
+            false
+        }
     }
 }
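
With selection sets removed, transactions are identified by the TransactionId returned from start_transaction/end_transaction, and a caller can later target a specific transaction with undo_transaction/redo_transaction. A sketch of the intended call pattern, using the same edit API the tests exercise:

    // Sketch only: group two edits into one transaction, keep its id, and
    // later roll back just that transaction.
    fn edit_then_undo(buffer: &mut Buffer, cx: &mut ModelContext<Buffer>) {
        buffer.start_transaction();
        buffer.edit(vec![0..0], "hello ", cx);
        buffer.edit(vec![6..6], "world", cx);
        if let Some(transaction_id) = buffer.end_transaction(cx) {
            buffer.undo_transaction(transaction_id, cx);
        }
    }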
 

crates/language/src/diagnostic_set.rs

@@ -0,0 +1,223 @@
+use crate::Diagnostic;
+use collections::HashMap;
+use std::{
+    cmp::{Ordering, Reverse},
+    iter,
+    ops::Range,
+    sync::Arc,
+};
+use sum_tree::{self, Bias, SumTree};
+use text::{Anchor, FromAnchor, Point, ToOffset};
+
+#[derive(Clone, Debug)]
+pub struct DiagnosticSet {
+    provider_name: Arc<str>,
+    diagnostics: SumTree<DiagnosticEntry<Anchor>>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct DiagnosticEntry<T> {
+    pub range: Range<T>,
+    pub diagnostic: Diagnostic,
+}
+
+pub struct DiagnosticGroup<T> {
+    pub entries: Vec<DiagnosticEntry<T>>,
+    pub primary_ix: usize,
+}
+
+#[derive(Clone, Debug)]
+pub struct Summary {
+    start: Anchor,
+    end: Anchor,
+    min_start: Anchor,
+    max_end: Anchor,
+    count: usize,
+}
+
+impl DiagnosticSet {
+    pub fn provider_name(&self) -> &str {
+        &self.provider_name
+    }
+
+    pub fn from_sorted_entries<I>(
+        provider_name: impl Into<Arc<str>>,
+        iter: I,
+        buffer: &text::BufferSnapshot,
+    ) -> Self
+    where
+        I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
+    {
+        Self {
+            provider_name: provider_name.into(),
+            diagnostics: SumTree::from_iter(iter, buffer),
+        }
+    }
+
+    pub fn new<I>(provider_name: Arc<str>, iter: I, buffer: &text::BufferSnapshot) -> Self
+    where
+        I: IntoIterator<Item = DiagnosticEntry<Point>>,
+    {
+        let mut entries = iter.into_iter().collect::<Vec<_>>();
+        entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
+        Self {
+            provider_name,
+            diagnostics: SumTree::from_iter(
+                entries.into_iter().map(|entry| DiagnosticEntry {
+                    range: buffer.anchor_before(entry.range.start)
+                        ..buffer.anchor_after(entry.range.end),
+                    diagnostic: entry.diagnostic,
+                }),
+                buffer,
+            ),
+        }
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
+        self.diagnostics.iter()
+    }
+
+    pub fn range<'a, T, O>(
+        &'a self,
+        range: Range<T>,
+        buffer: &'a text::BufferSnapshot,
+        inclusive: bool,
+    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+    where
+        T: 'a + ToOffset,
+        O: FromAnchor,
+    {
+        let end_bias = if inclusive { Bias::Right } else { Bias::Left };
+        let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
+        let mut cursor = self.diagnostics.filter::<_, ()>(
+            {
+                move |summary: &Summary| {
+                    let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap();
+                    let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap();
+                    if inclusive {
+                        start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
+                    } else {
+                        start_cmp == Ordering::Less && end_cmp == Ordering::Greater
+                    }
+                }
+            },
+            buffer,
+        );
+
+        iter::from_fn({
+            move || {
+                if let Some(diagnostic) = cursor.item() {
+                    cursor.next(buffer);
+                    Some(diagnostic.resolve(buffer))
+                } else {
+                    None
+                }
+            }
+        })
+    }
+
+    pub fn groups(&self, output: &mut Vec<DiagnosticGroup<Anchor>>, buffer: &text::BufferSnapshot) {
+        let mut groups = HashMap::default();
+        for entry in self.diagnostics.iter() {
+            groups
+                .entry(entry.diagnostic.group_id)
+                .or_insert(Vec::new())
+                .push(entry.clone());
+        }
+
+        let start_ix = output.len();
+        output.extend(groups.into_values().filter_map(|mut entries| {
+            entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer).unwrap());
+            entries
+                .iter()
+                .position(|entry| entry.diagnostic.is_primary)
+                .map(|primary_ix| DiagnosticGroup {
+                    entries,
+                    primary_ix,
+                })
+        }));
+        output[start_ix..].sort_unstable_by(|a, b| {
+            a.entries[a.primary_ix]
+                .range
+                .start
+                .cmp(&b.entries[b.primary_ix].range.start, buffer)
+                .unwrap()
+        });
+    }
+
+    pub fn group<'a, O: FromAnchor>(
+        &'a self,
+        group_id: usize,
+        buffer: &'a text::BufferSnapshot,
+    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
+        self.iter()
+            .filter(move |entry| entry.diagnostic.group_id == group_id)
+            .map(|entry| entry.resolve(buffer))
+    }
+}
+
+impl Default for DiagnosticSet {
+    fn default() -> Self {
+        Self {
+            provider_name: "".into(),
+            diagnostics: Default::default(),
+        }
+    }
+}
+
+impl sum_tree::Item for DiagnosticEntry<Anchor> {
+    type Summary = Summary;
+
+    fn summary(&self) -> Self::Summary {
+        Summary {
+            start: self.range.start.clone(),
+            end: self.range.end.clone(),
+            min_start: self.range.start.clone(),
+            max_end: self.range.end.clone(),
+            count: 1,
+        }
+    }
+}
+
+impl DiagnosticEntry<Anchor> {
+    pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
+        DiagnosticEntry {
+            range: O::from_anchor(&self.range.start, buffer)
+                ..O::from_anchor(&self.range.end, buffer),
+            diagnostic: self.diagnostic.clone(),
+        }
+    }
+}
+
+impl Default for Summary {
+    fn default() -> Self {
+        Self {
+            start: Anchor::min(),
+            end: Anchor::max(),
+            min_start: Anchor::max(),
+            max_end: Anchor::min(),
+            count: 0,
+        }
+    }
+}
+
+impl sum_tree::Summary for Summary {
+    type Context = text::BufferSnapshot;
+
+    fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
+        if other
+            .min_start
+            .cmp(&self.min_start, buffer)
+            .unwrap()
+            .is_lt()
+        {
+            self.min_start = other.min_start.clone();
+        }
+        if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() {
+            self.max_end = other.max_end.clone();
+        }
+        self.start = other.start.clone();
+        self.end = other.end.clone();
+        self.count += other.count;
+    }
+}
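
DiagnosticSet keeps its entries in a SumTree keyed by anchor ranges, so range queries stay valid as the buffer is edited. A sketch of building a set and querying it (the snapshot and entries are assumed to come from elsewhere, e.g. a language server):

    // Sketch only: find the first primary diagnostic anywhere in the buffer,
    // resolved back to Points.
    fn first_primary(
        snapshot: &text::BufferSnapshot,
        entries: Vec<DiagnosticEntry<Point>>,
    ) -> Option<DiagnosticEntry<Point>> {
        let set = DiagnosticSet::new("example-provider".into(), entries, snapshot);
        set.range::<usize, Point>(0..snapshot.len(), snapshot, false)
            .find(|entry| entry.diagnostic.is_primary)
    }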

crates/language/src/language.rs

@@ -1,19 +1,22 @@
 mod buffer;
+mod diagnostic_set;
 mod highlight_map;
 pub mod proto;
 #[cfg(test)]
 mod tests;
 
 use anyhow::{anyhow, Result};
+use async_trait::async_trait;
 pub use buffer::Operation;
 pub use buffer::*;
-use gpui::{executor::Background, AppContext};
+use collections::{HashMap, HashSet};
+pub use diagnostic_set::DiagnosticEntry;
+use gpui::AppContext;
 use highlight_map::HighlightMap;
 use lazy_static::lazy_static;
-use lsp::LanguageServer;
 use parking_lot::Mutex;
 use serde::Deserialize;
-use std::{collections::HashSet, path::Path, str, sync::Arc};
+use std::{path::Path, str, sync::Arc};
 use theme::SyntaxTheme;
 use tree_sitter::{self, Query};
 pub use tree_sitter::{Parser, Tree};
@@ -46,7 +49,7 @@ pub struct LanguageServerConfig {
     pub disk_based_diagnostic_sources: HashSet<String>,
     #[cfg(any(test, feature = "test-support"))]
     #[serde(skip)]
-    pub fake_server: Option<(Arc<LanguageServer>, Arc<std::sync::atomic::AtomicBool>)>,
+    pub fake_server: Option<(Arc<lsp::LanguageServer>, Arc<std::sync::atomic::AtomicBool>)>,
 }
 
 #[derive(Clone, Debug, Deserialize)]
@@ -57,9 +60,18 @@ pub struct BracketPair {
     pub newline: bool,
 }
 
+#[async_trait]
+pub trait DiagnosticProvider: 'static + Send + Sync {
+    async fn diagnose(
+        &self,
+        path: Arc<Path>,
+    ) -> Result<HashMap<Arc<Path>, Vec<DiagnosticEntry<usize>>>>;
+}
+
 pub struct Language {
     pub(crate) config: LanguageConfig,
     pub(crate) grammar: Option<Arc<Grammar>>,
+    pub(crate) diagnostic_provider: Option<Arc<dyn DiagnosticProvider>>,
 }
 
 pub struct Grammar {
@@ -124,6 +136,7 @@ impl Language {
                     highlight_map: Default::default(),
                 })
             }),
+            diagnostic_provider: None,
         }
     }
 
@@ -157,6 +170,11 @@ impl Language {
         Ok(self)
     }
 
+    pub fn with_diagnostic_provider(mut self, source: impl DiagnosticProvider) -> Self {
+        self.diagnostic_provider = Some(Arc::new(source));
+        self
+    }
+
     pub fn name(&self) -> &str {
         self.config.name.as_str()
     }
@@ -190,6 +208,10 @@ impl Language {
         }
     }
 
+    pub fn diagnostic_provider(&self) -> Option<&Arc<dyn DiagnosticProvider>> {
+        self.diagnostic_provider.as_ref()
+    }
+
     pub fn disk_based_diagnostic_sources(&self) -> Option<&HashSet<String>> {
         self.config
             .language_server
@@ -217,7 +239,9 @@ impl Grammar {
 
 #[cfg(any(test, feature = "test-support"))]
 impl LanguageServerConfig {
-    pub async fn fake(executor: Arc<Background>) -> (Self, lsp::FakeLanguageServer) {
+    pub async fn fake(
+        executor: Arc<gpui::executor::Background>,
+    ) -> (Self, lsp::FakeLanguageServer) {
         let (server, fake) = lsp::LanguageServer::fake(executor).await;
         fake.started
             .store(false, std::sync::atomic::Ordering::SeqCst);
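
The DiagnosticProvider trait added above gives a language a way to contribute diagnostics outside the language-server path. A hedged sketch of the smallest possible provider, wired in through with_diagnostic_provider (the provider type is invented for illustration):

    // Sketch only: a provider that reports no diagnostics.
    struct NullDiagnosticProvider;

    #[async_trait]
    impl DiagnosticProvider for NullDiagnosticProvider {
        async fn diagnose(
            &self,
            _path: Arc<Path>,
        ) -> Result<HashMap<Arc<Path>, Vec<DiagnosticEntry<usize>>>> {
            Ok(HashMap::default())
        }
    }

    // Attach it when constructing a language, e.g.:
    // let language = language.with_diagnostic_provider(NullDiagnosticProvider);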

crates/language/src/proto.rs

@@ -1,13 +1,12 @@
-use std::sync::Arc;
-
-use crate::{Diagnostic, Operation};
+use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation};
 use anyhow::{anyhow, Result};
 use clock::ReplicaId;
 use lsp::DiagnosticSeverity;
 use rpc::proto;
+use std::sync::Arc;
 use text::*;
 
-pub use proto::Buffer;
+pub use proto::{Buffer, SelectionSet};
 
 pub fn serialize_operation(operation: &Operation) -> proto::Operation {
     proto::Operation {
@@ -41,46 +40,34 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
                     .collect(),
                 version: From::from(&undo.version),
             }),
-            Operation::Buffer(text::Operation::UpdateSelections {
-                set_id,
+            Operation::UpdateSelections {
+                replica_id,
                 selections,
                 lamport_timestamp,
-            }) => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
-                replica_id: set_id.replica_id as u32,
-                local_timestamp: set_id.value,
+            } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
+                replica_id: *replica_id as u32,
                 lamport_timestamp: lamport_timestamp.value,
-                version: selections.version().into(),
-                selections: selections
-                    .full_offset_ranges()
-                    .map(|(range, state)| proto::Selection {
-                        id: state.id as u64,
-                        start: range.start.0 as u64,
-                        end: range.end.0 as u64,
-                        reversed: state.reversed,
-                    })
-                    .collect(),
+                selections: serialize_selections(selections),
             }),
-            Operation::Buffer(text::Operation::RemoveSelections {
-                set_id,
+            Operation::RemoveSelections {
+                replica_id,
                 lamport_timestamp,
-            }) => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections {
-                replica_id: set_id.replica_id as u32,
-                local_timestamp: set_id.value,
+            } => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections {
+                replica_id: *replica_id as u32,
                 lamport_timestamp: lamport_timestamp.value,
             }),
-            Operation::Buffer(text::Operation::SetActiveSelections {
-                set_id,
+            Operation::UpdateDiagnostics {
+                provider_name,
+                diagnostics,
                 lamport_timestamp,
-            }) => proto::operation::Variant::SetActiveSelections(
-                proto::operation::SetActiveSelections {
-                    replica_id: lamport_timestamp.replica_id as u32,
-                    local_timestamp: set_id.map(|set_id| set_id.value),
-                    lamport_timestamp: lamport_timestamp.value,
-                },
-            ),
-            Operation::UpdateDiagnostics(diagnostic_set) => {
-                proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set))
-            }
+            } => proto::operation::Variant::UpdateDiagnosticSet(proto::UpdateDiagnosticSet {
+                replica_id: lamport_timestamp.replica_id as u32,
+                lamport_timestamp: lamport_timestamp.value,
+                diagnostic_set: Some(serialize_diagnostic_set(
+                    provider_name.clone(),
+                    diagnostics.iter(),
+                )),
+            }),
         }),
     }
 }
@@ -104,48 +91,59 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::
     }
 }
 
-pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
-    let version = set.selections.version();
-    let entries = set.selections.full_offset_ranges();
-    proto::SelectionSet {
-        replica_id: set.id.replica_id as u32,
-        lamport_timestamp: set.id.value as u32,
-        is_active: set.active,
-        version: version.into(),
-        selections: entries
-            .map(|(range, state)| proto::Selection {
-                id: state.id as u64,
-                start: range.start.0 as u64,
-                end: range.end.0 as u64,
-                reversed: state.reversed,
-            })
-            .collect(),
-    }
+pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
+    selections
+        .iter()
+        .map(|selection| proto::Selection {
+            id: selection.id as u64,
+            start: Some(serialize_anchor(&selection.start)),
+            end: Some(serialize_anchor(&selection.end)),
+            reversed: selection.reversed,
+        })
+        .collect()
 }
 
-pub fn serialize_diagnostics(map: &AnchorRangeMultimap<Diagnostic>) -> proto::DiagnosticSet {
+pub fn serialize_diagnostic_set<'a>(
+    provider_name: String,
+    diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
+) -> proto::DiagnosticSet {
     proto::DiagnosticSet {
-        version: map.version().into(),
-        diagnostics: map
-            .full_offset_ranges()
-            .map(|(range, diagnostic)| proto::Diagnostic {
-                start: range.start.0 as u64,
-                end: range.end.0 as u64,
-                message: diagnostic.message.clone(),
-                severity: match diagnostic.severity {
+        provider_name,
+        diagnostics: diagnostics
+            .into_iter()
+            .map(|entry| proto::Diagnostic {
+                start: Some(serialize_anchor(&entry.range.start)),
+                end: Some(serialize_anchor(&entry.range.end)),
+                message: entry.diagnostic.message.clone(),
+                severity: match entry.diagnostic.severity {
                     DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
                     DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
                     DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
                     DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
                     _ => proto::diagnostic::Severity::None,
                 } as i32,
-                group_id: diagnostic.group_id as u64,
-                is_primary: diagnostic.is_primary,
+                group_id: entry.diagnostic.group_id as u64,
+                is_primary: entry.diagnostic.is_primary,
+                is_valid: entry.diagnostic.is_valid,
+                code: entry.diagnostic.code.clone(),
+                is_disk_based: entry.diagnostic.is_disk_based,
             })
             .collect(),
     }
 }
 
+fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
+    proto::Anchor {
+        replica_id: anchor.timestamp.replica_id as u32,
+        local_timestamp: anchor.timestamp.value,
+        offset: anchor.offset as u64,
+        bias: match anchor.bias {
+            Bias::Left => proto::Bias::Left as i32,
+            Bias::Right => proto::Bias::Right as i32,
+        },
+    }
+}
+
 pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
     Ok(
         match message
@@ -187,66 +185,50 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
                 },
             }),
             proto::operation::Variant::UpdateSelections(message) => {
-                let version = message.version.into();
-                let entries = message
+                let selections = message
                     .selections
-                    .iter()
-                    .map(|selection| {
-                        let range = FullOffset(selection.start as usize)
-                            ..FullOffset(selection.end as usize);
-                        let state = SelectionState {
+                    .into_iter()
+                    .filter_map(|selection| {
+                        Some(Selection {
                             id: selection.id as usize,
+                            start: deserialize_anchor(selection.start?)?,
+                            end: deserialize_anchor(selection.end?)?,
                             reversed: selection.reversed,
                             goal: SelectionGoal::None,
-                        };
-                        (range, state)
+                        })
                     })
-                    .collect();
-                let selections = AnchorRangeMap::from_full_offset_ranges(
-                    version,
-                    Bias::Left,
-                    Bias::Left,
-                    entries,
-                );
+                    .collect::<Vec<_>>();
 
-                Operation::Buffer(text::Operation::UpdateSelections {
-                    set_id: clock::Lamport {
-                        replica_id: message.replica_id as ReplicaId,
-                        value: message.local_timestamp,
-                    },
+                Operation::UpdateSelections {
+                    replica_id: message.replica_id as ReplicaId,
                     lamport_timestamp: clock::Lamport {
                         replica_id: message.replica_id as ReplicaId,
                         value: message.lamport_timestamp,
                     },
                     selections: Arc::from(selections),
-                })
+                }
             }
-            proto::operation::Variant::RemoveSelections(message) => {
-                Operation::Buffer(text::Operation::RemoveSelections {
-                    set_id: clock::Lamport {
-                        replica_id: message.replica_id as ReplicaId,
-                        value: message.local_timestamp,
-                    },
-                    lamport_timestamp: clock::Lamport {
-                        replica_id: message.replica_id as ReplicaId,
-                        value: message.lamport_timestamp,
-                    },
-                })
-            }
-            proto::operation::Variant::SetActiveSelections(message) => {
-                Operation::Buffer(text::Operation::SetActiveSelections {
-                    set_id: message.local_timestamp.map(|value| clock::Lamport {
-                        replica_id: message.replica_id as ReplicaId,
-                        value,
-                    }),
+            proto::operation::Variant::RemoveSelections(message) => Operation::RemoveSelections {
+                replica_id: message.replica_id as ReplicaId,
+                lamport_timestamp: clock::Lamport {
+                    replica_id: message.replica_id as ReplicaId,
+                    value: message.lamport_timestamp,
+                },
+            },
+            proto::operation::Variant::UpdateDiagnosticSet(message) => {
+                let (provider_name, diagnostics) = deserialize_diagnostic_set(
+                    message
+                        .diagnostic_set
+                        .ok_or_else(|| anyhow!("missing diagnostic set"))?,
+                );
+                Operation::UpdateDiagnostics {
+                    provider_name,
+                    diagnostics,
                     lamport_timestamp: clock::Lamport {
                         replica_id: message.replica_id as ReplicaId,
                         value: message.lamport_timestamp,
                     },
-                })
-            }
-            proto::operation::Variant::UpdateDiagnostics(message) => {
-                Operation::UpdateDiagnostics(deserialize_diagnostics(message))
+                }
             }
         },
     )
@@ -270,55 +252,69 @@ pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation
     }
 }
 
-pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet {
-    SelectionSet {
-        id: clock::Lamport {
-            replica_id: set.replica_id as u16,
-            value: set.lamport_timestamp,
-        },
-        active: set.is_active,
-        selections: Arc::new(AnchorRangeMap::from_full_offset_ranges(
-            set.version.into(),
-            Bias::Left,
-            Bias::Left,
-            set.selections
-                .into_iter()
-                .map(|selection| {
-                    let range =
-                        FullOffset(selection.start as usize)..FullOffset(selection.end as usize);
-                    let state = SelectionState {
-                        id: selection.id as usize,
-                        reversed: selection.reversed,
-                        goal: SelectionGoal::None,
-                    };
-                    (range, state)
+pub fn deserialize_selections(selections: Vec<proto::Selection>) -> Arc<[Selection<Anchor>]> {
+    Arc::from(
+        selections
+            .into_iter()
+            .filter_map(|selection| {
+                Some(Selection {
+                    id: selection.id as usize,
+                    start: deserialize_anchor(selection.start?)?,
+                    end: deserialize_anchor(selection.end?)?,
+                    reversed: selection.reversed,
+                    goal: SelectionGoal::None,
                 })
-                .collect(),
-        )),
-    }
+            })
+            .collect::<Vec<_>>(),
+    )
 }
 
-pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap<Diagnostic> {
-    AnchorRangeMultimap::from_full_offset_ranges(
-        message.version.into(),
-        Bias::Left,
-        Bias::Right,
-        message.diagnostics.into_iter().filter_map(|diagnostic| {
-            Some((
-                FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize),
-                Diagnostic {
-                    severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
-                        proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
-                        proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
-                        proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION,
-                        proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
-                        proto::diagnostic::Severity::None => return None,
+pub fn deserialize_diagnostic_set(
+    message: proto::DiagnosticSet,
+) -> (String, Arc<[DiagnosticEntry<Anchor>]>) {
+    (
+        message.provider_name,
+        message
+            .diagnostics
+            .into_iter()
+            .filter_map(|diagnostic| {
+                Some(DiagnosticEntry {
+                    range: deserialize_anchor(diagnostic.start?)?
+                        ..deserialize_anchor(diagnostic.end?)?,
+                    diagnostic: Diagnostic {
+                        severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)?
+                        {
+                            proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
+                            proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
+                            proto::diagnostic::Severity::Information => {
+                                DiagnosticSeverity::INFORMATION
+                            }
+                            proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
+                            proto::diagnostic::Severity::None => return None,
+                        },
+                        message: diagnostic.message,
+                        group_id: diagnostic.group_id as usize,
+                        code: diagnostic.code,
+                        is_valid: diagnostic.is_valid,
+                        is_primary: diagnostic.is_primary,
+                        is_disk_based: diagnostic.is_disk_based,
                     },
-                    message: diagnostic.message,
-                    group_id: diagnostic.group_id as usize,
-                    is_primary: diagnostic.is_primary,
-                },
-            ))
-        }),
+                })
+            })
+            .collect(),
     )
 }
+
+fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
+    Some(Anchor {
+        timestamp: clock::Local {
+            replica_id: anchor.replica_id as ReplicaId,
+            value: anchor.local_timestamp,
+        },
+        offset: anchor.offset as usize,
+        bias: match proto::Bias::from_i32(anchor.bias)? {
+            proto::Bias::Left => Bias::Left,
+            proto::Bias::Right => Bias::Right,
+        },
+    })
+}
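
deserialize_anchor is the inverse of serialize_anchor above; both carry the anchor's replica/timestamp pair, offset, and bias through the wire format. A small sketch of the round trip, with field values chosen arbitrarily:

    // Sketch only: encode a hand-built anchor and decode it back.
    fn anchor_round_trip() {
        let anchor = Anchor {
            timestamp: clock::Local { replica_id: 1, value: 42 },
            offset: 7,
            bias: Bias::Left,
        };
        let restored = deserialize_anchor(serialize_anchor(&anchor)).unwrap();
        assert!(restored.offset == anchor.offset);
        assert!(matches!(restored.bias, Bias::Left));
        assert!(restored.timestamp.replica_id == anchor.timestamp.replica_id);
        assert!(restored.timestamp.value == anchor.timestamp.value);
    }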

crates/language/src/tests.rs

@@ -1,17 +1,21 @@
 use super::*;
-use gpui::{ModelHandle, MutableAppContext, Task};
+use gpui::{ModelHandle, MutableAppContext};
 use std::{
-    any::Any,
     cell::RefCell,
-    ffi::OsString,
     iter::FromIterator,
     ops::Range,
-    path::PathBuf,
     rc::Rc,
-    time::{Duration, Instant, SystemTime},
+    time::{Duration, Instant},
 };
 use unindent::Unindent as _;
 
+#[cfg(test)]
+#[ctor::ctor]
+fn init_logger() {
+    // std::env::set_var("RUST_LOG", "info");
+    env_logger::init();
+}
+
 #[test]
 fn test_select_language() {
     let registry = LanguageRegistry {
@@ -85,15 +89,15 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) {
         buffer.edit(Some(2..4), "XYZ", cx);
 
         // An empty transaction does not emit any events.
-        buffer.start_transaction(None).unwrap();
-        buffer.end_transaction(None, cx).unwrap();
+        buffer.start_transaction();
+        buffer.end_transaction(cx);
 
         // A transaction containing two edits emits one edited event.
         now += Duration::from_secs(1);
-        buffer.start_transaction_at(None, now).unwrap();
+        buffer.start_transaction_at(now);
         buffer.edit(Some(5..5), "u", cx);
         buffer.edit(Some(6..6), "w", cx);
-        buffer.end_transaction_at(None, now, cx).unwrap();
+        buffer.end_transaction_at(now, cx);
 
         // Undoing a transaction emits one edited event.
         buffer.undo(cx);
@@ -160,7 +164,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
     // Perform some edits (add parameter and variable reference)
     // Parsing doesn't begin until the transaction is complete
     buffer.update(&mut cx, |buf, cx| {
-        buf.start_transaction(None).unwrap();
+        buf.start_transaction();
 
         let offset = buf.text().find(")").unwrap();
         buf.edit(vec![offset..offset], "b: C", cx);
@@ -170,7 +174,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
         buf.edit(vec![offset..offset], " d; ", cx);
         assert!(!buf.is_parsing());
 
-        buf.end_transaction(None, cx).unwrap();
+        buf.end_transaction(cx);
         assert_eq!(buf.text(), "fn a(b: C) { d; }");
         assert!(buf.is_parsing());
     });
@@ -326,58 +330,62 @@ fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
     });
 }
 
-#[gpui::test]
-fn test_autoindent_moves_selections(cx: &mut MutableAppContext) {
-    cx.add_model(|cx| {
-        let text = "fn a() {}";
-
-        let mut buffer =
-            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
-
-        let selection_set_id = buffer.add_selection_set::<usize>(&[], cx);
-        buffer.start_transaction(Some(selection_set_id)).unwrap();
-        buffer.edit_with_autoindent([5..5, 9..9], "\n\n", cx);
-        buffer
-            .update_selection_set(
-                selection_set_id,
-                &[
-                    Selection {
-                        id: 0,
-                        start: Point::new(1, 0),
-                        end: Point::new(1, 0),
-                        reversed: false,
-                        goal: SelectionGoal::None,
-                    },
-                    Selection {
-                        id: 1,
-                        start: Point::new(4, 0),
-                        end: Point::new(4, 0),
-                        reversed: false,
-                        goal: SelectionGoal::None,
-                    },
-                ],
-                cx,
-            )
-            .unwrap();
-        assert_eq!(buffer.text(), "fn a(\n\n) {}\n\n");
-
-        // Ending the transaction runs the auto-indent. The selection
-        // at the start of the auto-indented row is pushed to the right.
-        buffer.end_transaction(Some(selection_set_id), cx).unwrap();
-        assert_eq!(buffer.text(), "fn a(\n    \n) {}\n\n");
-        let selection_ranges = buffer
-            .selection_set(selection_set_id)
-            .unwrap()
-            .selections::<Point>(&buffer)
-            .map(|selection| selection.point_range(&buffer))
-            .collect::<Vec<_>>();
-
-        assert_eq!(selection_ranges[0], empty(Point::new(1, 4)));
-        assert_eq!(selection_ranges[1], empty(Point::new(4, 0)));
-
-        buffer
-    });
-}
+// We need another approach to managing selections with auto-indent
+
+// #[gpui::test]
+// fn test_autoindent_moves_selections(cx: &mut MutableAppContext) {
+//     cx.add_model(|cx| {
+//         let text = "fn a() {}";
+
+//         let mut buffer =
+//             Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
+
+//         let selection_set_id = buffer.add_selection_set::<usize>(&[], cx);
+//         buffer.start_transaction();
+//         buffer.edit_with_autoindent([5..5, 9..9], "\n\n", cx);
+//         buffer
+//             .update_selection_set(
+//                 selection_set_id,
+//                 &[
+//                     Selection {
+//                         id: 0,
+//                         start: Point::new(1, 0),
+//                         end: Point::new(1, 0),
+//                         reversed: false,
+//                         goal: SelectionGoal::None,
+//                     },
+//                     Selection {
+//                         id: 1,
+//                         start: Point::new(4, 0),
+//                         end: Point::new(4, 0),
+//                         reversed: false,
+//                         goal: SelectionGoal::None,
+//                     },
+//                 ],
+//                 cx,
+//             )
+//             .unwrap();
+//         assert_eq!(buffer.text(), "fn a(\n\n) {}\n\n");
+
+//         // TODO! Come up with a different approach to moving selections now that we don't manage selection sets in the buffer
+
+//         // Ending the transaction runs the auto-indent. The selection
+//         // at the start of the auto-indented row is pushed to the right.
+//         buffer.end_transaction(cx);
+//         assert_eq!(buffer.text(), "fn a(\n    \n) {}\n\n");
+//         let selection_ranges = buffer
+//             .selection_set(selection_set_id)
+//             .unwrap()
+//             .selections::<Point>(&buffer)
+//             .map(|selection| selection.start.to_point(&buffer)..selection.end.to_point(&buffer))
+//             .collect::<Vec<_>>();
+
+//         assert_eq!(selection_ranges[0], empty(Point::new(1, 4)));
+//         assert_eq!(selection_ranges[1], empty(Point::new(4, 0)));
+
+//         buffer
+//     });
+// }
 
 #[gpui::test]
 fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
@@ -504,25 +512,41 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         // Receive diagnostics for an earlier version of the buffer.
         buffer
             .update_diagnostics(
+                "lsp".into(),
                 Some(open_notification.text_document.version),
                 vec![
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "undefined variable 'A'".to_string(),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'A'".to_string(),
+                            is_disk_based: true,
+                            group_id: 0,
+                            is_primary: true,
+                            ..Default::default()
+                        },
                     },
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "undefined variable 'BB'".to_string(),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'BB'".to_string(),
+                            is_disk_based: true,
+                            group_id: 1,
+                            is_primary: true,
+                            ..Default::default()
+                        },
                     },
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "undefined variable 'CCC'".to_string(),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            is_disk_based: true,
+                            message: "undefined variable 'CCC'".to_string(),
+                            group_id: 2,
+                            is_primary: true,
+                            ..Default::default()
+                        },
                     },
                 ],
                 cx,
@@ -532,25 +556,36 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         // The diagnostics have moved down since they were created.
         assert_eq!(
             buffer
-                .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0))
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                 .collect::<Vec<_>>(),
             &[
                 (
-                    Point::new(3, 9)..Point::new(3, 11),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::ERROR,
-                        message: "undefined variable 'BB'".to_string(),
-                        group_id: 1,
-                        is_primary: true,
-                    },
+                    "lsp",
+                    DiagnosticEntry {
+                        range: Point::new(3, 9)..Point::new(3, 11),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'BB'".to_string(),
+                            is_disk_based: true,
+                            group_id: 1,
+                            is_primary: true,
+                            ..Default::default()
+                        },
+                    }
                 ),
                 (
-                    Point::new(4, 9)..Point::new(4, 12),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::ERROR,
-                        message: "undefined variable 'CCC'".to_string(),
-                        group_id: 2,
-                        is_primary: true,
+                    "lsp",
+                    DiagnosticEntry {
+                        range: Point::new(4, 9)..Point::new(4, 12),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'CCC'".to_string(),
+                            is_disk_based: true,
+                            group_id: 2,
+                            is_primary: true,
+                            ..Default::default()
+                        }
                     }
                 )
             ]
@@ -579,19 +614,29 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         // Ensure overlapping diagnostics are highlighted correctly.
         buffer
             .update_diagnostics(
+                "lsp".into(),
                 Some(open_notification.text_document.version),
                 vec![
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "undefined variable 'A'".to_string(),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'A'".to_string(),
+                            is_disk_based: true,
+                            group_id: 0,
+                            is_primary: true,
+                            ..Default::default()
+                        },
                     },
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
-                        severity: Some(lsp::DiagnosticSeverity::WARNING),
-                        message: "unreachable statement".to_string(),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::WARNING,
+                            message: "unreachable statement".to_string(),
+                            group_id: 1,
+                            is_primary: true,
+                            ..Default::default()
+                        },
                     },
                 ],
                 cx,
@@ -599,26 +644,36 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
             .unwrap();
         assert_eq!(
             buffer
-                .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0))
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                 .collect::<Vec<_>>(),
             &[
                 (
-                    Point::new(2, 9)..Point::new(2, 12),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::WARNING,
-                        message: "unreachable statement".to_string(),
-                        group_id: 1,
-                        is_primary: true,
+                    "lsp",
+                    DiagnosticEntry {
+                        range: Point::new(2, 9)..Point::new(2, 12),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::WARNING,
+                            message: "unreachable statement".to_string(),
+                            group_id: 1,
+                            is_primary: true,
+                            ..Default::default()
+                        }
                     }
                 ),
                 (
-                    Point::new(2, 9)..Point::new(2, 10),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::ERROR,
-                        message: "undefined variable 'A'".to_string(),
-                        group_id: 0,
-                        is_primary: true,
-                    },
+                    "lsp",
+                    DiagnosticEntry {
+                        range: Point::new(2, 9)..Point::new(2, 10),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'A'".to_string(),
+                            is_disk_based: true,
+                            group_id: 0,
+                            is_primary: true,
+                            ..Default::default()
+                        },
+                    }
                 )
             ]
         );
@@ -656,21 +711,30 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
     buffer.update(&mut cx, |buffer, cx| {
         buffer
             .update_diagnostics(
+                "lsp".into(),
                 Some(change_notification_2.text_document.version),
                 vec![
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "undefined variable 'BB'".to_string(),
-                        source: Some("disk".to_string()),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'BB'".to_string(),
+                            is_disk_based: true,
+                            group_id: 1,
+                            is_primary: true,
+                            ..Default::default()
+                        },
                     },
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "undefined variable 'A'".to_string(),
-                        source: Some("disk".to_string()),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'A'".to_string(),
+                            is_disk_based: true,
+                            group_id: 0,
+                            is_primary: true,
+                            ..Default::default()
+                        },
                     },
                 ],
                 cx,
@@ -678,26 +742,37 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
             .unwrap();
         assert_eq!(
             buffer
-                .diagnostics_in_range(0..buffer.len())
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(0..buffer.len())
                 .collect::<Vec<_>>(),
             &[
                 (
-                    Point::new(2, 21)..Point::new(2, 22),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::ERROR,
-                        message: "undefined variable 'A'".to_string(),
-                        group_id: 0,
-                        is_primary: true,
+                    "lsp",
+                    DiagnosticEntry {
+                        range: Point::new(2, 21)..Point::new(2, 22),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'A'".to_string(),
+                            is_disk_based: true,
+                            group_id: 0,
+                            is_primary: true,
+                            ..Default::default()
+                        }
                     }
                 ),
                 (
-                    Point::new(3, 9)..Point::new(3, 11),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::ERROR,
-                        message: "undefined variable 'BB'".to_string(),
-                        group_id: 1,
-                        is_primary: true,
-                    },
+                    "lsp",
+                    DiagnosticEntry {
+                        range: Point::new(3, 9)..Point::new(3, 11),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "undefined variable 'BB'".to_string(),
+                            is_disk_based: true,
+                            group_id: 1,
+                            is_primary: true,
+                            ..Default::default()
+                        },
+                    }
                 )
             ]
         );
@@ -717,25 +792,24 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
         buffer.set_language(Some(Arc::new(rust_lang())), None, cx);
         buffer
             .update_diagnostics(
+                "lsp".into(),
                 None,
                 vec![
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(
-                            lsp::Position::new(0, 10),
-                            lsp::Position::new(0, 10),
-                        ),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "syntax error 1".to_string(),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "syntax error 1".to_string(),
+                            ..Default::default()
+                        },
                     },
-                    lsp::Diagnostic {
-                        range: lsp::Range::new(
-                            lsp::Position::new(1, 10),
-                            lsp::Position::new(1, 10),
-                        ),
-                        severity: Some(lsp::DiagnosticSeverity::ERROR),
-                        message: "syntax error 2".to_string(),
-                        ..Default::default()
+                    DiagnosticEntry {
+                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "syntax error 2".to_string(),
+                            ..Default::default()
+                        },
                     },
                 ],
                 cx,
@@ -753,9 +827,9 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
                 .collect::<Vec<_>>(),
             &[
                 ("let one = ", None),
-                (";", Some(lsp::DiagnosticSeverity::ERROR)),
+                (";", Some(DiagnosticSeverity::ERROR)),
                 ("\nlet two =", None),
-                (" ", Some(lsp::DiagnosticSeverity::ERROR)),
+                (" ", Some(DiagnosticSeverity::ERROR)),
                 ("\nlet three = 3;\n", None)
             ]
         );
@@ -763,217 +837,6 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
     });
 }
 
-#[gpui::test]
-async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
-    cx.add_model(|cx| {
-        let text = "
-            fn foo(mut v: Vec<usize>) {
-                for x in &v {
-                    v.push(1);
-                }
-            }
-        "
-        .unindent();
-
-        let file = FakeFile::new("/example.rs");
-        let mut buffer = Buffer::from_file(0, text, Box::new(file.clone()), cx);
-        buffer.set_language(Some(Arc::new(rust_lang())), None, cx);
-        let diagnostics = vec![
-            lsp::Diagnostic {
-                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
-                severity: Some(DiagnosticSeverity::WARNING),
-                message: "error 1".to_string(),
-                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
-                    location: lsp::Location {
-                        uri: lsp::Url::from_file_path(&file.abs_path).unwrap(),
-                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
-                    },
-                    message: "error 1 hint 1".to_string(),
-                }]),
-                ..Default::default()
-            },
-            lsp::Diagnostic {
-                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
-                severity: Some(DiagnosticSeverity::HINT),
-                message: "error 1 hint 1".to_string(),
-                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
-                    location: lsp::Location {
-                        uri: lsp::Url::from_file_path(&file.abs_path).unwrap(),
-                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
-                    },
-                    message: "original diagnostic".to_string(),
-                }]),
-                ..Default::default()
-            },
-            lsp::Diagnostic {
-                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
-                severity: Some(DiagnosticSeverity::ERROR),
-                message: "error 2".to_string(),
-                related_information: Some(vec![
-                    lsp::DiagnosticRelatedInformation {
-                        location: lsp::Location {
-                            uri: lsp::Url::from_file_path(&file.abs_path).unwrap(),
-                            range: lsp::Range::new(
-                                lsp::Position::new(1, 13),
-                                lsp::Position::new(1, 15),
-                            ),
-                        },
-                        message: "error 2 hint 1".to_string(),
-                    },
-                    lsp::DiagnosticRelatedInformation {
-                        location: lsp::Location {
-                            uri: lsp::Url::from_file_path(&file.abs_path).unwrap(),
-                            range: lsp::Range::new(
-                                lsp::Position::new(1, 13),
-                                lsp::Position::new(1, 15),
-                            ),
-                        },
-                        message: "error 2 hint 2".to_string(),
-                    },
-                ]),
-                ..Default::default()
-            },
-            lsp::Diagnostic {
-                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
-                severity: Some(DiagnosticSeverity::HINT),
-                message: "error 2 hint 1".to_string(),
-                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
-                    location: lsp::Location {
-                        uri: lsp::Url::from_file_path(&file.abs_path).unwrap(),
-                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
-                    },
-                    message: "original diagnostic".to_string(),
-                }]),
-                ..Default::default()
-            },
-            lsp::Diagnostic {
-                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
-                severity: Some(DiagnosticSeverity::HINT),
-                message: "error 2 hint 2".to_string(),
-                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
-                    location: lsp::Location {
-                        uri: lsp::Url::from_file_path(&file.abs_path).unwrap(),
-                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
-                    },
-                    message: "original diagnostic".to_string(),
-                }]),
-                ..Default::default()
-            },
-        ];
-        buffer.update_diagnostics(None, diagnostics, cx).unwrap();
-        assert_eq!(
-            buffer
-                .diagnostics_in_range::<_, Point>(0..buffer.len())
-                .collect::<Vec<_>>(),
-            &[
-                (
-                    Point::new(1, 8)..Point::new(1, 9),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::WARNING,
-                        message: "error 1".to_string(),
-                        group_id: 0,
-                        is_primary: true,
-                    }
-                ),
-                (
-                    Point::new(1, 8)..Point::new(1, 9),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::HINT,
-                        message: "error 1 hint 1".to_string(),
-                        group_id: 0,
-                        is_primary: false,
-                    }
-                ),
-                (
-                    Point::new(1, 13)..Point::new(1, 15),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::HINT,
-                        message: "error 2 hint 1".to_string(),
-                        group_id: 1,
-                        is_primary: false,
-                    }
-                ),
-                (
-                    Point::new(1, 13)..Point::new(1, 15),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::HINT,
-                        message: "error 2 hint 2".to_string(),
-                        group_id: 1,
-                        is_primary: false,
-                    }
-                ),
-                (
-                    Point::new(2, 8)..Point::new(2, 17),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::ERROR,
-                        message: "error 2".to_string(),
-                        group_id: 1,
-                        is_primary: true,
-                    }
-                )
-            ]
-        );
-
-        assert_eq!(
-            buffer.diagnostic_group(0).collect::<Vec<_>>(),
-            &[
-                (
-                    Point::new(1, 8)..Point::new(1, 9),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::WARNING,
-                        message: "error 1".to_string(),
-                        group_id: 0,
-                        is_primary: true,
-                    }
-                ),
-                (
-                    Point::new(1, 8)..Point::new(1, 9),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::HINT,
-                        message: "error 1 hint 1".to_string(),
-                        group_id: 0,
-                        is_primary: false,
-                    }
-                ),
-            ]
-        );
-        assert_eq!(
-            buffer.diagnostic_group(1).collect::<Vec<_>>(),
-            &[
-                (
-                    Point::new(1, 13)..Point::new(1, 15),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::HINT,
-                        message: "error 2 hint 1".to_string(),
-                        group_id: 1,
-                        is_primary: false,
-                    }
-                ),
-                (
-                    Point::new(1, 13)..Point::new(1, 15),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::HINT,
-                        message: "error 2 hint 2".to_string(),
-                        group_id: 1,
-                        is_primary: false,
-                    }
-                ),
-                (
-                    Point::new(2, 8)..Point::new(2, 17),
-                    &Diagnostic {
-                        severity: DiagnosticSeverity::ERROR,
-                        message: "error 2".to_string(),
-                        group_id: 1,
-                        is_primary: true,
-                    }
-                )
-            ]
-        );
-
-        buffer
-    });
-}
-
 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
     buffer: &Buffer,
     range: Range<T>,
@@ -995,13 +858,17 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
 #[test]
 fn test_contiguous_ranges() {
     assert_eq!(
-        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::<Vec<_>>(),
+        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
         &[1..4, 5..7, 9..13]
     );
 
     // Respects the `max_len` parameter
     assert_eq!(
-        contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::<Vec<_>>(),
+        contiguous_ranges(
+            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
+            3
+        )
+        .collect::<Vec<_>>(),
         &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
     );
 }
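
The call sites above gained `.into_iter()` because `contiguous_ranges` now takes an iterator of values rather than a fixed-size array. Below is a minimal sketch of a helper with the behavior these assertions check; the element type (`u32`) and exact signature are assumptions here, not the crate's definition.

```rust
use std::ops::Range;

// Sketch only: groups consecutive integers into half-open ranges, never letting
// a range exceed `max_len` items. It satisfies the assertions in
// `test_contiguous_ranges` above.
fn contiguous_ranges(
    values: impl IntoIterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        match values.next() {
            Some(value) => {
                if let Some(range) = &mut current {
                    if value == range.end && (range.end - range.start) < max_len as u32 {
                        range.end += 1;
                        continue;
                    }
                    // The run ended (gap or max_len reached): yield it, start a new one.
                    return current.replace(value..value + 1);
                }
                current = Some(value..value + 1);
            }
            None => return current.take(),
        }
    })
}
```
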
@@ -1011,11 +878,13 @@ impl Buffer {
         &self,
         range: Range<T>,
     ) -> Option<(Range<Point>, Range<Point>)> {
-        self.enclosing_bracket_ranges(range).map(|(start, end)| {
-            let point_start = start.start.to_point(self)..start.end.to_point(self);
-            let point_end = end.start.to_point(self)..end.end.to_point(self);
-            (point_start, point_end)
-        })
+        self.snapshot()
+            .enclosing_bracket_ranges(range)
+            .map(|(start, end)| {
+                let point_start = start.start.to_point(self)..start.end.to_point(self);
+                let point_end = end.start.to_point(self)..end.end.to_point(self);
+                (point_start, point_end)
+            })
     }
 }
 
@@ -1045,80 +914,3 @@ fn rust_lang() -> Language {
 fn empty(point: Point) -> Range<Point> {
     point..point
 }
-
-#[derive(Clone)]
-struct FakeFile {
-    abs_path: PathBuf,
-}
-
-impl FakeFile {
-    fn new(abs_path: impl Into<PathBuf>) -> Self {
-        Self {
-            abs_path: abs_path.into(),
-        }
-    }
-}
-
-impl File for FakeFile {
-    fn worktree_id(&self) -> usize {
-        todo!()
-    }
-
-    fn entry_id(&self) -> Option<usize> {
-        todo!()
-    }
-
-    fn mtime(&self) -> SystemTime {
-        SystemTime::now()
-    }
-
-    fn path(&self) -> &Arc<Path> {
-        todo!()
-    }
-
-    fn abs_path(&self) -> Option<PathBuf> {
-        Some(self.abs_path.clone())
-    }
-
-    fn full_path(&self) -> PathBuf {
-        todo!()
-    }
-
-    fn file_name(&self) -> Option<OsString> {
-        todo!()
-    }
-
-    fn is_deleted(&self) -> bool {
-        todo!()
-    }
-
-    fn save(
-        &self,
-        _: u64,
-        _: Rope,
-        _: clock::Global,
-        _: &mut MutableAppContext,
-    ) -> Task<Result<(clock::Global, SystemTime)>> {
-        todo!()
-    }
-
-    fn load_local(&self, _: &AppContext) -> Option<Task<Result<String>>> {
-        todo!()
-    }
-
-    fn buffer_updated(&self, _: u64, _: super::Operation, _: &mut MutableAppContext) {
-        todo!()
-    }
-
-    fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {
-        todo!()
-    }
-
-    fn boxed_clone(&self) -> Box<dyn File> {
-        todo!()
-    }
-
-    fn as_any(&self) -> &dyn Any {
-        todo!()
-    }
-}
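
Taken together, the test updates above show the new shape of the buffer diagnostics API in this PR: diagnostics are handed to `update_diagnostics` as `DiagnosticEntry` values in `PointUtf16` coordinates under a provider name (here `"lsp"`), and are read back from a `snapshot()` as owned `(provider_name, DiagnosticEntry<T>)` pairs in whatever coordinate type the caller requests. A condensed sketch of that round trip; `buffer`, `cx`, and the imports come from the surrounding test module, so this illustrates the call shape rather than standing alone:

```rust
// Report one diagnostic under the "lsp" provider; unspecified Diagnostic
// fields fall back to Default, as in the tests above.
buffer
    .update_diagnostics(
        "lsp".into(),
        None,
        vec![DiagnosticEntry {
            range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
            diagnostic: Diagnostic {
                severity: DiagnosticSeverity::ERROR,
                message: "undefined variable 'A'".to_string(),
                is_disk_based: true,
                group_id: 0,
                is_primary: true,
                ..Default::default()
            },
        }],
        cx,
    )
    .unwrap();

// Read back through an immutable snapshot; the turbofish picks the coordinate
// type (Point here), and each item pairs the provider name with an owned entry.
for (provider, entry) in buffer
    .snapshot()
    .diagnostics_in_range::<_, Point>(0..buffer.len())
{
    eprintln!("{}: {:?} {}", provider, entry.range, entry.diagnostic.message);
}
```
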

crates/lsp/src/lsp.rs

@@ -226,7 +226,11 @@ impl LanguageServer {
             process_id: Default::default(),
             root_path: Default::default(),
             root_uri: Some(root_uri),
-            initialization_options: Default::default(),
+            initialization_options: Some(json!({
+                "checkOnSave": {
+                    "enable": false
+                },
+            })),
             capabilities: lsp_types::ClientCapabilities {
                 experimental: Some(json!({
                     "serverStatusNotification": true,

crates/project/Cargo.toml

@@ -13,6 +13,7 @@ test-support = ["language/test-support", "text/test-support"]
 text = { path = "../text" }
 client = { path = "../client" }
 clock = { path = "../clock" }
+collections = { path = "../collections" }
 fsevent = { path = "../fsevent" }
 fuzzy = { path = "../fuzzy" }
 gpui = { path = "../gpui" }
@@ -37,6 +38,7 @@ toml = "0.5"
 
 [dev-dependencies]
 client = { path = "../client", features = ["test-support"] }
+collections = { path = "../collections", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
 lsp = { path = "../lsp", features = ["test-support"] }

crates/project/src/fs.rs

@@ -134,6 +134,7 @@ impl Fs for RealFs {
     }
 }
 
+#[cfg(any(test, feature = "test-support"))]
 #[derive(Clone, Debug)]
 struct FakeFsEntry {
     metadata: Metadata,

crates/project/src/project.rs

@@ -2,34 +2,65 @@ pub mod fs;
 mod ignore;
 mod worktree;
 
-use anyhow::Result;
-use client::{Client, UserStore};
+use anyhow::{anyhow, Result};
+use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
+use clock::ReplicaId;
+use collections::HashMap;
 use futures::Future;
 use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
-use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
-use language::LanguageRegistry;
+use gpui::{
+    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
+};
+use language::{Buffer, DiagnosticEntry, LanguageRegistry};
+use lsp::DiagnosticSeverity;
+use postage::{prelude::Stream, watch};
 use std::{
     path::Path,
     sync::{atomic::AtomicBool, Arc},
 };
-use util::TryFutureExt as _;
+use util::{ResultExt, TryFutureExt as _};
 
 pub use fs::*;
 pub use worktree::*;
 
 pub struct Project {
     worktrees: Vec<ModelHandle<Worktree>>,
-    active_worktree: Option<usize>,
     active_entry: Option<ProjectEntry>,
     languages: Arc<LanguageRegistry>,
     client: Arc<client::Client>,
     user_store: ModelHandle<UserStore>,
     fs: Arc<dyn Fs>,
+    client_state: ProjectClientState,
+    collaborators: HashMap<PeerId, Collaborator>,
+    subscriptions: Vec<client::Subscription>,
+}
+
+enum ProjectClientState {
+    Local {
+        is_shared: bool,
+        remote_id_tx: watch::Sender<Option<u64>>,
+        remote_id_rx: watch::Receiver<Option<u64>>,
+        _maintain_remote_id_task: Task<Option<()>>,
+    },
+    Remote {
+        sharing_has_stopped: bool,
+        remote_id: u64,
+        replica_id: ReplicaId,
+    },
 }
 
+#[derive(Clone, Debug)]
+pub struct Collaborator {
+    pub user: Arc<User>,
+    pub peer_id: PeerId,
+    pub replica_id: ReplicaId,
+}
+
+#[derive(Debug)]
 pub enum Event {
     ActiveEntryChanged(Option<ProjectEntry>),
     WorktreeRemoved(usize),
+    DiagnosticsUpdated(ProjectPath),
 }
 
 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
@@ -38,6 +69,39 @@ pub struct ProjectPath {
     pub path: Arc<Path>,
 }
 
+#[derive(Clone)]
+pub struct DiagnosticSummary {
+    pub error_count: usize,
+    pub warning_count: usize,
+    pub info_count: usize,
+    pub hint_count: usize,
+}
+
+impl DiagnosticSummary {
+    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
+        let mut this = Self {
+            error_count: 0,
+            warning_count: 0,
+            info_count: 0,
+            hint_count: 0,
+        };
+
+        for entry in diagnostics {
+            if entry.diagnostic.is_primary {
+                match entry.diagnostic.severity {
+                    DiagnosticSeverity::ERROR => this.error_count += 1,
+                    DiagnosticSeverity::WARNING => this.warning_count += 1,
+                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
+                    DiagnosticSeverity::HINT => this.hint_count += 1,
+                    _ => {}
+                }
+            }
+        }
+
+        this
+    }
+}
+
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct ProjectEntry {
     pub worktree_id: usize,
@@ -45,105 +109,388 @@ pub struct ProjectEntry {
 }
 
 impl Project {
-    pub fn new(
+    pub fn local(
+        client: Arc<Client>,
+        user_store: ModelHandle<UserStore>,
         languages: Arc<LanguageRegistry>,
+        fs: Arc<dyn Fs>,
+        cx: &mut MutableAppContext,
+    ) -> ModelHandle<Self> {
+        cx.add_model(|cx: &mut ModelContext<Self>| {
+            let (remote_id_tx, remote_id_rx) = watch::channel();
+            let _maintain_remote_id_task = cx.spawn_weak({
+                let rpc = client.clone();
+                move |this, mut cx| {
+                    async move {
+                        let mut status = rpc.status();
+                        while let Some(status) = status.recv().await {
+                            if let Some(this) = this.upgrade(&cx) {
+                                let remote_id = if let client::Status::Connected { .. } = status {
+                                    let response = rpc.request(proto::RegisterProject {}).await?;
+                                    Some(response.project_id)
+                                } else {
+                                    None
+                                };
+
+                                if let Some(project_id) = remote_id {
+                                    let mut registrations = Vec::new();
+                                    this.read_with(&cx, |this, cx| {
+                                        for worktree in &this.worktrees {
+                                            let worktree_id = worktree.id() as u64;
+                                            let worktree = worktree.read(cx).as_local().unwrap();
+                                            registrations.push(rpc.request(
+                                                proto::RegisterWorktree {
+                                                    project_id,
+                                                    worktree_id,
+                                                    root_name: worktree.root_name().to_string(),
+                                                    authorized_logins: worktree.authorized_logins(),
+                                                },
+                                            ));
+                                        }
+                                    });
+                                    for registration in registrations {
+                                        registration.await?;
+                                    }
+                                }
+                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
+                            }
+                        }
+                        Ok(())
+                    }
+                    .log_err()
+                }
+            });
+
+            Self {
+                worktrees: Default::default(),
+                collaborators: Default::default(),
+                client_state: ProjectClientState::Local {
+                    is_shared: false,
+                    remote_id_tx,
+                    remote_id_rx,
+                    _maintain_remote_id_task,
+                },
+                subscriptions: Vec::new(),
+                active_entry: None,
+                languages,
+                client,
+                user_store,
+                fs,
+            }
+        })
+    }
+
+    pub async fn remote(
+        remote_id: u64,
         client: Arc<Client>,
         user_store: ModelHandle<UserStore>,
+        languages: Arc<LanguageRegistry>,
         fs: Arc<dyn Fs>,
-    ) -> Self {
-        Self {
-            worktrees: Default::default(),
-            active_worktree: None,
+        cx: &mut AsyncAppContext,
+    ) -> Result<ModelHandle<Self>> {
+        client.authenticate_and_connect(&cx).await?;
+
+        let response = client
+            .request(proto::JoinProject {
+                project_id: remote_id,
+            })
+            .await?;
+
+        let replica_id = response.replica_id as ReplicaId;
+
+        let mut worktrees = Vec::new();
+        for worktree in response.worktrees {
+            worktrees.push(
+                Worktree::remote(
+                    remote_id,
+                    replica_id,
+                    worktree,
+                    client.clone(),
+                    user_store.clone(),
+                    languages.clone(),
+                    cx,
+                )
+                .await?,
+            );
+        }
+
+        let user_ids = response
+            .collaborators
+            .iter()
+            .map(|peer| peer.user_id)
+            .collect();
+        user_store
+            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
+            .await?;
+        let mut collaborators = HashMap::default();
+        for message in response.collaborators {
+            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
+            collaborators.insert(collaborator.peer_id, collaborator);
+        }
+
+        Ok(cx.add_model(|cx| Self {
+            worktrees,
             active_entry: None,
+            collaborators,
             languages,
-            client,
             user_store,
             fs,
+            subscriptions: vec![
+                client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
+            ],
+            client,
+            client_state: ProjectClientState::Remote {
+                sharing_has_stopped: false,
+                remote_id,
+                replica_id,
+            },
+        }))
+    }
+
+    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
+        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
+            *remote_id_tx.borrow_mut() = remote_id;
+        }
+
+        self.subscriptions.clear();
+        if let Some(remote_id) = remote_id {
+            let client = &self.client;
+            self.subscriptions.extend([
+                client.subscribe_to_entity(remote_id, cx, Self::handle_open_buffer),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_close_buffer),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer),
+                client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
+            ]);
+        }
+    }
+
+    pub fn remote_id(&self) -> Option<u64> {
+        match &self.client_state {
+            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
+            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
+        }
+    }
+
+    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
+        let mut id = None;
+        let mut watch = None;
+        match &self.client_state {
+            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
+            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
+        }
+
+        async move {
+            if let Some(id) = id {
+                return id;
+            }
+            let mut watch = watch.unwrap();
+            loop {
+                let id = *watch.borrow();
+                if let Some(id) = id {
+                    return id;
+                }
+                watch.recv().await;
+            }
+        }
+    }
+
+    pub fn replica_id(&self) -> ReplicaId {
+        match &self.client_state {
+            ProjectClientState::Local { .. } => 0,
+            ProjectClientState::Remote { replica_id, .. } => *replica_id,
         }
     }
 
+    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
+        &self.collaborators
+    }
+
     pub fn worktrees(&self) -> &[ModelHandle<Worktree>] {
         &self.worktrees
     }
 
-    pub fn worktree_for_id(&self, id: usize) -> Option<ModelHandle<Worktree>> {
+    pub fn worktree_for_id(&self, id: usize, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
         self.worktrees
             .iter()
-            .find(|worktree| worktree.id() == id)
+            .find(|worktree| worktree.read(cx).id() == id)
             .cloned()
     }
 
-    pub fn add_local_worktree(
-        &mut self,
-        abs_path: &Path,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ModelHandle<Worktree>>> {
-        let fs = self.fs.clone();
-        let client = self.client.clone();
-        let user_store = self.user_store.clone();
-        let languages = self.languages.clone();
-        let path = Arc::from(abs_path);
+    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
+        let rpc = self.client.clone();
         cx.spawn(|this, mut cx| async move {
-            let worktree =
-                Worktree::open_local(client, user_store, path, fs, languages, &mut cx).await?;
+            let project_id = this.update(&mut cx, |this, _| {
+                if let ProjectClientState::Local {
+                    is_shared,
+                    remote_id_rx,
+                    ..
+                } = &mut this.client_state
+                {
+                    *is_shared = true;
+                    remote_id_rx
+                        .borrow()
+                        .ok_or_else(|| anyhow!("no project id"))
+                } else {
+                    Err(anyhow!("can't share a remote project"))
+                }
+            })?;
+
+            rpc.request(proto::ShareProject { project_id }).await?;
+            let mut tasks = Vec::new();
             this.update(&mut cx, |this, cx| {
-                this.add_worktree(worktree.clone(), cx);
+                for worktree in &this.worktrees {
+                    worktree.update(cx, |worktree, cx| {
+                        let worktree = worktree.as_local_mut().unwrap();
+                        tasks.push(worktree.share(project_id, cx));
+                    });
+                }
             });
-            Ok(worktree)
+            for task in tasks {
+                task.await?;
+            }
+            this.update(&mut cx, |_, cx| cx.notify());
+            Ok(())
         })
     }
 
-    pub fn add_remote_worktree(
+    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<anyhow::Result<()>> {
+        let rpc = self.client.clone();
+        cx.spawn(|this, mut cx| async move {
+            let project_id = this.update(&mut cx, |this, _| {
+                if let ProjectClientState::Local {
+                    is_shared,
+                    remote_id_rx,
+                    ..
+                } = &mut this.client_state
+                {
+                    *is_shared = false;
+                    remote_id_rx
+                        .borrow()
+                        .ok_or_else(|| anyhow!("no project id"))
+                } else {
+                    Err(anyhow!("can't share a remote project"))
+                }
+            })?;
+
+            rpc.send(proto::UnshareProject { project_id }).await?;
+            this.update(&mut cx, |this, cx| {
+                this.collaborators.clear();
+                cx.notify()
+            });
+            Ok(())
+        })
+    }
+
+    pub fn is_read_only(&self) -> bool {
+        match &self.client_state {
+            ProjectClientState::Local { .. } => false,
+            ProjectClientState::Remote {
+                sharing_has_stopped,
+                ..
+            } => *sharing_has_stopped,
+        }
+    }
+
+    pub fn is_local(&self) -> bool {
+        match &self.client_state {
+            ProjectClientState::Local { .. } => true,
+            ProjectClientState::Remote { .. } => false,
+        }
+    }
+
+    pub fn open_buffer(
+        &self,
+        path: ProjectPath,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<ModelHandle<Buffer>>> {
+        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
+            worktree.update(cx, |worktree, cx| worktree.open_buffer(path.path, cx))
+        } else {
+            cx.spawn(|_, _| async move { Err(anyhow!("no such worktree")) })
+        }
+    }
+
+    pub fn is_shared(&self) -> bool {
+        match &self.client_state {
+            ProjectClientState::Local { is_shared, .. } => *is_shared,
+            ProjectClientState::Remote { .. } => false,
+        }
+    }
+
+    pub fn add_local_worktree(
         &mut self,
-        remote_id: u64,
+        abs_path: impl AsRef<Path>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<ModelHandle<Worktree>>> {
-        let rpc = self.client.clone();
-        let languages = self.languages.clone();
+        let fs = self.fs.clone();
+        let client = self.client.clone();
         let user_store = self.user_store.clone();
-        cx.spawn(|this, mut cx| async move {
-            rpc.authenticate_and_connect(&cx).await?;
+        let languages = self.languages.clone();
+        let path = Arc::from(abs_path.as_ref());
+        cx.spawn(|project, mut cx| async move {
             let worktree =
-                Worktree::open_remote(rpc.clone(), remote_id, languages, user_store, &mut cx)
+                Worktree::open_local(client.clone(), user_store, path, fs, languages, &mut cx)
                     .await?;
-            this.update(&mut cx, |this, cx| {
-                cx.subscribe(&worktree, move |this, _, event, cx| match event {
-                    worktree::Event::Closed => {
-                        this.close_remote_worktree(remote_id, cx);
-                        cx.notify();
-                    }
-                })
-                .detach();
-                this.add_worktree(worktree.clone(), cx);
+
+            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
+                project.add_worktree(worktree.clone(), cx);
+                (project.remote_id(), project.is_shared())
             });
+
+            if let Some(project_id) = remote_project_id {
+                let worktree_id = worktree.id() as u64;
+                let register_message = worktree.update(&mut cx, |worktree, _| {
+                    let worktree = worktree.as_local_mut().unwrap();
+                    proto::RegisterWorktree {
+                        project_id,
+                        worktree_id,
+                        root_name: worktree.root_name().to_string(),
+                        authorized_logins: worktree.authorized_logins(),
+                    }
+                });
+                client.request(register_message).await?;
+                if is_shared {
+                    worktree
+                        .update(&mut cx, |worktree, cx| {
+                            worktree.as_local_mut().unwrap().share(project_id, cx)
+                        })
+                        .await?;
+                }
+            }
+
             Ok(worktree)
         })
     }
 
     fn add_worktree(&mut self, worktree: ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
         cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
-        if self.active_worktree.is_none() {
-            self.set_active_worktree(Some(worktree.id()), cx);
-        }
+        cx.subscribe(&worktree, |_, worktree, event, cx| match event {
+            worktree::Event::DiagnosticsUpdated(path) => {
+                cx.emit(Event::DiagnosticsUpdated(ProjectPath {
+                    worktree_id: worktree.id(),
+                    path: path.clone(),
+                }));
+            }
+        })
+        .detach();
         self.worktrees.push(worktree);
         cx.notify();
     }
 
-    fn set_active_worktree(&mut self, worktree_id: Option<usize>, cx: &mut ModelContext<Self>) {
-        if self.active_worktree != worktree_id {
-            self.active_worktree = worktree_id;
-            cx.notify();
-        }
-    }
-
-    pub fn active_worktree(&self) -> Option<ModelHandle<Worktree>> {
-        self.active_worktree
-            .and_then(|worktree_id| self.worktree_for_id(worktree_id))
-    }
-
     pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
         let new_active_entry = entry.and_then(|project_path| {
-            let worktree = self.worktree_for_id(project_path.worktree_id)?;
+            let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
             let entry = worktree.read(cx).entry_for_path(project_path.path)?;
             Some(ProjectEntry {
                 worktree_id: project_path.worktree_id,
@@ -151,92 +498,257 @@ impl Project {
             })
         });
         if new_active_entry != self.active_entry {
-            if let Some(worktree_id) = new_active_entry.map(|e| e.worktree_id) {
-                self.set_active_worktree(Some(worktree_id), cx);
-            }
             self.active_entry = new_active_entry;
             cx.emit(Event::ActiveEntryChanged(new_active_entry));
         }
     }
 
+    pub fn diagnose(&self, cx: &mut ModelContext<Self>) {
+        for worktree_handle in &self.worktrees {
+            if let Some(worktree) = worktree_handle.read(cx).as_local() {
+                for language in worktree.languages() {
+                    if let Some(provider) = language.diagnostic_provider().cloned() {
+                        let worktree_path = worktree.abs_path().clone();
+                        let worktree_handle = worktree_handle.downgrade();
+                        cx.spawn_weak(|_, mut cx| async move {
+                            let diagnostics = provider.diagnose(worktree_path).await.log_err()?;
+                            let worktree_handle = worktree_handle.upgrade(&cx)?;
+                            worktree_handle.update(&mut cx, |worktree, cx| {
+                                for (path, diagnostics) in diagnostics {
+                                    worktree
+                                        .update_diagnostics_from_provider(
+                                            path.into(),
+                                            diagnostics,
+                                            cx,
+                                        )
+                                        .log_err()?;
+                                }
+                                Some(())
+                            })
+                        })
+                        .detach();
+                    }
+                }
+            }
+        }
+    }
+
+    pub fn diagnostic_summaries<'a>(
+        &'a self,
+        cx: &'a AppContext,
+    ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
+        self.worktrees.iter().flat_map(move |worktree| {
+            let worktree_id = worktree.id();
+            worktree
+                .read(cx)
+                .diagnostic_summaries()
+                .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
+        })
+    }
+
     pub fn active_entry(&self) -> Option<ProjectEntry> {
         self.active_entry
     }
 
-    pub fn share_worktree(&self, remote_id: u64, cx: &mut ModelContext<Self>) {
-        let rpc = self.client.clone();
+    // RPC message handlers
+
+    fn handle_unshare_project(
+        &mut self,
+        _: TypedEnvelope<proto::UnshareProject>,
+        _: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        if let ProjectClientState::Remote {
+            sharing_has_stopped,
+            ..
+        } = &mut self.client_state
+        {
+            *sharing_has_stopped = true;
+            self.collaborators.clear();
+            cx.notify();
+            Ok(())
+        } else {
+            unreachable!()
+        }
+    }
+
+    fn handle_add_collaborator(
+        &mut self,
+        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
+        _: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        let user_store = self.user_store.clone();
+        let collaborator = envelope
+            .payload
+            .collaborator
+            .take()
+            .ok_or_else(|| anyhow!("empty collaborator"))?;
+
         cx.spawn(|this, mut cx| {
             async move {
-                rpc.authenticate_and_connect(&cx).await?;
-
-                let task = this.update(&mut cx, |this, cx| {
-                    for worktree in &this.worktrees {
-                        let task = worktree.update(cx, |worktree, cx| {
-                            worktree.as_local_mut().and_then(|worktree| {
-                                if worktree.remote_id() == Some(remote_id) {
-                                    Some(worktree.share(cx))
-                                } else {
-                                    None
-                                }
-                            })
-                        });
-                        if task.is_some() {
-                            return task;
-                        }
-                    }
-                    None
+                let collaborator =
+                    Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
+                this.update(&mut cx, |this, cx| {
+                    this.collaborators
+                        .insert(collaborator.peer_id, collaborator);
+                    cx.notify();
                 });
-
-                if let Some(task) = task {
-                    task.await?;
-                }
-
                 Ok(())
             }
             .log_err()
         })
         .detach();
+
+        Ok(())
     }
 
-    pub fn unshare_worktree(&mut self, remote_id: u64, cx: &mut ModelContext<Self>) {
+    fn handle_remove_collaborator(
+        &mut self,
+        envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
+        _: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        let peer_id = PeerId(envelope.payload.peer_id);
+        let replica_id = self
+            .collaborators
+            .remove(&peer_id)
+            .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
+            .replica_id;
         for worktree in &self.worktrees {
-            if worktree.update(cx, |worktree, cx| {
-                if let Some(worktree) = worktree.as_local_mut() {
-                    if worktree.remote_id() == Some(remote_id) {
-                        worktree.unshare(cx);
-                        return true;
-                    }
-                }
-                false
-            }) {
-                break;
-            }
+            worktree.update(cx, |worktree, cx| {
+                worktree.remove_collaborator(peer_id, replica_id, cx);
+            })
         }
+        Ok(())
     }
 
-    pub fn close_remote_worktree(&mut self, id: u64, cx: &mut ModelContext<Self>) {
-        let mut reset_active = None;
-        self.worktrees.retain(|worktree| {
-            let keep = worktree.update(cx, |worktree, cx| {
-                if let Some(worktree) = worktree.as_remote_mut() {
-                    if worktree.remote_id() == id {
-                        worktree.close_all_buffers(cx);
-                        return false;
-                    }
-                }
-                true
-            });
-            if !keep {
-                cx.emit(Event::WorktreeRemoved(worktree.id()));
-                reset_active = Some(worktree.id());
+    fn handle_share_worktree(
+        &mut self,
+        envelope: TypedEnvelope<proto::ShareWorktree>,
+        client: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
+        let replica_id = self.replica_id();
+        let worktree = envelope
+            .payload
+            .worktree
+            .ok_or_else(|| anyhow!("invalid worktree"))?;
+        let user_store = self.user_store.clone();
+        let languages = self.languages.clone();
+        cx.spawn(|this, mut cx| {
+            async move {
+                let worktree = Worktree::remote(
+                    remote_id, replica_id, worktree, client, user_store, languages, &mut cx,
+                )
+                .await?;
+                this.update(&mut cx, |this, cx| this.add_worktree(worktree, cx));
+                Ok(())
             }
-            keep
+            .log_err()
+        })
+        .detach();
+        Ok(())
+    }
+
+    fn handle_unregister_worktree(
+        &mut self,
+        envelope: TypedEnvelope<proto::UnregisterWorktree>,
+        _: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        self.worktrees.retain(|worktree| {
+            worktree.read(cx).as_remote().unwrap().remote_id() != envelope.payload.worktree_id
         });
+        cx.notify();
+        Ok(())
+    }
 
-        if self.active_worktree == reset_active {
-            self.active_worktree = self.worktrees.first().map(|w| w.id());
-            cx.notify();
+    fn handle_update_worktree(
+        &mut self,
+        envelope: TypedEnvelope<proto::UpdateWorktree>,
+        _: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) {
+            worktree.update(cx, |worktree, cx| {
+                let worktree = worktree.as_remote_mut().unwrap();
+                worktree.update_from_remote(envelope, cx)
+            })?;
+        }
+        Ok(())
+    }
+
+    pub fn handle_update_buffer(
+        &mut self,
+        envelope: TypedEnvelope<proto::UpdateBuffer>,
+        _: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) {
+            worktree.update(cx, |worktree, cx| {
+                worktree.handle_update_buffer(envelope, cx)
+            })?;
+        }
+        Ok(())
+    }
+
+    pub fn handle_save_buffer(
+        &mut self,
+        envelope: TypedEnvelope<proto::SaveBuffer>,
+        rpc: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) {
+            worktree.update(cx, |worktree, cx| {
+                worktree.handle_save_buffer(envelope, rpc, cx)
+            })?;
+        }
+        Ok(())
+    }
+
+    pub fn handle_open_buffer(
+        &mut self,
+        envelope: TypedEnvelope<proto::OpenBuffer>,
+        rpc: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> anyhow::Result<()> {
+        if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) {
+            return worktree.update(cx, |worktree, cx| {
+                worktree.handle_open_buffer(envelope, rpc, cx)
+            });
+        } else {
+            Err(anyhow!("no such worktree"))
+        }
+    }
+
+    pub fn handle_close_buffer(
+        &mut self,
+        envelope: TypedEnvelope<proto::CloseBuffer>,
+        rpc: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> anyhow::Result<()> {
+        if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) {
+            worktree.update(cx, |worktree, cx| {
+                worktree.handle_close_buffer(envelope, rpc, cx)
+            })?;
         }
+        Ok(())
+    }
+
+    pub fn handle_buffer_saved(
+        &mut self,
+        envelope: TypedEnvelope<proto::BufferSaved>,
+        _: Arc<Client>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<()> {
+        if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) {
+            worktree.update(cx, |worktree, cx| {
+                worktree.handle_buffer_saved(envelope, cx)
+            })?;
+        }
+        Ok(())
     }
 
     pub fn match_paths<'a>(
@@ -335,6 +847,52 @@ impl<'a> Iterator for CandidateSetIter<'a> {
 
 impl Entity for Project {
     type Event = Event;
+
+    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
+        match &self.client_state {
+            ProjectClientState::Local { remote_id_rx, .. } => {
+                if let Some(project_id) = *remote_id_rx.borrow() {
+                    let rpc = self.client.clone();
+                    cx.spawn(|_| async move {
+                        if let Err(err) = rpc.send(proto::UnregisterProject { project_id }).await {
+                            log::error!("error unregistering project: {}", err);
+                        }
+                    })
+                    .detach();
+                }
+            }
+            ProjectClientState::Remote { remote_id, .. } => {
+                let rpc = self.client.clone();
+                let project_id = *remote_id;
+                cx.spawn(|_| async move {
+                    if let Err(err) = rpc.send(proto::LeaveProject { project_id }).await {
+                        log::error!("error leaving project: {}", err);
+                    }
+                })
+                .detach();
+            }
+        }
+    }
+}
+
+impl Collaborator {
+    fn from_proto(
+        message: proto::Collaborator,
+        user_store: &ModelHandle<UserStore>,
+        cx: &mut AsyncAppContext,
+    ) -> impl Future<Output = Result<Self>> {
+        let user = user_store.update(cx, |user_store, cx| {
+            user_store.fetch_user(message.user_id, cx)
+        });
+
+        async move {
+            Ok(Self {
+                peer_id: PeerId(message.peer_id),
+                user: user.await?,
+                replica_id: message.replica_id as ReplicaId,
+            })
+        }
+    }
 }
 
 #[cfg(test)]
@@ -449,6 +1007,6 @@ mod tests {
         let client = client::Client::new();
         let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) });
         let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
-        cx.add_model(|_| Project::new(languages, client, user_store, fs))
+        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
     }
 }
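
One piece of the new project API above worth calling out is `DiagnosticSummary`: `Project::diagnostic_summaries` flattens every worktree's per-path summaries into `(ProjectPath, DiagnosticSummary)` pairs, and the summary only tallies entries whose diagnostic is marked `is_primary`, so grouped hints and related-information entries do not inflate the counts. A small illustration of that counting rule follows; the constructor is crate-private, so this shows the behavior from inside the crate rather than an external call:

```rust
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16};

// Two entries in the same group: one primary error, one supporting hint.
let entries = [
    DiagnosticEntry {
        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
        diagnostic: Diagnostic {
            severity: DiagnosticSeverity::ERROR,
            message: "undefined variable 'A'".to_string(),
            group_id: 0,
            is_primary: true,
            ..Default::default()
        },
    },
    DiagnosticEntry {
        range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15),
        diagnostic: Diagnostic {
            severity: DiagnosticSeverity::HINT,
            message: "related hint".to_string(),
            group_id: 0,
            is_primary: false, // not primary, so it is not counted
            ..Default::default()
        },
    },
];

let summary = DiagnosticSummary::new(&entries);
assert_eq!(summary.error_count, 1);
assert_eq!(summary.hint_count, 0);
```
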

crates/project/src/worktree.rs

@@ -1,18 +1,24 @@
 use super::{
     fs::{self, Fs},
     ignore::IgnoreStack,
+    DiagnosticSummary,
 };
 use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Context, Result};
-use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
+use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
 use clock::ReplicaId;
+use collections::{hash_map, HashMap};
+use collections::{BTreeMap, HashSet};
 use futures::{Stream, StreamExt};
 use fuzzy::CharBag;
 use gpui::{
     executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
     Task, UpgradeModelHandle, WeakModelHandle,
 };
-use language::{Buffer, Language, LanguageRegistry, Operation, Rope};
+use language::{
+    Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, File as _, Language, LanguageRegistry,
+    Operation, PointUtf16, Rope,
+};
 use lazy_static::lazy_static;
 use lsp::LanguageServer;
 use parking_lot::Mutex;
@@ -25,12 +31,12 @@ use smol::channel::{self, Sender};
 use std::{
     any::Any,
     cmp::{self, Ordering},
-    collections::HashMap,
     convert::{TryFrom, TryInto},
     ffi::{OsStr, OsString},
     fmt,
     future::Future,
-    ops::Deref,
+    mem,
+    ops::{Deref, Range},
     path::{Path, PathBuf},
     sync::{
         atomic::{AtomicUsize, Ordering::SeqCst},
@@ -40,10 +46,12 @@ use std::{
 };
 use sum_tree::Bias;
 use sum_tree::{Edit, SeekTarget, SumTree};
-use util::{ResultExt, TryFutureExt};
+use util::{post_inc, ResultExt, TryFutureExt};
 
 lazy_static! {
     static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
+    static ref DIAGNOSTIC_PROVIDER_NAME: Arc<str> = Arc::from("diagnostic_source");
+    static ref LSP_PROVIDER_NAME: Arc<str> = Arc::from("lsp");
 }
 
 #[derive(Clone, Debug)]
@@ -58,66 +66,14 @@ pub enum Worktree {
     Remote(RemoteWorktree),
 }
 
+#[derive(Debug)]
 pub enum Event {
-    Closed,
-}
-
-#[derive(Clone, Debug)]
-pub struct Collaborator {
-    pub user: Arc<User>,
-    pub peer_id: PeerId,
-    pub replica_id: ReplicaId,
-}
-
-impl Collaborator {
-    fn from_proto(
-        message: proto::Collaborator,
-        user_store: &ModelHandle<UserStore>,
-        cx: &mut AsyncAppContext,
-    ) -> impl Future<Output = Result<Self>> {
-        let user = user_store.update(cx, |user_store, cx| {
-            user_store.fetch_user(message.user_id, cx)
-        });
-
-        async move {
-            Ok(Self {
-                peer_id: PeerId(message.peer_id),
-                user: user.await?,
-                replica_id: message.replica_id as ReplicaId,
-            })
-        }
-    }
+    DiagnosticsUpdated(Arc<Path>),
 }
 
 impl Entity for Worktree {
     type Event = Event;
 
-    fn release(&mut self, cx: &mut MutableAppContext) {
-        match self {
-            Self::Local(tree) => {
-                if let Some(worktree_id) = *tree.remote_id.borrow() {
-                    let rpc = tree.client.clone();
-                    cx.spawn(|_| async move {
-                        if let Err(err) = rpc.send(proto::CloseWorktree { worktree_id }).await {
-                            log::error!("error closing worktree: {}", err);
-                        }
-                    })
-                    .detach();
-                }
-            }
-            Self::Remote(tree) => {
-                let rpc = tree.client.clone();
-                let worktree_id = tree.remote_id;
-                cx.spawn(|_| async move {
-                    if let Err(err) = rpc.send(proto::LeaveWorktree { worktree_id }).await {
-                        log::error!("error closing worktree: {}", err);
-                    }
-                })
-                .detach();
-            }
-        }
-    }
-
     fn app_will_quit(
         &mut self,
         _: &mut MutableAppContext,
@@ -168,32 +124,16 @@ impl Worktree {
         Ok(tree)
     }
 
-    pub async fn open_remote(
-        client: Arc<Client>,
-        id: u64,
-        languages: Arc<LanguageRegistry>,
-        user_store: ModelHandle<UserStore>,
-        cx: &mut AsyncAppContext,
-    ) -> Result<ModelHandle<Self>> {
-        let response = client
-            .request(proto::JoinWorktree { worktree_id: id })
-            .await?;
-        Worktree::remote(response, client, user_store, languages, cx).await
-    }
-
-    async fn remote(
-        join_response: proto::JoinWorktreeResponse,
+    pub async fn remote(
+        project_remote_id: u64,
+        replica_id: ReplicaId,
+        worktree: proto::Worktree,
         client: Arc<Client>,
         user_store: ModelHandle<UserStore>,
         languages: Arc<LanguageRegistry>,
         cx: &mut AsyncAppContext,
     ) -> Result<ModelHandle<Self>> {
-        let worktree = join_response
-            .worktree
-            .ok_or_else(|| anyhow!("empty worktree"))?;
-
         let remote_id = worktree.id;
-        let replica_id = join_response.replica_id as ReplicaId;
         let root_char_bag: CharBag = worktree
             .root_name
             .chars()
@@ -228,24 +168,10 @@ impl Worktree {
             })
             .await;
 
-        let user_ids = join_response
-            .collaborators
-            .iter()
-            .map(|peer| peer.user_id)
-            .collect();
-        user_store
-            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
-            .await?;
-        let mut collaborators = HashMap::with_capacity(join_response.collaborators.len());
-        for message in join_response.collaborators {
-            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
-            collaborators.insert(collaborator.peer_id, collaborator);
-        }
-
         let worktree = cx.update(|cx| {
             cx.add_model(|cx: &mut ModelContext<Worktree>| {
                 let snapshot = Snapshot {
-                    id: cx.model_id(),
+                    id: remote_id as usize,
                     scan_id: 0,
                     abs_path: Path::new("").into(),
                     root_name,
@@ -286,28 +212,20 @@ impl Worktree {
                     .detach();
                 }
 
-                let _subscriptions = vec![
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_update),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_unshare),
-                ];
-
                 Worktree::Remote(RemoteWorktree {
+                    project_id: project_remote_id,
                     remote_id,
                     replica_id,
                     snapshot,
                     snapshot_rx,
                     updates_tx,
                     client: client.clone(),
+                    loading_buffers: Default::default(),
                     open_buffers: Default::default(),
-                    collaborators,
+                    diagnostic_summaries: Default::default(),
                     queued_operations: Default::default(),
                     languages,
                     user_store,
-                    _subscriptions,
                 })
             })
         });
@@ -323,6 +241,14 @@ impl Worktree {
         }
     }
 
+    pub fn as_remote(&self) -> Option<&RemoteWorktree> {
+        if let Worktree::Remote(worktree) = self {
+            Some(worktree)
+        } else {
+            None
+        }
+    }
+
     pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
         if let Worktree::Local(worktree) = self {
             Some(worktree)
@@ -353,9 +279,21 @@ impl Worktree {
         }
     }
 
+    pub fn remove_collaborator(
+        &mut self,
+        peer_id: PeerId,
+        replica_id: ReplicaId,
+        cx: &mut ModelContext<Self>,
+    ) {
+        match self {
+            Worktree::Local(worktree) => worktree.remove_collaborator(peer_id, replica_id, cx),
+            Worktree::Remote(worktree) => worktree.remove_collaborator(replica_id, cx),
+        }
+    }
+
     pub fn languages(&self) -> &Arc<LanguageRegistry> {
         match self {
-            Worktree::Local(worktree) => &worktree.languages,
+            Worktree::Local(worktree) => &worktree.language_registry,
             Worktree::Remote(worktree) => &worktree.languages,
         }
     }
@@ -367,59 +305,6 @@ impl Worktree {
         }
     }
 
-    pub fn handle_add_collaborator(
-        &mut self,
-        mut envelope: TypedEnvelope<proto::AddCollaborator>,
-        _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let user_store = self.user_store().clone();
-        let collaborator = envelope
-            .payload
-            .collaborator
-            .take()
-            .ok_or_else(|| anyhow!("empty collaborator"))?;
-
-        cx.spawn(|this, mut cx| {
-            async move {
-                let collaborator =
-                    Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
-                this.update(&mut cx, |this, cx| match this {
-                    Worktree::Local(worktree) => worktree.add_collaborator(collaborator, cx),
-                    Worktree::Remote(worktree) => worktree.add_collaborator(collaborator, cx),
-                });
-                Ok(())
-            }
-            .log_err()
-        })
-        .detach();
-
-        Ok(())
-    }
-
-    pub fn handle_remove_collaborator(
-        &mut self,
-        envelope: TypedEnvelope<proto::RemoveCollaborator>,
-        _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        match self {
-            Worktree::Local(worktree) => worktree.remove_collaborator(envelope, cx),
-            Worktree::Remote(worktree) => worktree.remove_collaborator(envelope, cx),
-        }
-    }
-
-    pub fn handle_update(
-        &mut self,
-        envelope: TypedEnvelope<proto::UpdateWorktree>,
-        _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> anyhow::Result<()> {
-        self.as_remote_mut()
-            .unwrap()
-            .update_from_remote(envelope, cx)
-    }
-
     pub fn handle_open_buffer(
         &mut self,
         envelope: TypedEnvelope<proto::OpenBuffer>,
@@ -457,10 +342,21 @@ impl Worktree {
             .close_remote_buffer(envelope, cx)
     }
 
-    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
+    pub fn diagnostic_summaries<'a>(
+        &'a self,
+    ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + 'a {
+        match self {
+            Worktree::Local(worktree) => &worktree.diagnostic_summaries,
+            Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
+        }
+        .iter()
+        .map(|(path, summary)| (path.clone(), summary.clone()))
+    }
+
+    pub fn loading_buffers<'a>(&'a mut self) -> &'a mut LoadingBuffers {
         match self {
-            Worktree::Local(worktree) => &worktree.collaborators,
-            Worktree::Remote(worktree) => &worktree.collaborators,
+            Worktree::Local(worktree) => &mut worktree.loading_buffers,
+            Worktree::Remote(worktree) => &mut worktree.loading_buffers,
         }
     }
 
@@ -469,10 +365,53 @@ impl Worktree {
         path: impl AsRef<Path>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<ModelHandle<Buffer>>> {
-        match self {
-            Worktree::Local(worktree) => worktree.open_buffer(path.as_ref(), cx),
-            Worktree::Remote(worktree) => worktree.open_buffer(path.as_ref(), cx),
+        let path = path.as_ref();
+
+        // If there is already a buffer for the given path, then return it.
+        let existing_buffer = match self {
+            Worktree::Local(worktree) => worktree.get_open_buffer(path, cx),
+            Worktree::Remote(worktree) => worktree.get_open_buffer(path, cx),
+        };
+        if let Some(existing_buffer) = existing_buffer {
+            return cx.spawn(move |_, _| async move { Ok(existing_buffer) });
         }
+
+        let path: Arc<Path> = Arc::from(path);
+        let mut loading_watch = match self.loading_buffers().entry(path.clone()) {
+            // If the given path is already being loaded, then wait for that existing
+            // task to complete and return the same buffer.
+            hash_map::Entry::Occupied(e) => e.get().clone(),
+
+            // Otherwise, record the fact that this path is now being loaded.
+            hash_map::Entry::Vacant(entry) => {
+                let (mut tx, rx) = postage::watch::channel();
+                entry.insert(rx.clone());
+
+                let load_buffer = match self {
+                    Worktree::Local(worktree) => worktree.open_buffer(&path, cx),
+                    Worktree::Remote(worktree) => worktree.open_buffer(&path, cx),
+                };
+                cx.spawn(move |this, mut cx| async move {
+                    let result = load_buffer.await;
+
+                    // After the buffer loads, record the fact that it is no longer
+                    // loading.
+                    this.update(&mut cx, |this, _| this.loading_buffers().remove(&path));
+                    *tx.borrow_mut() = Some(result.map_err(|e| Arc::new(e)));
+                })
+                .detach();
+                rx
+            }
+        };
+
+        cx.spawn(|_, _| async move {
+            loop {
+                if let Some(result) = loading_watch.borrow().as_ref() {
+                    return result.clone().map_err(|e| anyhow!("{}", e));
+                }
+                loading_watch.recv().await;
+            }
+        })
     }
 
     #[cfg(feature = "test-support")]
@@ -505,7 +444,6 @@ impl Worktree {
     pub fn handle_update_buffer(
         &mut self,
         envelope: TypedEnvelope<proto::UpdateBuffer>,
-        _: Arc<Client>,
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         let payload = envelope.payload.clone();
@@ -556,9 +494,14 @@ impl Worktree {
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         let sender_id = envelope.original_sender_id()?;
-        let buffer = self
-            .as_local()
-            .unwrap()
+        let this = self.as_local().unwrap();
+        let project_id = this
+            .share
+            .as_ref()
+            .ok_or_else(|| anyhow!("can't save buffer while disconnected"))?
+            .project_id;
+
+        let buffer = this
             .shared_buffers
             .get(&sender_id)
             .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
@@ -579,6 +522,7 @@ impl Worktree {
                     rpc.respond(
                         receipt,
                         proto::BufferSaved {
+                            project_id,
                             worktree_id,
                             buffer_id,
                             version: (&version).into(),
@@ -599,7 +543,6 @@ impl Worktree {
     pub fn handle_buffer_saved(
         &mut self,
         envelope: TypedEnvelope<proto::BufferSaved>,
-        _: Arc<Client>,
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         let payload = envelope.payload.clone();
@@ -622,16 +565,6 @@ impl Worktree {
         Ok(())
     }
 
-    pub fn handle_unshare(
-        &mut self,
-        _: TypedEnvelope<proto::UnshareWorktree>,
-        _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        cx.emit(Event::Closed);
-        Ok(())
-    }
-
     fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
         match self {
             Self::Local(worktree) => {
@@ -741,40 +674,146 @@ impl Worktree {
         }
     }
 
-    fn update_diagnostics(
+    pub fn update_diagnostics_from_lsp(
         &mut self,
-        params: lsp::PublishDiagnosticsParams,
+        mut params: lsp::PublishDiagnosticsParams,
+        disk_based_sources: &HashSet<String>,
         cx: &mut ModelContext<Worktree>,
     ) -> Result<()> {
         let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?;
-        let file_path = params
+        let abs_path = params
             .uri
             .to_file_path()
-            .map_err(|_| anyhow!("URI is not a file"))?
-            .strip_prefix(&this.abs_path)
-            .context("path is not within worktree")?
-            .to_owned();
+            .map_err(|_| anyhow!("URI is not a file"))?;
+        let worktree_path = Arc::from(
+            abs_path
+                .strip_prefix(&this.abs_path)
+                .context("path is not within worktree")?,
+        );
+
+        let mut group_ids_by_diagnostic_range = HashMap::default();
+        let mut diagnostics_by_group_id = HashMap::default();
+        let mut next_group_id = 0;
+        for diagnostic in &mut params.diagnostics {
+            let source = diagnostic.source.as_ref();
+            let code = diagnostic.code.as_ref();
+            let group_id = diagnostic_ranges(&diagnostic, &abs_path)
+                .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
+                .copied()
+                .unwrap_or_else(|| {
+                    let group_id = post_inc(&mut next_group_id);
+                    for range in diagnostic_ranges(&diagnostic, &abs_path) {
+                        group_ids_by_diagnostic_range.insert((source, code, range), group_id);
+                    }
+                    group_id
+                });
 
+            diagnostics_by_group_id
+                .entry(group_id)
+                .or_insert(Vec::new())
+                .push(DiagnosticEntry {
+                    range: diagnostic.range.start.to_point_utf16()
+                        ..diagnostic.range.end.to_point_utf16(),
+                    diagnostic: Diagnostic {
+                        code: diagnostic.code.clone().map(|code| match code {
+                            lsp::NumberOrString::Number(code) => code.to_string(),
+                            lsp::NumberOrString::String(code) => code,
+                        }),
+                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
+                        message: mem::take(&mut diagnostic.message),
+                        group_id,
+                        is_primary: false,
+                        is_valid: true,
+                        is_disk_based: diagnostic
+                            .source
+                            .as_ref()
+                            .map_or(false, |source| disk_based_sources.contains(source)),
+                    },
+                });
+        }
+
+        let diagnostics = diagnostics_by_group_id
+            .into_values()
+            .flat_map(|mut diagnostics| {
+                let primary = diagnostics
+                    .iter_mut()
+                    .min_by_key(|entry| entry.diagnostic.severity)
+                    .unwrap();
+                primary.diagnostic.is_primary = true;
+                diagnostics
+            })
+            .collect::<Vec<_>>();
+
+        let this = self.as_local_mut().unwrap();
         for buffer in this.open_buffers.values() {
             if let Some(buffer) = buffer.upgrade(cx) {
                 if buffer
                     .read(cx)
                     .file()
-                    .map_or(false, |file| file.path().as_ref() == file_path)
+                    .map_or(false, |file| *file.path() == worktree_path)
                 {
                     let (remote_id, operation) = buffer.update(cx, |buffer, cx| {
                         (
                             buffer.remote_id(),
-                            buffer.update_diagnostics(params.version, params.diagnostics, cx),
+                            buffer.update_diagnostics(
+                                LSP_PROVIDER_NAME.clone(),
+                                params.version,
+                                diagnostics.clone(),
+                                cx,
+                            ),
                         )
                     });
                     self.send_buffer_update(remote_id, operation?, cx);
-                    return Ok(());
+                    break;
                 }
             }
         }
 
-        this.diagnostics.insert(file_path, params.diagnostics);
+        let this = self.as_local_mut().unwrap();
+        this.diagnostic_summaries
+            .insert(worktree_path.clone(), DiagnosticSummary::new(&diagnostics));
+        this.lsp_diagnostics
+            .insert(worktree_path.clone(), diagnostics);
+        cx.emit(Event::DiagnosticsUpdated(worktree_path.clone()));
+        Ok(())
+    }
+
+    pub fn update_diagnostics_from_provider(
+        &mut self,
+        path: Arc<Path>,
+        diagnostics: Vec<DiagnosticEntry<usize>>,
+        cx: &mut ModelContext<Worktree>,
+    ) -> Result<()> {
+        let this = self.as_local_mut().unwrap();
+        for buffer in this.open_buffers.values() {
+            if let Some(buffer) = buffer.upgrade(cx) {
+                if buffer
+                    .read(cx)
+                    .file()
+                    .map_or(false, |file| *file.path() == path)
+                {
+                    let (remote_id, operation) = buffer.update(cx, |buffer, cx| {
+                        (
+                            buffer.remote_id(),
+                            buffer.update_diagnostics(
+                                DIAGNOSTIC_PROVIDER_NAME.clone(),
+                                None,
+                                diagnostics.clone(),
+                                cx,
+                            ),
+                        )
+                    });
+                    self.send_buffer_update(remote_id, operation?, cx);
+                    break;
+                }
+            }
+        }
+
+        let this = self.as_local_mut().unwrap();
+        this.diagnostic_summaries
+            .insert(path.clone(), DiagnosticSummary::new(&diagnostics));
+        this.provider_diagnostics.insert(path.clone(), diagnostics);
+        cx.emit(Event::DiagnosticsUpdated(path.clone()));
         Ok(())
     }
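
The grouping pass in update_diagnostics_from_lsp above assigns a single group_id to every LSP diagnostic that refers to the same (source, code, range) key and then marks the most severe entry of each group as primary. Below is a stripped-down sketch of that idea on plain data; the Entry struct, flat offset ranges, and numeric severity field are simplifications invented for illustration, and the real code works on lsp::Diagnostic and DiagnosticEntry<PointUtf16> and also folds a diagnostic's related-information ranges into the grouping key.

    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    struct Entry {
        source: Option<String>,
        code: Option<String>,
        range: (u32, u32), // start..end as flat offsets
        severity: u8,      // lower value = more severe, like lsp::DiagnosticSeverity
        group_id: usize,
        is_primary: bool,
    }

    fn group_diagnostics(mut entries: Vec<Entry>) -> Vec<Entry> {
        // Entries sharing (source, code, range) get the same group id.
        let mut group_ids: HashMap<(Option<String>, Option<String>, (u32, u32)), usize> =
            HashMap::new();
        let mut next_group_id = 0;
        for entry in &mut entries {
            let key = (entry.source.clone(), entry.code.clone(), entry.range);
            entry.group_id = *group_ids.entry(key).or_insert_with(|| {
                let id = next_group_id;
                next_group_id += 1;
                id
            });
        }

        // Within each group, the entry with the minimum severity value becomes primary.
        let mut primary_ix: HashMap<usize, usize> = HashMap::new();
        for (ix, entry) in entries.iter().enumerate() {
            let slot = primary_ix.entry(entry.group_id).or_insert(ix);
            if entries[*slot].severity > entry.severity {
                *slot = ix;
            }
        }
        for ix in primary_ix.into_values() {
            entries[ix].is_primary = true;
        }
        entries
    }

    fn main() {
        let error = Entry {
            source: Some("rustc".into()),
            code: Some("E0308".into()),
            range: (10, 14),
            severity: 1,
            group_id: 0,
            is_primary: false,
        };
        let hint = Entry { severity: 4, ..error.clone() };
        let grouped = group_diagnostics(vec![error, hint]);
        assert!(grouped[0].is_primary && !grouped[1].is_primary);
    }
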
 
@@ -784,17 +823,25 @@ impl Worktree {
         operation: Operation,
         cx: &mut ModelContext<Self>,
     ) {
-        if let Some((rpc, remote_id)) = match self {
-            Worktree::Local(worktree) => worktree
-                .remote_id
-                .borrow()
-                .map(|id| (worktree.client.clone(), id)),
-            Worktree::Remote(worktree) => Some((worktree.client.clone(), worktree.remote_id)),
+        if let Some((project_id, worktree_id, rpc)) = match self {
+            Worktree::Local(worktree) => worktree.share.as_ref().map(|share| {
+                (
+                    share.project_id,
+                    worktree.id() as u64,
+                    worktree.client.clone(),
+                )
+            }),
+            Worktree::Remote(worktree) => Some((
+                worktree.project_id,
+                worktree.remote_id,
+                worktree.client.clone(),
+            )),
         } {
             cx.spawn(|worktree, mut cx| async move {
                 if let Err(error) = rpc
                     .request(proto::UpdateBuffer {
-                        worktree_id: remote_id,
+                        project_id,
+                        worktree_id,
                         buffer_id,
                         operations: vec![language::proto::serialize_operation(&operation)],
                     })
@@ -815,15 +862,18 @@ impl Worktree {
     }
 }
 
-impl Deref for Worktree {
-    type Target = Snapshot;
-
-    fn deref(&self) -> &Self::Target {
-        match self {
-            Worktree::Local(worktree) => &worktree.snapshot,
-            Worktree::Remote(worktree) => &worktree.snapshot,
-        }
-    }
+#[derive(Clone)]
+pub struct Snapshot {
+    id: usize,
+    scan_id: usize,
+    abs_path: Arc<Path>,
+    root_name: String,
+    root_char_bag: CharBag,
+    ignores: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
+    entries_by_path: SumTree<Entry>,
+    entries_by_id: SumTree<PathEntry>,
+    removed_entry_ids: HashMap<u64, usize>,
+    next_entry_id: Arc<AtomicUsize>,
 }
 
 pub struct LocalWorktree {
@@ -832,22 +882,49 @@ pub struct LocalWorktree {
     background_snapshot: Arc<Mutex<Snapshot>>,
     last_scan_state_rx: watch::Receiver<ScanState>,
     _background_scanner_task: Option<Task<()>>,
-    _maintain_remote_id_task: Task<Option<()>>,
     poll_task: Option<Task<()>>,
-    remote_id: watch::Receiver<Option<u64>>,
     share: Option<ShareState>,
+    loading_buffers: LoadingBuffers,
     open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
     shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
-    diagnostics: HashMap<PathBuf, Vec<lsp::Diagnostic>>,
-    collaborators: HashMap<PeerId, Collaborator>,
+    lsp_diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
+    provider_diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<usize>>>,
+    diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
     queued_operations: Vec<(u64, Operation)>,
-    languages: Arc<LanguageRegistry>,
+    language_registry: Arc<LanguageRegistry>,
     client: Arc<Client>,
     user_store: ModelHandle<UserStore>,
     fs: Arc<dyn Fs>,
+    languages: Vec<Arc<Language>>,
     language_servers: HashMap<String, Arc<LanguageServer>>,
 }
 
+struct ShareState {
+    project_id: u64,
+    snapshots_tx: Sender<Snapshot>,
+}
+
+pub struct RemoteWorktree {
+    project_id: u64,
+    remote_id: u64,
+    snapshot: Snapshot,
+    snapshot_rx: watch::Receiver<Snapshot>,
+    client: Arc<Client>,
+    updates_tx: postage::mpsc::Sender<proto::UpdateWorktree>,
+    replica_id: ReplicaId,
+    loading_buffers: LoadingBuffers,
+    open_buffers: HashMap<usize, RemoteBuffer>,
+    diagnostic_summaries: BTreeMap<Arc<Path>, DiagnosticSummary>,
+    languages: Arc<LanguageRegistry>,
+    user_store: ModelHandle<UserStore>,
+    queued_operations: Vec<(u64, Operation)>,
+}
+
+type LoadingBuffers = HashMap<
+    Arc<Path>,
+    postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
+>;
+
 #[derive(Default, Deserialize)]
 struct WorktreeConfig {
     collaborators: Vec<String>,
@@ -908,59 +985,26 @@ impl LocalWorktree {
                 );
             }
 
-            let (mut remote_id_tx, remote_id_rx) = watch::channel();
-            let _maintain_remote_id_task = cx.spawn_weak({
-                let rpc = client.clone();
-                move |this, cx| {
-                    async move {
-                        let mut status = rpc.status();
-                        while let Some(status) = status.recv().await {
-                            if let Some(this) = this.upgrade(&cx) {
-                                let remote_id = if let client::Status::Connected { .. } = status {
-                                    let authorized_logins = this.read_with(&cx, |this, _| {
-                                        this.as_local().unwrap().config.collaborators.clone()
-                                    });
-                                    let response = rpc
-                                        .request(proto::OpenWorktree {
-                                            root_name: root_name.clone(),
-                                            authorized_logins,
-                                        })
-                                        .await?;
-
-                                    Some(response.worktree_id)
-                                } else {
-                                    None
-                                };
-                                if remote_id_tx.send(remote_id).await.is_err() {
-                                    break;
-                                }
-                            }
-                        }
-                        Ok(())
-                    }
-                    .log_err()
-                }
-            });
-
             let tree = Self {
                 snapshot: snapshot.clone(),
                 config,
-                remote_id: remote_id_rx,
                 background_snapshot: Arc::new(Mutex::new(snapshot)),
                 last_scan_state_rx,
                 _background_scanner_task: None,
-                _maintain_remote_id_task,
                 share: None,
                 poll_task: None,
+                loading_buffers: Default::default(),
                 open_buffers: Default::default(),
                 shared_buffers: Default::default(),
-                diagnostics: Default::default(),
+                lsp_diagnostics: Default::default(),
+                provider_diagnostics: Default::default(),
+                diagnostic_summaries: Default::default(),
                 queued_operations: Default::default(),
-                collaborators: Default::default(),
-                languages,
+                language_registry: languages,
                 client,
                 user_store,
                 fs,
+                languages: Default::default(),
                 language_servers: Default::default(),
             };
 
@@ -997,15 +1041,27 @@ impl LocalWorktree {
         Ok((tree, scan_states_tx))
     }
 
-    pub fn languages(&self) -> &LanguageRegistry {
+    pub fn authorized_logins(&self) -> Vec<String> {
+        self.config.collaborators.clone()
+    }
+
+    pub fn language_registry(&self) -> &LanguageRegistry {
+        &self.language_registry
+    }
+
+    pub fn languages(&self) -> &[Arc<Language>] {
         &self.languages
     }
 
-    pub fn ensure_language_server(
+    pub fn register_language(
         &mut self,
-        language: &Language,
+        language: &Arc<Language>,
         cx: &mut ModelContext<Worktree>,
     ) -> Option<Arc<LanguageServer>> {
+        if !self.languages.iter().any(|l| Arc::ptr_eq(l, language)) {
+            self.languages.push(language.clone());
+        }
+
         if let Some(server) = self.language_servers.get(language.name()) {
             return Some(server.clone());
         }
@@ -1015,6 +1071,10 @@ impl LocalWorktree {
             .log_err()
             .flatten()
         {
+            let disk_based_sources = language
+                .disk_based_diagnostic_sources()
+                .cloned()
+                .unwrap_or_default();
             let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
             language_server
                 .on_notification::<lsp::notification::PublishDiagnostics, _>(move |params| {
@@ -1025,7 +1085,8 @@ impl LocalWorktree {
                 while let Ok(diagnostics) = diagnostics_rx.recv().await {
                     if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
                         handle.update(&mut cx, |this, cx| {
-                            this.update_diagnostics(diagnostics, cx).log_err();
+                            this.update_diagnostics_from_lsp(diagnostics, &disk_based_sources, cx)
+                                .log_err();
                         });
                     } else {
                         break;
@@ -1042,20 +1103,18 @@ impl LocalWorktree {
         }
     }
 
-    pub fn open_buffer(
+    fn get_open_buffer(
         &mut self,
         path: &Path,
         cx: &mut ModelContext<Worktree>,
-    ) -> Task<Result<ModelHandle<Buffer>>> {
-        let handle = cx.handle();
-
-        // If there is already a buffer for the given path, then return it.
-        let mut existing_buffer = None;
+    ) -> Option<ModelHandle<Buffer>> {
+        let worktree_id = self.id();
+        let mut result = None;
         self.open_buffers.retain(|_buffer_id, buffer| {
             if let Some(buffer) = buffer.upgrade(cx.as_ref()) {
                 if let Some(file) = buffer.read(cx.as_ref()).file() {
-                    if file.worktree_id() == handle.id() && file.path().as_ref() == path {
-                        existing_buffer = Some(buffer);
+                    if file.worktree_id() == worktree_id && file.path().as_ref() == path {
+                        result = Some(buffer);
                     }
                 }
                 true
@@ -1063,45 +1122,63 @@ impl LocalWorktree {
                 false
             }
         });
+        result
+    }
 
+    fn open_buffer(
+        &mut self,
+        path: &Path,
+        cx: &mut ModelContext<Worktree>,
+    ) -> Task<Result<ModelHandle<Buffer>>> {
         let path = Arc::from(path);
-        cx.spawn(|this, mut cx| async move {
-            if let Some(existing_buffer) = existing_buffer {
-                Ok(existing_buffer)
-            } else {
-                let (file, contents) = this
-                    .update(&mut cx, |this, cx| this.as_local().unwrap().load(&path, cx))
-                    .await?;
-                let language = this.read_with(&cx, |this, _| {
-                    use language::File;
-                    this.languages().select_language(file.full_path()).cloned()
-                });
-                let (diagnostics, language_server) = this.update(&mut cx, |this, cx| {
+        cx.spawn(move |this, mut cx| async move {
+            let (file, contents) = this
+                .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
+                .await?;
+
+            let (lsp_diagnostics, provider_diagnostics, language, language_server) =
+                this.update(&mut cx, |this, cx| {
                     let this = this.as_local_mut().unwrap();
-                    (
-                        this.diagnostics.remove(path.as_ref()),
-                        language
-                            .as_ref()
-                            .and_then(|language| this.ensure_language_server(language, cx)),
-                    )
-                });
-                let buffer = cx.add_model(|cx| {
-                    let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx);
-                    buffer.set_language(language, language_server, cx);
-                    if let Some(diagnostics) = diagnostics {
-                        buffer.update_diagnostics(None, diagnostics, cx).unwrap();
-                    }
-                    buffer
+                    let lsp_diagnostics = this.lsp_diagnostics.remove(&path);
+                    let provider_diagnostics = this.provider_diagnostics.remove(&path);
+                    let language = this
+                        .language_registry
+                        .select_language(file.full_path())
+                        .cloned();
+                    let server = language
+                        .as_ref()
+                        .and_then(|language| this.register_language(language, cx));
+                    (lsp_diagnostics, provider_diagnostics, language, server)
                 });
-                this.update(&mut cx, |this, _| {
-                    let this = this
-                        .as_local_mut()
-                        .ok_or_else(|| anyhow!("must be a local worktree"))?;
 
-                    this.open_buffers.insert(buffer.id(), buffer.downgrade());
-                    Ok(buffer)
-                })
-            }
+            let mut buffer_operations = Vec::new();
+            let buffer = cx.add_model(|cx| {
+                let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx);
+                buffer.set_language(language, language_server, cx);
+                if let Some(diagnostics) = lsp_diagnostics {
+                    let op = buffer
+                        .update_diagnostics(LSP_PROVIDER_NAME.clone(), None, diagnostics, cx)
+                        .unwrap();
+                    buffer_operations.push(op);
+                }
+                if let Some(diagnostics) = provider_diagnostics {
+                    let op = buffer
+                        .update_diagnostics(DIAGNOSTIC_PROVIDER_NAME.clone(), None, diagnostics, cx)
+                        .unwrap();
+                    buffer_operations.push(op);
+                }
+                buffer
+            });
+
+            this.update(&mut cx, |this, cx| {
+                for op in buffer_operations {
+                    this.send_buffer_update(buffer.read(cx).remote_id(), op, cx);
+                }
+                let this = this.as_local_mut().unwrap();
+                this.open_buffers.insert(buffer.id(), buffer.downgrade());
+            });
+
+            Ok(buffer)
         })
     }
 
@@ -1110,13 +1187,12 @@ impl LocalWorktree {
         envelope: TypedEnvelope<proto::OpenBuffer>,
         cx: &mut ModelContext<Worktree>,
     ) -> Task<Result<proto::OpenBufferResponse>> {
-        let peer_id = envelope.original_sender_id();
-        let path = Path::new(&envelope.payload.path);
-
-        let buffer = self.open_buffer(path, cx);
-
         cx.spawn(|this, mut cx| async move {
-            let buffer = buffer.await?;
+            let peer_id = envelope.original_sender_id();
+            let path = Path::new(&envelope.payload.path);
+            let buffer = this
+                .update(&mut cx, |this, cx| this.open_buffer(path, cx))
+                .await?;
             this.update(&mut cx, |this, cx| {
                 this.as_local_mut()
                     .unwrap()
@@ -1145,27 +1221,12 @@ impl LocalWorktree {
         Ok(())
     }
 
-    pub fn add_collaborator(
-        &mut self,
-        collaborator: Collaborator,
-        cx: &mut ModelContext<Worktree>,
-    ) {
-        self.collaborators
-            .insert(collaborator.peer_id, collaborator);
-        cx.notify();
-    }
-
     pub fn remove_collaborator(
         &mut self,
-        envelope: TypedEnvelope<proto::RemoveCollaborator>,
+        peer_id: PeerId,
+        replica_id: ReplicaId,
         cx: &mut ModelContext<Worktree>,
-    ) -> Result<()> {
-        let peer_id = PeerId(envelope.payload.peer_id);
-        let replica_id = self
-            .collaborators
-            .remove(&peer_id)
-            .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
-            .replica_id;
+    ) {
         self.shared_buffers.remove(&peer_id);
         for (_, buffer) in &self.open_buffers {
             if let Some(buffer) = buffer.upgrade(cx) {

crates/project_panel/src/project_panel.rs 🔗

@@ -118,7 +118,7 @@ impl ProjectPanel {
                 worktree_id,
                 entry_id,
             } => {
-                if let Some(worktree) = project.read(cx).worktree_for_id(*worktree_id) {
+                if let Some(worktree) = project.read(cx).worktree_for_id(*worktree_id, cx) {
                     if let Some(entry) = worktree.read(cx).entry_for_id(*entry_id) {
                         workspace
                             .open_entry(
@@ -307,7 +307,7 @@ impl ProjectPanel {
     fn selected_entry<'a>(&self, cx: &'a AppContext) -> Option<(&'a Worktree, &'a project::Entry)> {
         let selection = self.selection?;
         let project = self.project.read(cx);
-        let worktree = project.worktree_for_id(selection.worktree_id)?.read(cx);
+        let worktree = project.worktree_for_id(selection.worktree_id, cx)?.read(cx);
         Some((worktree, worktree.entry_for_id(selection.entry_id)?))
     }
 
@@ -374,7 +374,7 @@ impl ProjectPanel {
     fn expand_entry(&mut self, worktree_id: usize, entry_id: usize, cx: &mut ViewContext<Self>) {
         let project = self.project.read(cx);
         if let Some((worktree, expanded_dir_ids)) = project
-            .worktree_for_id(worktree_id)
+            .worktree_for_id(worktree_id, cx)
             .zip(self.expanded_dir_ids.get_mut(&worktree_id))
         {
             let worktree = worktree.read(cx);
@@ -617,17 +617,18 @@ mod tests {
         )
         .await;
 
-        let project = cx.add_model(|_| {
-            Project::new(
-                params.languages.clone(),
+        let project = cx.update(|cx| {
+            Project::local(
                 params.client.clone(),
                 params.user_store.clone(),
+                params.languages.clone(),
                 params.fs.clone(),
+                cx,
             )
         });
         let root1 = project
             .update(&mut cx, |project, cx| {
-                project.add_local_worktree("/root1".as_ref(), cx)
+                project.add_local_worktree("/root1", cx)
             })
             .await
             .unwrap();
@@ -636,7 +637,7 @@ mod tests {
             .await;
         let root2 = project
             .update(&mut cx, |project, cx| {
-                project.add_local_worktree("/root2".as_ref(), cx)
+                project.add_local_worktree("/root2", cx)
             })
             .await
             .unwrap();

crates/rpc/proto/zed.proto 🔗

@@ -9,37 +9,46 @@ message Envelope {
         Ack ack = 4;
         Error error = 5;
         Ping ping = 6;
-        ShareWorktree share_worktree = 7;
-        ShareWorktreeResponse share_worktree_response = 8;
-        JoinWorktree join_worktree = 9;
-        JoinWorktreeResponse join_worktree_response = 10;
-        UpdateWorktree update_worktree = 11;
-        CloseWorktree close_worktree = 12;
-        OpenBuffer open_buffer = 13;
-        OpenBufferResponse open_buffer_response = 14;
-        CloseBuffer close_buffer = 15;
-        UpdateBuffer update_buffer = 16;
-        SaveBuffer save_buffer = 17;
-        BufferSaved buffer_saved = 18;
-        AddCollaborator add_collaborator = 19;
-        RemoveCollaborator remove_collaborator = 20;
-        GetChannels get_channels = 21;
-        GetChannelsResponse get_channels_response = 22;
-        GetUsers get_users = 23;
-        GetUsersResponse get_users_response = 24;
-        JoinChannel join_channel = 25;
-        JoinChannelResponse join_channel_response = 26;
-        LeaveChannel leave_channel = 27;
-        SendChannelMessage send_channel_message = 28;
-        SendChannelMessageResponse send_channel_message_response = 29;
-        ChannelMessageSent channel_message_sent = 30;
-        GetChannelMessages get_channel_messages = 31;
-        GetChannelMessagesResponse get_channel_messages_response = 32;
-        OpenWorktree open_worktree = 33;
-        OpenWorktreeResponse open_worktree_response = 34;
-        UnshareWorktree unshare_worktree = 35;
-        UpdateContacts update_contacts = 36;
-        LeaveWorktree leave_worktree = 37;
+
+        RegisterProject register_project = 7;
+        RegisterProjectResponse register_project_response = 8;
+        UnregisterProject unregister_project = 9;
+        ShareProject share_project = 10;
+        UnshareProject unshare_project = 11;
+        JoinProject join_project = 12;
+        JoinProjectResponse join_project_response = 13;
+        LeaveProject leave_project = 14;
+        AddProjectCollaborator add_project_collaborator = 15;
+        RemoveProjectCollaborator remove_project_collaborator = 16;
+
+        RegisterWorktree register_worktree = 17;
+        UnregisterWorktree unregister_worktree = 18;
+        ShareWorktree share_worktree = 100;
+        UpdateWorktree update_worktree = 19;
+        UpdateDiagnosticSummary update_diagnostic_summary = 20;
+
+        OpenBuffer open_buffer = 22;
+        OpenBufferResponse open_buffer_response = 23;
+        CloseBuffer close_buffer = 24;
+        UpdateBuffer update_buffer = 25;
+        SaveBuffer save_buffer = 26;
+        BufferSaved buffer_saved = 27;
+
+        GetChannels get_channels = 28;
+        GetChannelsResponse get_channels_response = 29;
+        JoinChannel join_channel = 30;
+        JoinChannelResponse join_channel_response = 31;
+        LeaveChannel leave_channel = 32;
+        SendChannelMessage send_channel_message = 33;
+        SendChannelMessageResponse send_channel_message_response = 34;
+        ChannelMessageSent channel_message_sent = 35;
+        GetChannelMessages get_channel_messages = 36;
+        GetChannelMessagesResponse get_channel_messages_response = 37;
+
+        UpdateContacts update_contacts = 38;
+
+        GetUsers get_users = 39;
+        GetUsersResponse get_users_response = 40;
     }
 }
 
@@ -53,62 +62,77 @@ message Error {
     string message = 1;
 }
 
-message OpenWorktree {
-    string root_name = 1;
-    repeated string authorized_logins = 2;
+message RegisterProject {}
+
+message RegisterProjectResponse {
+    uint64 project_id = 1;
 }
 
-message OpenWorktreeResponse {
-    uint64 worktree_id = 1;
+message UnregisterProject {
+    uint64 project_id = 1;
 }
 
-message ShareWorktree {
-    Worktree worktree = 1;
+message ShareProject {
+    uint64 project_id = 1;
 }
 
-message ShareWorktreeResponse {}
+message UnshareProject {
+    uint64 project_id = 1;
+}
 
-message UnshareWorktree {
-    uint64 worktree_id = 1;
+message JoinProject {
+    uint64 project_id = 1;
 }
 
-message JoinWorktree {
-    uint64 worktree_id = 1;
+message JoinProjectResponse {
+    uint32 replica_id = 2;
+    repeated Worktree worktrees = 3;
+    repeated Collaborator collaborators = 4;
 }
 
-message LeaveWorktree {
-    uint64 worktree_id = 1;
+message LeaveProject {
+    uint64 project_id = 1;
 }
 
-message JoinWorktreeResponse {
-    Worktree worktree = 2;
-    uint32 replica_id = 3;
-    repeated Collaborator collaborators = 4;
+message RegisterWorktree {
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    string root_name = 3;
+    repeated string authorized_logins = 4;
 }
 
-message UpdateWorktree {
-    uint64 worktree_id = 1;
-    repeated Entry updated_entries = 2;
-    repeated uint64 removed_entries = 3;
+message UnregisterWorktree {
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+}
+
+message ShareWorktree {
+    uint64 project_id = 1;
+    Worktree worktree = 2;
 }
 
-message CloseWorktree {
-    uint64 worktree_id = 1;
+message UpdateWorktree {
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    string root_name = 3;
+    repeated Entry updated_entries = 4;
+    repeated uint64 removed_entries = 5;
 }
 
-message AddCollaborator {
-    uint64 worktree_id = 1;
+message AddProjectCollaborator {
+    uint64 project_id = 1;
     Collaborator collaborator = 2;
 }
 
-message RemoveCollaborator {
-    uint64 worktree_id = 1;
+message RemoveProjectCollaborator {
+    uint64 project_id = 1;
     uint32 peer_id = 2;
 }
 
 message OpenBuffer {
-    uint64 worktree_id = 1;
-    string path = 2;
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    string path = 3;
 }
 
 message OpenBufferResponse {
@@ -116,26 +140,38 @@ message OpenBufferResponse {
 }
 
 message CloseBuffer {
-    uint64 worktree_id = 1;
-    uint64 buffer_id = 2;
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    uint64 buffer_id = 3;
 }
 
 message UpdateBuffer {
-    uint64 worktree_id = 1;
-    uint64 buffer_id = 2;
-    repeated Operation operations = 3;
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    uint64 buffer_id = 3;
+    repeated Operation operations = 4;
 }
 
 message SaveBuffer {
-    uint64 worktree_id = 1;
-    uint64 buffer_id = 2;
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    uint64 buffer_id = 3;
 }
 
 message BufferSaved {
-    uint64 worktree_id = 1;
-    uint64 buffer_id = 2;
-    repeated VectorClockEntry version = 3;
-    Timestamp mtime = 4;
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    uint64 buffer_id = 3;
+    repeated VectorClockEntry version = 4;
+    Timestamp mtime = 5;
+}
+
+message UpdateDiagnosticSummary {
+    uint64 project_id = 1;
+    uint64 worktree_id = 2;
+    string path = 3;
+    uint32 error_count = 4;
+    uint32 warning_count = 5;
 }
 
 message GetChannels {}
@@ -229,36 +265,55 @@ message Buffer {
     string content = 2;
     repeated Operation.Edit history = 3;
     repeated SelectionSet selections = 4;
-    DiagnosticSet diagnostics = 5;
+    repeated DiagnosticSet diagnostic_sets = 5;
 }
 
 message SelectionSet {
     uint32 replica_id = 1;
-    uint32 lamport_timestamp = 2;
-    bool is_active = 3;
-    repeated VectorClockEntry version = 4;
-    repeated Selection selections = 5;
+    repeated Selection selections = 2;
 }
 
 message Selection {
     uint64 id = 1;
-    uint64 start = 2;
-    uint64 end = 3;
+    Anchor start = 2;
+    Anchor end = 3;
     bool reversed = 4;
 }
 
+message Anchor {
+    uint32 replica_id = 1;
+    uint32 local_timestamp = 2;
+    uint64 offset = 3;
+    Bias bias = 4;
+}
+
+enum Bias {
+    Left = 0;
+    Right = 1;
+}
+
+message UpdateDiagnosticSet {
+    uint32 replica_id = 1;
+    uint32 lamport_timestamp = 2;
+    DiagnosticSet diagnostic_set = 3;
+}
+
 message DiagnosticSet {
-    repeated VectorClockEntry version = 1;
+    string provider_name = 1;
     repeated Diagnostic diagnostics = 2;
 }
 
 message Diagnostic {
-    uint64 start = 1;
-    uint64 end = 2;
+    Anchor start = 1;
+    Anchor end = 2;
     Severity severity = 3;
     string message = 4;
-    uint64 group_id = 5;
-    bool is_primary = 6;
+    optional string code = 5;
+    uint64 group_id = 6;
+    bool is_primary = 7;
+    bool is_valid = 8;
+    bool is_disk_based = 9;
+
     enum Severity {
         None = 0;
         Error = 1;
@@ -268,16 +323,13 @@ message Diagnostic {
     }
 }
 
-
-
 message Operation {
     oneof variant {
         Edit edit = 1;
         Undo undo = 2;
         UpdateSelections update_selections = 3;
         RemoveSelections remove_selections = 4;
-        SetActiveSelections set_active_selections = 5;
-        DiagnosticSet update_diagnostics = 6;
+        UpdateDiagnosticSet update_diagnostic_set = 5;
     }
 
     message Edit {
@@ -306,21 +358,12 @@ message Operation {
 
     message UpdateSelections {
         uint32 replica_id = 1;
-        uint32 local_timestamp = 2;
         uint32 lamport_timestamp = 3;
-        repeated VectorClockEntry version = 4;
-        repeated Selection selections = 5;
+        repeated Selection selections = 4;
     }
 
     message RemoveSelections {
         uint32 replica_id = 1;
-        uint32 local_timestamp = 2;
-        uint32 lamport_timestamp = 3;
-    }
-
-    message SetActiveSelections {
-        uint32 replica_id = 1;
-        optional uint32 local_timestamp = 2;
         uint32 lamport_timestamp = 3;
     }
 }
@@ -360,12 +403,12 @@ message ChannelMessage {
 
 message Contact {
     uint64 user_id = 1;
-    repeated WorktreeMetadata worktrees = 2;
+    repeated ProjectMetadata projects = 2;
 }
 
-message WorktreeMetadata {
+message ProjectMetadata {
     uint64 id = 1;
-    string root_name = 2;
-    bool is_shared = 3;
+    bool is_shared = 2;
+    repeated string worktree_root_names = 3;
     repeated uint64 guests = 4;
 }
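
The UpdateDiagnosticSummary message added above carries the same per-path counts that the worktree code stores via DiagnosticSummary::new(&diagnostics) in its diagnostic_summaries map. The summary type itself is defined elsewhere in this PR rather than in the hunks shown here, so the following Rust sketch is only a guess at its shape: count primary errors and warnings per path.

    // Hypothetical shape of DiagnosticSummary; the real definition is not part
    // of the hunks shown in this diff.
    #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
    pub struct DiagnosticSummary {
        pub error_count: u32,
        pub warning_count: u32,
    }

    #[derive(Clone, Copy, PartialEq, Eq)]
    pub enum Severity {
        Error,
        Warning,
        Information,
        Hint,
    }

    pub struct DiagnosticEntry {
        pub severity: Severity,
        pub is_primary: bool,
    }

    impl DiagnosticSummary {
        // Counting only primary diagnostics is an assumption: it keeps a group
        // with several related ranges from inflating the totals.
        pub fn new(entries: &[DiagnosticEntry]) -> Self {
            let mut this = Self::default();
            for entry in entries {
                if entry.is_primary {
                    match entry.severity {
                        Severity::Error => this.error_count += 1,
                        Severity::Warning => this.warning_count += 1,
                        _ => {}
                    }
                }
            }
            this
        }
    }
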

crates/rpc/src/peer.rs 🔗

@@ -388,6 +388,7 @@ mod tests {
                     .request(
                         client1_conn_id,
                         proto::OpenBuffer {
+                            project_id: 0,
                             worktree_id: 1,
                             path: "path/one".to_string(),
                         },
@@ -400,7 +401,7 @@ mod tests {
                         content: "path/one content".to_string(),
                         history: vec![],
                         selections: vec![],
-                        diagnostics: None,
+                        diagnostic_sets: vec![],
                     }),
                 }
             );
@@ -410,6 +411,7 @@ mod tests {
                     .request(
                         client2_conn_id,
                         proto::OpenBuffer {
+                            project_id: 0,
                             worktree_id: 2,
                             path: "path/two".to_string(),
                         },
@@ -422,7 +424,7 @@ mod tests {
                         content: "path/two content".to_string(),
                         history: vec![],
                         selections: vec![],
-                        diagnostics: None,
+                        diagnostic_sets: vec![],
                     }),
                 }
             );
@@ -453,7 +455,7 @@ mod tests {
                                         content: "path/one content".to_string(),
                                         history: vec![],
                                         selections: vec![],
-                                        diagnostics: None,
+                                        diagnostic_sets: vec![],
                                     }),
                                 }
                             }
@@ -465,7 +467,7 @@ mod tests {
                                         content: "path/two content".to_string(),
                                         history: vec![],
                                         selections: vec![],
-                                        diagnostics: None,
+                                        diagnostic_sets: vec![],
                                     }),
                                 }
                             }

crates/rpc/src/proto.rs 🔗

@@ -121,68 +121,73 @@ macro_rules! entity_messages {
 
 messages!(
     Ack,
-    AddCollaborator,
+    AddProjectCollaborator,
     BufferSaved,
     ChannelMessageSent,
     CloseBuffer,
-    CloseWorktree,
     Error,
     GetChannelMessages,
     GetChannelMessagesResponse,
     GetChannels,
     GetChannelsResponse,
-    UpdateContacts,
     GetUsers,
     GetUsersResponse,
     JoinChannel,
     JoinChannelResponse,
-    JoinWorktree,
-    JoinWorktreeResponse,
+    JoinProject,
+    JoinProjectResponse,
     LeaveChannel,
-    LeaveWorktree,
+    LeaveProject,
     OpenBuffer,
     OpenBufferResponse,
-    OpenWorktree,
-    OpenWorktreeResponse,
+    RegisterProjectResponse,
     Ping,
-    RemoveCollaborator,
+    RegisterProject,
+    RegisterWorktree,
+    RemoveProjectCollaborator,
     SaveBuffer,
     SendChannelMessage,
     SendChannelMessageResponse,
+    ShareProject,
     ShareWorktree,
-    ShareWorktreeResponse,
-    UnshareWorktree,
+    UnregisterProject,
+    UnregisterWorktree,
+    UnshareProject,
     UpdateBuffer,
+    UpdateContacts,
     UpdateWorktree,
 );
 
 request_messages!(
+    (GetChannelMessages, GetChannelMessagesResponse),
     (GetChannels, GetChannelsResponse),
     (GetUsers, GetUsersResponse),
     (JoinChannel, JoinChannelResponse),
+    (JoinProject, JoinProjectResponse),
     (OpenBuffer, OpenBufferResponse),
-    (JoinWorktree, JoinWorktreeResponse),
-    (OpenWorktree, OpenWorktreeResponse),
     (Ping, Ack),
+    (RegisterProject, RegisterProjectResponse),
+    (RegisterWorktree, Ack),
     (SaveBuffer, BufferSaved),
-    (UpdateBuffer, Ack),
-    (ShareWorktree, ShareWorktreeResponse),
-    (UnshareWorktree, Ack),
     (SendChannelMessage, SendChannelMessageResponse),
-    (GetChannelMessages, GetChannelMessagesResponse),
+    (ShareProject, Ack),
+    (ShareWorktree, Ack),
+    (UpdateBuffer, Ack),
 );
 
 entity_messages!(
-    worktree_id,
-    AddCollaborator,
+    project_id,
+    AddProjectCollaborator,
+    RemoveProjectCollaborator,
+    JoinProject,
+    LeaveProject,
     BufferSaved,
-    CloseBuffer,
-    CloseWorktree,
     OpenBuffer,
-    JoinWorktree,
-    RemoveCollaborator,
+    CloseBuffer,
     SaveBuffer,
-    UnshareWorktree,
+    ShareWorktree,
+    UnregisterWorktree,
+    UnshareProject,
     UpdateBuffer,
     UpdateWorktree,
 );

crates/rpc/src/rpc.rs 🔗

@@ -5,4 +5,4 @@ pub mod proto;
 pub use conn::Connection;
 pub use peer::*;
 
-pub const PROTOCOL_VERSION: u32 = 3;
+pub const PROTOCOL_VERSION: u32 = 4;

crates/server/src/db.rs 🔗

@@ -443,7 +443,9 @@ impl Db {
 
 macro_rules! id_type {
     ($name:ident) => {
-        #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, sqlx::Type, Serialize)]
+        #[derive(
+            Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, sqlx::Type, Serialize,
+        )]
         #[sqlx(transparent)]
         #[serde(transparent)]
         pub struct $name(pub i32);
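
The id_type! change above only widens the derive list with PartialOrd and Ord. A plausible reason, though the motivating call sites are not in this section, is that ordered ids can key sorted collections such as BTreeMap. A tiny illustration with a hand-expanded wrapper, minus the sqlx/serde derives:

    use std::collections::BTreeMap;

    // Roughly what id_type!(UserId) expands to after this change, without the
    // sqlx::Type and Serialize derives that need their crates.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct UserId(pub i32);

    fn main() {
        // Ord lets the id be a BTreeMap key, so iteration is sorted by the raw i32.
        let mut names: BTreeMap<UserId, &str> = BTreeMap::new();
        names.insert(UserId(7), "alice");
        names.insert(UserId(3), "bob");
        let ids: Vec<i32> = names.keys().map(|id| id.0).collect();
        assert_eq!(ids, vec![3, 7]);
    }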

crates/server/src/releases.rs 🔗

@@ -2,16 +2,15 @@ use crate::{
     auth::RequestExt as _, github::Release, AppState, LayoutData, Request, RequestExt as _,
 };
 use comrak::ComrakOptions;
-use serde::{Serialize};
+use serde::Serialize;
 use std::sync::Arc;
-use tide::{http::mime};
+use tide::http::mime;
 
 pub fn add_routes(releases: &mut tide::Server<Arc<AppState>>) {
     releases.at("/releases").get(get_releases);
 }
 
 async fn get_releases(mut request: Request) -> tide::Result {
-
     #[derive(Serialize)]
     struct ReleasesData {
         #[serde(flatten)]
@@ -52,4 +51,4 @@ async fn get_releases(mut request: Request) -> tide::Result {
         .body(request.state().render_template("releases.hbs", &data)?)
         .content_type(mime::HTML)
         .build())
-}
+}

crates/server/src/rpc.rs 🔗

@@ -43,6 +43,7 @@ pub struct Server {
 
 const MESSAGE_COUNT_PER_PAGE: usize = 100;
 const MAX_MESSAGE_LEN: usize = 1024;
+const NO_SUCH_PROJECT: &'static str = "no such project";
 
 impl Server {
     pub fn new(
@@ -60,12 +61,15 @@ impl Server {
 
         server
             .add_handler(Server::ping)
-            .add_handler(Server::open_worktree)
-            .add_handler(Server::close_worktree)
+            .add_handler(Server::register_project)
+            .add_handler(Server::unregister_project)
+            .add_handler(Server::share_project)
+            .add_handler(Server::unshare_project)
+            .add_handler(Server::join_project)
+            .add_handler(Server::leave_project)
+            .add_handler(Server::register_worktree)
+            .add_handler(Server::unregister_worktree)
             .add_handler(Server::share_worktree)
-            .add_handler(Server::unshare_worktree)
-            .add_handler(Server::join_worktree)
-            .add_handler(Server::leave_worktree)
             .add_handler(Server::update_worktree)
             .add_handler(Server::open_buffer)
             .add_handler(Server::close_buffer)
@@ -169,26 +173,26 @@ impl Server {
         self.peer.disconnect(connection_id).await;
         let removed_connection = self.state_mut().remove_connection(connection_id)?;
 
-        for (worktree_id, worktree) in removed_connection.hosted_worktrees {
-            if let Some(share) = worktree.share {
+        for (project_id, project) in removed_connection.hosted_projects {
+            if let Some(share) = project.share {
                 broadcast(
                     connection_id,
                     share.guests.keys().copied().collect(),
                     |conn_id| {
                         self.peer
-                            .send(conn_id, proto::UnshareWorktree { worktree_id })
+                            .send(conn_id, proto::UnshareProject { project_id })
                     },
                 )
                 .await?;
             }
         }
 
-        for (worktree_id, peer_ids) in removed_connection.guest_worktree_ids {
+        for (project_id, peer_ids) in removed_connection.guest_project_ids {
             broadcast(connection_id, peer_ids, |conn_id| {
                 self.peer.send(
                     conn_id,
-                    proto::RemoveCollaborator {
-                        worktree_id,
+                    proto::RemoveProjectCollaborator {
+                        project_id,
                         peer_id: connection_id.0,
                     },
                 )
@@ -207,146 +211,98 @@ impl Server {
         Ok(())
     }
 
-    async fn open_worktree(
+    async fn register_project(
         mut self: Arc<Server>,
-        request: TypedEnvelope<proto::OpenWorktree>,
+        request: TypedEnvelope<proto::RegisterProject>,
     ) -> tide::Result<()> {
-        let receipt = request.receipt();
-        let host_user_id = self.state().user_id_for_connection(request.sender_id)?;
-
-        let mut contact_user_ids = HashSet::default();
-        contact_user_ids.insert(host_user_id);
-        for github_login in request.payload.authorized_logins {
-            match self.app_state.db.create_user(&github_login, false).await {
-                Ok(contact_user_id) => {
-                    contact_user_ids.insert(contact_user_id);
-                }
-                Err(err) => {
-                    let message = err.to_string();
-                    self.peer
-                        .respond_with_error(receipt, proto::Error { message })
-                        .await?;
-                    return Ok(());
-                }
-            }
-        }
-
-        let contact_user_ids = contact_user_ids.into_iter().collect::<Vec<_>>();
-        let worktree_id = self.state_mut().add_worktree(Worktree {
-            host_connection_id: request.sender_id,
-            host_user_id,
-            authorized_user_ids: contact_user_ids.clone(),
-            root_name: request.payload.root_name,
-            share: None,
-        });
-
+        let project_id = {
+            let mut state = self.state_mut();
+            let user_id = state.user_id_for_connection(request.sender_id)?;
+            state.register_project(request.sender_id, user_id)
+        };
         self.peer
-            .respond(receipt, proto::OpenWorktreeResponse { worktree_id })
+            .respond(
+                request.receipt(),
+                proto::RegisterProjectResponse { project_id },
+            )
             .await?;
-        self.update_contacts_for_users(&contact_user_ids).await?;
-
         Ok(())
     }
 
-    async fn close_worktree(
+    async fn unregister_project(
         mut self: Arc<Server>,
-        request: TypedEnvelope<proto::CloseWorktree>,
+        request: TypedEnvelope<proto::UnregisterProject>,
     ) -> tide::Result<()> {
-        let worktree_id = request.payload.worktree_id;
-        let worktree = self
+        let project = self
             .state_mut()
-            .remove_worktree(worktree_id, request.sender_id)?;
-
-        if let Some(share) = worktree.share {
-            broadcast(
-                request.sender_id,
-                share.guests.keys().copied().collect(),
-                |conn_id| {
-                    self.peer
-                        .send(conn_id, proto::UnshareWorktree { worktree_id })
-                },
-            )
-            .await?;
-        }
-        self.update_contacts_for_users(&worktree.authorized_user_ids)
+            .unregister_project(request.payload.project_id, request.sender_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
+        self.update_contacts_for_users(project.authorized_user_ids().iter())
             .await?;
         Ok(())
     }
 
-    async fn share_worktree(
+    async fn share_project(
         mut self: Arc<Server>,
-        mut request: TypedEnvelope<proto::ShareWorktree>,
+        request: TypedEnvelope<proto::ShareProject>,
     ) -> tide::Result<()> {
-        let worktree = request
-            .payload
-            .worktree
-            .as_mut()
-            .ok_or_else(|| anyhow!("missing worktree"))?;
-        let entries = mem::take(&mut worktree.entries)
-            .into_iter()
-            .map(|entry| (entry.id, entry))
-            .collect();
-
-        let contact_user_ids =
-            self.state_mut()
-                .share_worktree(worktree.id, request.sender_id, entries);
-        if let Some(contact_user_ids) = contact_user_ids {
-            self.peer
-                .respond(request.receipt(), proto::ShareWorktreeResponse {})
-                .await?;
-            self.update_contacts_for_users(&contact_user_ids).await?;
-        } else {
-            self.peer
-                .respond_with_error(
-                    request.receipt(),
-                    proto::Error {
-                        message: "no such worktree".to_string(),
-                    },
-                )
-                .await?;
-        }
+        self.state_mut()
+            .share_project(request.payload.project_id, request.sender_id);
+        self.peer.respond(request.receipt(), proto::Ack {}).await?;
         Ok(())
     }
 
-    async fn unshare_worktree(
+    async fn unshare_project(
         mut self: Arc<Server>,
-        request: TypedEnvelope<proto::UnshareWorktree>,
+        request: TypedEnvelope<proto::UnshareProject>,
     ) -> tide::Result<()> {
-        let worktree_id = request.payload.worktree_id;
-        let worktree = self
+        let project_id = request.payload.project_id;
+        let project = self
             .state_mut()
-            .unshare_worktree(worktree_id, request.sender_id)?;
+            .unshare_project(project_id, request.sender_id)?;
 
-        broadcast(request.sender_id, worktree.connection_ids, |conn_id| {
+        broadcast(request.sender_id, project.connection_ids, |conn_id| {
             self.peer
-                .send(conn_id, proto::UnshareWorktree { worktree_id })
+                .send(conn_id, proto::UnshareProject { project_id })
         })
         .await?;
-        self.update_contacts_for_users(&worktree.authorized_user_ids)
+        self.update_contacts_for_users(&project.authorized_user_ids)
             .await?;
 
         Ok(())
     }
 
-    async fn join_worktree(
+    async fn join_project(
         mut self: Arc<Server>,
-        request: TypedEnvelope<proto::JoinWorktree>,
+        request: TypedEnvelope<proto::JoinProject>,
     ) -> tide::Result<()> {
-        let worktree_id = request.payload.worktree_id;
+        let project_id = request.payload.project_id;
 
         let user_id = self.state().user_id_for_connection(request.sender_id)?;
         let response_data = self
             .state_mut()
-            .join_worktree(request.sender_id, user_id, worktree_id)
+            .join_project(request.sender_id, user_id, project_id)
             .and_then(|joined| {
-                let share = joined.worktree.share()?;
+                let share = joined.project.share()?;
                 let peer_count = share.guests.len();
                 let mut collaborators = Vec::with_capacity(peer_count);
                 collaborators.push(proto::Collaborator {
-                    peer_id: joined.worktree.host_connection_id.0,
+                    peer_id: joined.project.host_connection_id.0,
                     replica_id: 0,
-                    user_id: joined.worktree.host_user_id.to_proto(),
+                    user_id: joined.project.host_user_id.to_proto(),
                 });
+                let worktrees = joined
+                    .project
+                    .worktrees
+                    .iter()
+                    .filter_map(|(id, worktree)| {
+                        worktree.share.as_ref().map(|share| proto::Worktree {
+                            id: *id,
+                            root_name: worktree.root_name.clone(),
+                            entries: share.entries.values().cloned().collect(),
+                        })
+                    })
+                    .collect();
                 for (peer_conn_id, (peer_replica_id, peer_user_id)) in &share.guests {
                     if *peer_conn_id != request.sender_id {
                         collaborators.push(proto::Collaborator {
@@ -356,17 +312,13 @@ impl Server {
                         });
                     }
                 }
-                let response = proto::JoinWorktreeResponse {
-                    worktree: Some(proto::Worktree {
-                        id: worktree_id,
-                        root_name: joined.worktree.root_name.clone(),
-                        entries: share.entries.values().cloned().collect(),
-                    }),
+                let response = proto::JoinProjectResponse {
+                    worktrees,
                     replica_id: joined.replica_id as u32,
                     collaborators,
                 };
-                let connection_ids = joined.worktree.connection_ids();
-                let contact_user_ids = joined.worktree.authorized_user_ids.clone();
+                let connection_ids = joined.project.connection_ids();
+                let contact_user_ids = joined.project.authorized_user_ids();
                 Ok((response, connection_ids, contact_user_ids))
             });
 
@@ -375,8 +327,8 @@ impl Server {
                 broadcast(request.sender_id, connection_ids, |conn_id| {
                     self.peer.send(
                         conn_id,
-                        proto::AddCollaborator {
-                            worktree_id,
+                        proto::AddProjectCollaborator {
+                            project_id,
                             collaborator: Some(proto::Collaborator {
                                 peer_id: request.sender_id.0,
                                 replica_id: response.replica_id,
@@ -404,19 +356,19 @@ impl Server {
         Ok(())
     }
 
-    async fn leave_worktree(
+    async fn leave_project(
         mut self: Arc<Server>,
-        request: TypedEnvelope<proto::LeaveWorktree>,
+        request: TypedEnvelope<proto::LeaveProject>,
     ) -> tide::Result<()> {
         let sender_id = request.sender_id;
-        let worktree_id = request.payload.worktree_id;
-        let worktree = self.state_mut().leave_worktree(sender_id, worktree_id);
+        let project_id = request.payload.project_id;
+        let worktree = self.state_mut().leave_project(sender_id, project_id);
         if let Some(worktree) = worktree {
             broadcast(sender_id, worktree.connection_ids, |conn_id| {
                 self.peer.send(
                     conn_id,
-                    proto::RemoveCollaborator {
-                        worktree_id,
+                    proto::RemoveProjectCollaborator {
+                        project_id,
                         peer_id: sender_id.0,
                     },
                 )
@@ -428,16 +380,133 @@ impl Server {
         Ok(())
     }
 
+    async fn register_worktree(
+        mut self: Arc<Server>,
+        request: TypedEnvelope<proto::RegisterWorktree>,
+    ) -> tide::Result<()> {
+        let receipt = request.receipt();
+        let host_user_id = self.state().user_id_for_connection(request.sender_id)?;
+
+        let mut contact_user_ids = HashSet::default();
+        contact_user_ids.insert(host_user_id);
+        for github_login in request.payload.authorized_logins {
+            match self.app_state.db.create_user(&github_login, false).await {
+                Ok(contact_user_id) => {
+                    contact_user_ids.insert(contact_user_id);
+                }
+                Err(err) => {
+                    let message = err.to_string();
+                    self.peer
+                        .respond_with_error(receipt, proto::Error { message })
+                        .await?;
+                    return Ok(());
+                }
+            }
+        }
+
+        let contact_user_ids = contact_user_ids.into_iter().collect::<Vec<_>>();
+        let ok = self.state_mut().register_worktree(
+            request.payload.project_id,
+            request.payload.worktree_id,
+            Worktree {
+                authorized_user_ids: contact_user_ids.clone(),
+                root_name: request.payload.root_name,
+                share: None,
+            },
+        );
+
+        if ok {
+            self.peer.respond(receipt, proto::Ack {}).await?;
+            self.update_contacts_for_users(&contact_user_ids).await?;
+        } else {
+            self.peer
+                .respond_with_error(
+                    receipt,
+                    proto::Error {
+                        message: NO_SUCH_PROJECT.to_string(),
+                    },
+                )
+                .await?;
+        }
+
+        Ok(())
+    }
+
+    async fn unregister_worktree(
+        mut self: Arc<Server>,
+        request: TypedEnvelope<proto::UnregisterWorktree>,
+    ) -> tide::Result<()> {
+        let project_id = request.payload.project_id;
+        let worktree_id = request.payload.worktree_id;
+        let (worktree, guest_connection_ids) =
+            self.state_mut()
+                .unregister_worktree(project_id, worktree_id, request.sender_id)?;
+
+        broadcast(request.sender_id, guest_connection_ids, |conn_id| {
+            self.peer.send(
+                conn_id,
+                proto::UnregisterWorktree {
+                    project_id,
+                    worktree_id,
+                },
+            )
+        })
+        .await?;
+        self.update_contacts_for_users(&worktree.authorized_user_ids)
+            .await?;
+        Ok(())
+    }
+
+    async fn share_worktree(
+        mut self: Arc<Server>,
+        mut request: TypedEnvelope<proto::ShareWorktree>,
+    ) -> tide::Result<()> {
+        let worktree = request
+            .payload
+            .worktree
+            .as_mut()
+            .ok_or_else(|| anyhow!("missing worktree"))?;
+        let entries = mem::take(&mut worktree.entries)
+            .into_iter()
+            .map(|entry| (entry.id, entry))
+            .collect();
+
+        let contact_user_ids = self.state_mut().share_worktree(
+            request.payload.project_id,
+            worktree.id,
+            request.sender_id,
+            entries,
+        );
+        if let Some(contact_user_ids) = contact_user_ids {
+            self.peer.respond(request.receipt(), proto::Ack {}).await?;
+            self.update_contacts_for_users(&contact_user_ids).await?;
+        } else {
+            self.peer
+                .respond_with_error(
+                    request.receipt(),
+                    proto::Error {
+                        message: "no such worktree".to_string(),
+                    },
+                )
+                .await?;
+        }
+        Ok(())
+    }
+
     async fn update_worktree(
         mut self: Arc<Server>,
         request: TypedEnvelope<proto::UpdateWorktree>,
     ) -> tide::Result<()> {
-        let connection_ids = self.state_mut().update_worktree(
-            request.sender_id,
-            request.payload.worktree_id,
-            &request.payload.removed_entries,
-            &request.payload.updated_entries,
-        )?;
+        let connection_ids = self
+            .state_mut()
+            .update_worktree(
+                request.sender_id,
+                request.payload.project_id,
+                request.payload.worktree_id,
+                &request.payload.removed_entries,
+                &request.payload.updated_entries,
+            )
+            .ok_or_else(|| anyhow!("no such worktree"))?;
 
         broadcast(request.sender_id, connection_ids, |connection_id| {
             self.peer
@@ -455,7 +524,9 @@ impl Server {
         let receipt = request.receipt();
         let host_connection_id = self
             .state()
-            .worktree_host_connection_id(request.sender_id, request.payload.worktree_id)?;
+            .read_project(request.payload.project_id, request.sender_id)
+            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
+            .host_connection_id;
         let response = self
             .peer
             .forward_request(request.sender_id, host_connection_id, request.payload)
@@ -470,7 +541,9 @@ impl Server {
     ) -> tide::Result<()> {
         let host_connection_id = self
             .state()
-            .worktree_host_connection_id(request.sender_id, request.payload.worktree_id)?;
+            .read_project(request.payload.project_id, request.sender_id)
+            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
+            .host_connection_id;
         self.peer
             .forward_send(request.sender_id, host_connection_id, request.payload)
             .await?;
@@ -485,10 +558,11 @@ impl Server {
         let guests;
         {
             let state = self.state();
-            host = state
-                .worktree_host_connection_id(request.sender_id, request.payload.worktree_id)?;
-            guests = state
-                .worktree_guest_connection_ids(request.sender_id, request.payload.worktree_id)?;
+            let project = state
+                .read_project(request.payload.project_id, request.sender_id)
+                .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            host = project.host_connection_id;
+            guests = project.guest_connection_ids();
         }
 
         let sender = request.sender_id;
@@ -520,7 +594,8 @@ impl Server {
     ) -> tide::Result<()> {
         let receiver_ids = self
             .state()
-            .worktree_connection_ids(request.sender_id, request.payload.worktree_id)?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)
+            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -536,7 +611,8 @@ impl Server {
     ) -> tide::Result<()> {
         let receiver_ids = self
             .state()
-            .worktree_connection_ids(request.sender_id, request.payload.worktree_id)?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)
+            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -943,33 +1019,29 @@ mod tests {
             self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials,
             EstablishConnectionError, UserStore,
         },
-        contacts_panel::JoinWorktree,
-        editor::{Editor, EditorSettings, Input},
+        editor::{Editor, EditorSettings, Input, MultiBuffer},
         fs::{FakeFs, Fs as _},
         language::{
-            tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry,
-            LanguageServerConfig, Point,
+            tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig,
+            LanguageRegistry, LanguageServerConfig, Point,
         },
         lsp,
-        project::{ProjectPath, Worktree},
-        test::test_app_state,
-        workspace::Workspace,
+        project::Project,
     };
 
     #[gpui::test]
-    async fn test_share_worktree(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+    async fn test_share_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
         let (window_b, _) = cx_b.add_window(|_| EmptyView);
         let lang_registry = Arc::new(LanguageRegistry::new());
+        let fs = Arc::new(FakeFs::new());
+        cx_a.foreground().forbid_parking();
 
         // Connect to a server as 2 clients.
         let mut server = TestServer::start().await;
         let client_a = server.create_client(&mut cx_a, "user_a").await;
         let client_b = server.create_client(&mut cx_b, "user_b").await;
 
-        cx_a.foreground().forbid_parking();
-
-        // Share a local worktree as client A
-        let fs = Arc::new(FakeFs::new());
+        // Share a project as client A
         fs.insert_tree(
             "/a",
             json!({
@@ -979,47 +1051,56 @@ mod tests {
             }),
         )
         .await;
-        let worktree_a = Worktree::open_local(
-            client_a.clone(),
-            client_a.user_store.clone(),
-            "/a".as_ref(),
-            fs,
-            lang_registry.clone(),
-            &mut cx_a.to_async(),
-        )
-        .await
-        .unwrap();
+        let project_a = cx_a.update(|cx| {
+            Project::local(
+                client_a.clone(),
+                client_a.user_store.clone(),
+                lang_registry.clone(),
+                fs.clone(),
+                cx,
+            )
+        });
+        let worktree_a = project_a
+            .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx))
+            .await
+            .unwrap();
         worktree_a
             .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
             .await;
-        let worktree_id = worktree_a
-            .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx))
+        let project_id = project_a
+            .update(&mut cx_a, |project, _| project.next_remote_id())
+            .await;
+        project_a
+            .update(&mut cx_a, |project, cx| project.share(cx))
             .await
             .unwrap();
 
-        // Join that worktree as client B, and see that a guest has joined as client A.
-        let worktree_b = Worktree::open_remote(
+        // Join that project as client B
+        let project_b = Project::remote(
+            project_id,
             client_b.clone(),
-            worktree_id,
-            lang_registry.clone(),
             client_b.user_store.clone(),
+            lang_registry.clone(),
+            fs.clone(),
             &mut cx_b.to_async(),
         )
         .await
         .unwrap();
+        let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone());
 
-        let replica_id_b = worktree_b.read_with(&cx_b, |tree, _| {
+        let replica_id_b = project_b.read_with(&cx_b, |project, _| {
             assert_eq!(
-                tree.collaborators()
+                project
+                    .collaborators()
                     .get(&client_a.peer_id)
                     .unwrap()
                     .user
                     .github_login,
                 "user_a"
             );
-            tree.replica_id()
+            project.replica_id()
         });
-        worktree_a
+        project_a
             .condition(&cx_a, |tree, _| {
                 tree.collaborators()
                     .get(&client_b.peer_id)
@@ -1035,20 +1116,24 @@ mod tests {
             .update(&mut cx_b, |worktree, cx| worktree.open_buffer("b.txt", cx))
             .await
             .unwrap();
-        buffer_b.read_with(&cx_b, |buf, _| assert_eq!(buf.text(), "b-contents"));
+        let buffer_b = cx_b.add_model(|cx| MultiBuffer::singleton(buffer_b, cx));
+        buffer_b.read_with(&cx_b, |buf, cx| {
+            assert_eq!(buf.read(cx).text(), "b-contents")
+        });
         worktree_a.read_with(&cx_a, |tree, cx| assert!(tree.has_open_buffer("b.txt", cx)));
         let buffer_a = worktree_a
             .update(&mut cx_a, |tree, cx| tree.open_buffer("b.txt", cx))
             .await
             .unwrap();
 
-        // Create a selection set as client B and see that selection set as client A.
         let editor_b = cx_b.add_view(window_b, |cx| {
-            Editor::for_buffer(buffer_b, |cx| EditorSettings::test(cx), cx)
+            Editor::for_buffer(buffer_b, Arc::new(|cx| EditorSettings::test(cx)), cx)
         });
-        buffer_a
-            .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 1)
-            .await;
+        // TODO
+        // // Create a selection set as client B and see that selection set as client A.
+        // buffer_a
+        //     .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 1)
+        //     .await;
 
         // Edit the buffer as client B and see that edit as client A.
         editor_b.update(&mut cx_b, |editor, cx| {
@@ -1058,11 +1143,12 @@ mod tests {
             .condition(&cx_a, |buffer, _| buffer.text() == "ok, b-contents")
             .await;
 
-        // Remove the selection set as client B, see those selections disappear as client A.
+        // TODO
+        // // Remove the selection set as client B, see those selections disappear as client A.
         cx_b.update(move |_| drop(editor_b));
-        buffer_a
-            .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 0)
-            .await;
+        // buffer_a
+        //     .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 0)
+        //     .await;
 
         // Close the buffer as client A, see that the buffer is closed.
         cx_a.update(move |_| drop(buffer_a));
@@ -1070,32 +1156,25 @@ mod tests {
             .condition(&cx_a, |tree, cx| !tree.has_open_buffer("b.txt", cx))
             .await;
 
-        // Dropping the worktree removes client B from client A's collaborators.
-        cx_b.update(move |_| drop(worktree_b));
-        worktree_a
-            .condition(&cx_a, |tree, _| tree.collaborators().is_empty())
+        // Dropping client B's project removes client B from client A's collaborators.
+        cx_b.update(move |_| drop(project_b));
+        project_a
+            .condition(&cx_a, |project, _| project.collaborators().is_empty())
             .await;
     }
 
     #[gpui::test]
-    async fn test_unshare_worktree(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
-        cx_b.update(zed::contacts_panel::init);
-        let mut app_state_a = cx_a.update(test_app_state);
-        let mut app_state_b = cx_b.update(test_app_state);
+    async fn test_unshare_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+        let lang_registry = Arc::new(LanguageRegistry::new());
+        let fs = Arc::new(FakeFs::new());
+        cx_a.foreground().forbid_parking();
 
         // Connect to a server as 2 clients.
         let mut server = TestServer::start().await;
         let client_a = server.create_client(&mut cx_a, "user_a").await;
         let client_b = server.create_client(&mut cx_b, "user_b").await;
-        Arc::get_mut(&mut app_state_a).unwrap().client = client_a.clone();
-        Arc::get_mut(&mut app_state_a).unwrap().user_store = client_a.user_store.clone();
-        Arc::get_mut(&mut app_state_b).unwrap().client = client_b.clone();
-        Arc::get_mut(&mut app_state_b).unwrap().user_store = client_b.user_store.clone();
-
-        cx_a.foreground().forbid_parking();
 
-        // Share a local worktree as client A
-        let fs = Arc::new(FakeFs::new());
+        // Share a project as client A
         fs.insert_tree(
             "/a",
             json!({
@@ -1105,81 +1184,66 @@ mod tests {
             }),
         )
         .await;
-        let worktree_a = Worktree::open_local(
-            app_state_a.client.clone(),
-            app_state_a.user_store.clone(),
-            "/a".as_ref(),
-            fs,
-            app_state_a.languages.clone(),
-            &mut cx_a.to_async(),
-        )
-        .await
-        .unwrap();
+        let project_a = cx_a.update(|cx| {
+            Project::local(
+                client_a.clone(),
+                client_a.user_store.clone(),
+                lang_registry.clone(),
+                fs.clone(),
+                cx,
+            )
+        });
+        let worktree_a = project_a
+            .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx))
+            .await
+            .unwrap();
         worktree_a
             .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
             .await;
-
-        let remote_worktree_id = worktree_a
-            .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx))
+        let project_id = project_a
+            .update(&mut cx_a, |project, _| project.next_remote_id())
+            .await;
+        project_a
+            .update(&mut cx_a, |project, cx| project.share(cx))
             .await
             .unwrap();
 
-        let (window_b, workspace_b) =
-            cx_b.add_window(|cx| Workspace::new(&app_state_b.as_ref().into(), cx));
-        cx_b.update(|cx| {
-            cx.dispatch_action(
-                window_b,
-                vec![workspace_b.id()],
-                &JoinWorktree(remote_worktree_id),
-            );
-        });
-        workspace_b
-            .condition(&cx_b, |workspace, cx| workspace.worktrees(cx).len() == 1)
-            .await;
+        // Join that project as client B
+        let project_b = Project::remote(
+            project_id,
+            client_b.clone(),
+            client_b.user_store.clone(),
+            lang_registry.clone(),
+            fs.clone(),
+            &mut cx_b.to_async(),
+        )
+        .await
+        .unwrap();
 
-        let local_worktree_id_b = workspace_b.read_with(&cx_b, |workspace, cx| {
-            let active_pane = workspace.active_pane().read(cx);
-            assert!(active_pane.active_item().is_none());
-            workspace.worktrees(cx).first().unwrap().id()
-        });
-        workspace_b
-            .update(&mut cx_b, |workspace, cx| {
-                workspace.open_entry(
-                    ProjectPath {
-                        worktree_id: local_worktree_id_b,
-                        path: Path::new("a.txt").into(),
-                    },
-                    cx,
-                )
-            })
-            .unwrap()
+        let worktree_b = project_b.read_with(&cx_b, |p, _| p.worktrees()[0].clone());
+        worktree_b
+            .update(&mut cx_b, |tree, cx| tree.open_buffer("a.txt", cx))
             .await
             .unwrap();
-        workspace_b.read_with(&cx_b, |workspace, cx| {
-            let active_pane = workspace.active_pane().read(cx);
-            assert!(active_pane.active_item().is_some());
-        });
 
-        worktree_a.update(&mut cx_a, |tree, cx| {
-            tree.as_local_mut().unwrap().unshare(cx);
-        });
-        workspace_b
-            .condition(&cx_b, |workspace, cx| workspace.worktrees(cx).len() == 0)
+        project_a
+            .update(&mut cx_a, |project, cx| project.unshare(cx))
+            .await
+            .unwrap();
+        project_b
+            .condition(&mut cx_b, |project, _| project.is_read_only())
             .await;
-        workspace_b.read_with(&cx_b, |workspace, cx| {
-            let active_pane = workspace.active_pane().read(cx);
-            assert!(active_pane.active_item().is_none());
-        });
     }
 
     #[gpui::test]
-    async fn test_propagate_saves_and_fs_changes_in_shared_worktree(
+    async fn test_propagate_saves_and_fs_changes(
         mut cx_a: TestAppContext,
         mut cx_b: TestAppContext,
         mut cx_c: TestAppContext,
     ) {
-        cx_a.foreground().forbid_parking();
         let lang_registry = Arc::new(LanguageRegistry::new());
+        let fs = Arc::new(FakeFs::new());
+        cx_a.foreground().forbid_parking();
 
         // Connect to a server as 3 clients.
         let mut server = TestServer::start().await;
@@ -1187,8 +1251,6 @@ mod tests {
         let client_b = server.create_client(&mut cx_b, "user_b").await;
         let client_c = server.create_client(&mut cx_c, "user_c").await;
 
-        let fs = Arc::new(FakeFs::new());
-
         // Share a worktree as client A.
         fs.insert_tree(
             "/a",
@@ -1199,46 +1261,55 @@ mod tests {
             }),
         )
         .await;
-
-        let worktree_a = Worktree::open_local(
-            client_a.clone(),
-            client_a.user_store.clone(),
-            "/a".as_ref(),
-            fs.clone(),
-            lang_registry.clone(),
-            &mut cx_a.to_async(),
-        )
-        .await
-        .unwrap();
+        let project_a = cx_a.update(|cx| {
+            Project::local(
+                client_a.clone(),
+                client_a.user_store.clone(),
+                lang_registry.clone(),
+                fs.clone(),
+                cx,
+            )
+        });
+        let worktree_a = project_a
+            .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx))
+            .await
+            .unwrap();
         worktree_a
             .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
             .await;
-        let worktree_id = worktree_a
-            .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx))
+        let project_id = project_a
+            .update(&mut cx_a, |project, _| project.next_remote_id())
+            .await;
+        project_a
+            .update(&mut cx_a, |project, cx| project.share(cx))
             .await
             .unwrap();
 
         // Join that worktree as clients B and C.
-        let worktree_b = Worktree::open_remote(
+        let project_b = Project::remote(
+            project_id,
             client_b.clone(),
-            worktree_id,
-            lang_registry.clone(),
             client_b.user_store.clone(),
+            lang_registry.clone(),
+            fs.clone(),
             &mut cx_b.to_async(),
         )
         .await
         .unwrap();
-        let worktree_c = Worktree::open_remote(
+        let project_c = Project::remote(
+            project_id,
             client_c.clone(),
-            worktree_id,
-            lang_registry.clone(),
             client_c.user_store.clone(),
+            lang_registry.clone(),
+            fs.clone(),
             &mut cx_c.to_async(),
         )
         .await
         .unwrap();
 
         // Open and edit a buffer as both guests B and C.
+        let worktree_b = project_b.read_with(&cx_b, |p, _| p.worktrees()[0].clone());
+        let worktree_c = project_c.read_with(&cx_c, |p, _| p.worktrees()[0].clone());
         let buffer_b = worktree_b
             .update(&mut cx_b, |tree, cx| tree.open_buffer("file1", cx))
             .await
@@ -1322,14 +1393,14 @@ mod tests {
     async fn test_buffer_conflict_after_save(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
         cx_a.foreground().forbid_parking();
         let lang_registry = Arc::new(LanguageRegistry::new());
+        let fs = Arc::new(FakeFs::new());
 
         // Connect to a server as 2 clients.
         let mut server = TestServer::start().await;
         let client_a = server.create_client(&mut cx_a, "user_a").await;
         let client_b = server.create_client(&mut cx_b, "user_b").await;
 
-        // Share a local worktree as client A
-        let fs = Arc::new(FakeFs::new());
+        // Share a project as client A
         fs.insert_tree(
             "/dir",
             json!({
@@ -1339,35 +1410,44 @@ mod tests {
         )
         .await;
 
-        let worktree_a = Worktree::open_local(
-            client_a.clone(),
-            client_a.user_store.clone(),
-            "/dir".as_ref(),
-            fs,
-            lang_registry.clone(),
-            &mut cx_a.to_async(),
-        )
-        .await
-        .unwrap();
+        let project_a = cx_a.update(|cx| {
+            Project::local(
+                client_a.clone(),
+                client_a.user_store.clone(),
+                lang_registry.clone(),
+                fs.clone(),
+                cx,
+            )
+        });
+        let worktree_a = project_a
+            .update(&mut cx_a, |p, cx| p.add_local_worktree("/dir", cx))
+            .await
+            .unwrap();
         worktree_a
             .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
             .await;
-        let worktree_id = worktree_a
-            .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx))
+        let project_id = project_a
+            .update(&mut cx_a, |project, _| project.next_remote_id())
+            .await;
+        project_a
+            .update(&mut cx_a, |project, cx| project.share(cx))
             .await
             .unwrap();
 
-        // Join that worktree as client B, and see that a guest has joined as client A.
-        let worktree_b = Worktree::open_remote(
+        // Join that project as client B
+        let project_b = Project::remote(
+            project_id,
             client_b.clone(),
-            worktree_id,
-            lang_registry.clone(),
             client_b.user_store.clone(),
+            lang_registry.clone(),
+            fs.clone(),
             &mut cx_b.to_async(),
         )
         .await
         .unwrap();
+        let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone());
 
+        // Open a buffer as client B
         let buffer_b = worktree_b
             .update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx))
             .await

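Taken together, the handlers above produce the project-scoped flow that the rewritten tests exercise. As a quick orientation, here is the host/guest sequence condensed from those tests; it relies on the test-only scaffolding (TestServer-created clients, FakeFs, TestAppContext), so it is a sketch rather than a standalone program:

    // Host: build a local project, attach a worktree, then share it
    // (RegisterProject / RegisterWorktree / ShareProject on the server side).
    let project_a = cx_a.update(|cx| {
        Project::local(client_a.clone(), client_a.user_store.clone(), lang_registry.clone(), fs.clone(), cx)
    });
    let worktree_a = project_a
        .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx))
        .await
        .unwrap();
    let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
    project_a.update(&mut cx_a, |p, cx| p.share(cx)).await.unwrap();

    // Guest: join by project id; shared worktrees arrive in JoinProjectResponse.
    let project_b = Project::remote(
        project_id,
        client_b.clone(),
        client_b.user_store.clone(),
        lang_registry.clone(),
        fs.clone(),
        &mut cx_b.to_async(),
    )
    .await
    .unwrap();
    let worktree_b = project_b.read_with(&cx_b, |p, _| p.worktrees()[0].clone());
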
crates/server/src/rpc/store.rs 🔗

@@ -8,29 +8,38 @@ use std::collections::hash_map;
 pub struct Store {
     connections: HashMap<ConnectionId, ConnectionState>,
     connections_by_user_id: HashMap<UserId, HashSet<ConnectionId>>,
-    worktrees: HashMap<u64, Worktree>,
-    visible_worktrees_by_user_id: HashMap<UserId, HashSet<u64>>,
+    projects: HashMap<u64, Project>,
+    visible_projects_by_user_id: HashMap<UserId, HashSet<u64>>,
     channels: HashMap<ChannelId, Channel>,
-    next_worktree_id: u64,
+    next_project_id: u64,
 }
 
 struct ConnectionState {
     user_id: UserId,
-    worktrees: HashSet<u64>,
+    projects: HashSet<u64>,
     channels: HashSet<ChannelId>,
 }
 
-pub struct Worktree {
+pub struct Project {
     pub host_connection_id: ConnectionId,
     pub host_user_id: UserId,
+    pub share: Option<ProjectShare>,
+    pub worktrees: HashMap<u64, Worktree>,
+}
+
+pub struct Worktree {
     pub authorized_user_ids: Vec<UserId>,
     pub root_name: String,
     pub share: Option<WorktreeShare>,
 }
 
-pub struct WorktreeShare {
+#[derive(Default)]
+pub struct ProjectShare {
     pub guests: HashMap<ConnectionId, (ReplicaId, UserId)>,
     pub active_replica_ids: HashSet<ReplicaId>,
+}
+
+pub struct WorktreeShare {
     pub entries: HashMap<u64, proto::Entry>,
 }
 
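In the reshaped model above, a Project is owned by the host connection and in turn owns its Worktrees: guest membership and replica ids now live on the project-level ProjectShare, while the entry snapshot stays on each worktree's WorktreeShare. A minimal construction of that hierarchy, purely for illustration (the ConnectionId/UserId tuple-struct values are assumed here, not taken from the patch):

    // Hypothetical host-side state: one shared project containing one shared worktree.
    let mut worktrees = HashMap::default();
    worktrees.insert(
        7, // worktree id chosen by the host
        Worktree {
            authorized_user_ids: vec![UserId(42), UserId(99)],
            root_name: "zed".to_string(),
            share: Some(WorktreeShare { entries: HashMap::default() }),
        },
    );
    let project = Project {
        host_connection_id: ConnectionId(1),
        host_user_id: UserId(42),
        share: Some(ProjectShare::default()), // guests + active_replica_ids tracked per project
        worktrees,
    };
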
@@ -43,14 +52,14 @@ pub type ReplicaId = u16;
 
 #[derive(Default)]
 pub struct RemovedConnectionState {
-    pub hosted_worktrees: HashMap<u64, Worktree>,
-    pub guest_worktree_ids: HashMap<u64, Vec<ConnectionId>>,
+    pub hosted_projects: HashMap<u64, Project>,
+    pub guest_project_ids: HashMap<u64, Vec<ConnectionId>>,
     pub contact_ids: HashSet<UserId>,
 }
 
-pub struct JoinedWorktree<'a> {
+pub struct JoinedProject<'a> {
     pub replica_id: ReplicaId,
-    pub worktree: &'a Worktree,
+    pub project: &'a Project,
 }
 
 pub struct UnsharedWorktree {
@@ -58,7 +67,7 @@ pub struct UnsharedWorktree {
     pub authorized_user_ids: Vec<UserId>,
 }
 
-pub struct LeftWorktree {
+pub struct LeftProject {
     pub connection_ids: Vec<ConnectionId>,
     pub authorized_user_ids: Vec<UserId>,
 }
@@ -69,7 +78,7 @@ impl Store {
             connection_id,
             ConnectionState {
                 user_id,
-                worktrees: Default::default(),
+                projects: Default::default(),
                 channels: Default::default(),
             },
         );
@@ -105,17 +114,15 @@ impl Store {
         }
 
         let mut result = RemovedConnectionState::default();
-        for worktree_id in connection.worktrees.clone() {
-            if let Ok(worktree) = self.remove_worktree(worktree_id, connection_id) {
-                result
-                    .contact_ids
-                    .extend(worktree.authorized_user_ids.iter().copied());
-                result.hosted_worktrees.insert(worktree_id, worktree);
-            } else if let Some(worktree) = self.leave_worktree(connection_id, worktree_id) {
+        for project_id in connection.projects.clone() {
+            if let Some(project) = self.unregister_project(project_id, connection_id) {
+                result.contact_ids.extend(project.authorized_user_ids());
+                result.hosted_projects.insert(project_id, project);
+            } else if let Some(project) = self.leave_project(connection_id, project_id) {
                 result
-                    .guest_worktree_ids
-                    .insert(worktree_id, worktree.connection_ids);
-                result.contact_ids.extend(worktree.authorized_user_ids);
+                    .guest_project_ids
+                    .insert(project_id, project.connection_ids);
+                result.contact_ids.extend(project.authorized_user_ids);
             }
         }
 
@@ -174,15 +181,15 @@ impl Store {
 
     pub fn contacts_for_user(&self, user_id: UserId) -> Vec<proto::Contact> {
         let mut contacts = HashMap::default();
-        for worktree_id in self
-            .visible_worktrees_by_user_id
+        for project_id in self
+            .visible_projects_by_user_id
             .get(&user_id)
             .unwrap_or(&HashSet::default())
         {
-            let worktree = &self.worktrees[worktree_id];
+            let project = &self.projects[project_id];
 
             let mut guests = HashSet::default();
-            if let Ok(share) = worktree.share() {
+            if let Ok(share) = project.share() {
                 for guest_connection_id in share.guests.keys() {
                     if let Ok(user_id) = self.user_id_for_connection(*guest_connection_id) {
                         guests.insert(user_id.to_proto());
@@ -190,18 +197,24 @@ impl Store {
                 }
             }
 
-            if let Ok(host_user_id) = self.user_id_for_connection(worktree.host_connection_id) {
+            if let Ok(host_user_id) = self.user_id_for_connection(project.host_connection_id) {
+                let mut worktree_root_names = project
+                    .worktrees
+                    .values()
+                    .map(|worktree| worktree.root_name.clone())
+                    .collect::<Vec<_>>();
+                worktree_root_names.sort_unstable();
                 contacts
                     .entry(host_user_id)
                     .or_insert_with(|| proto::Contact {
                         user_id: host_user_id.to_proto(),
-                        worktrees: Vec::new(),
+                        projects: Vec::new(),
                     })
-                    .worktrees
-                    .push(proto::WorktreeMetadata {
-                        id: *worktree_id,
-                        root_name: worktree.root_name.clone(),
-                        is_shared: worktree.share.is_some(),
+                    .projects
+                    .push(proto::ProjectMetadata {
+                        id: *project_id,
+                        worktree_root_names,
+                        is_shared: project.share.is_some(),
                         guests: guests.into_iter().collect(),
                     });
             }
@@ -210,107 +223,147 @@ impl Store {
         contacts.into_values().collect()
     }
 
-    pub fn add_worktree(&mut self, worktree: Worktree) -> u64 {
-        let worktree_id = self.next_worktree_id;
-        for authorized_user_id in &worktree.authorized_user_ids {
-            self.visible_worktrees_by_user_id
-                .entry(*authorized_user_id)
-                .or_default()
-                .insert(worktree_id);
-        }
-        self.next_worktree_id += 1;
-        if let Some(connection) = self.connections.get_mut(&worktree.host_connection_id) {
-            connection.worktrees.insert(worktree_id);
-        }
-        self.worktrees.insert(worktree_id, worktree);
-
-        #[cfg(test)]
-        self.check_invariants();
-
-        worktree_id
+    pub fn register_project(
+        &mut self,
+        host_connection_id: ConnectionId,
+        host_user_id: UserId,
+    ) -> u64 {
+        let project_id = self.next_project_id;
+        self.projects.insert(
+            project_id,
+            Project {
+                host_connection_id,
+                host_user_id,
+                share: None,
+                worktrees: Default::default(),
+            },
+        );
+        self.next_project_id += 1;
+        project_id
     }
 
-    pub fn remove_worktree(
+    pub fn register_worktree(
         &mut self,
+        project_id: u64,
         worktree_id: u64,
-        acting_connection_id: ConnectionId,
-    ) -> tide::Result<Worktree> {
-        let worktree = if let hash_map::Entry::Occupied(e) = self.worktrees.entry(worktree_id) {
-            if e.get().host_connection_id != acting_connection_id {
-                Err(anyhow!("not your worktree"))?;
+        worktree: Worktree,
+    ) -> bool {
+        if let Some(project) = self.projects.get_mut(&project_id) {
+            for authorized_user_id in &worktree.authorized_user_ids {
+                self.visible_projects_by_user_id
+                    .entry(*authorized_user_id)
+                    .or_default()
+                    .insert(project_id);
             }
-            e.remove()
-        } else {
-            return Err(anyhow!("no such worktree"))?;
-        };
+            if let Some(connection) = self.connections.get_mut(&project.host_connection_id) {
+                connection.projects.insert(project_id);
+            }
+            project.worktrees.insert(worktree_id, worktree);
 
-        if let Some(connection) = self.connections.get_mut(&worktree.host_connection_id) {
-            connection.worktrees.remove(&worktree_id);
+            #[cfg(test)]
+            self.check_invariants();
+            true
+        } else {
+            false
         }
+    }
+
+    pub fn unregister_project(
+        &mut self,
+        project_id: u64,
+        connection_id: ConnectionId,
+    ) -> Option<Project> {
+        match self.projects.entry(project_id) {
+            hash_map::Entry::Occupied(e) => {
+                if e.get().host_connection_id == connection_id {
+                    for user_id in e.get().authorized_user_ids() {
+                        if let hash_map::Entry::Occupied(mut projects) =
+                            self.visible_projects_by_user_id.entry(user_id)
+                        {
+                            projects.get_mut().remove(&project_id);
+                        }
+                    }
 
-        if let Some(share) = &worktree.share {
-            for connection_id in share.guests.keys() {
-                if let Some(connection) = self.connections.get_mut(connection_id) {
-                    connection.worktrees.remove(&worktree_id);
+                    Some(e.remove())
+                } else {
+                    None
                 }
             }
+            hash_map::Entry::Vacant(_) => None,
+        }
+    }
+
+    pub fn unregister_worktree(
+        &mut self,
+        project_id: u64,
+        worktree_id: u64,
+        acting_connection_id: ConnectionId,
+    ) -> tide::Result<(Worktree, Vec<ConnectionId>)> {
+        let project = self
+            .projects
+            .get_mut(&project_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
+        if project.host_connection_id != acting_connection_id {
+            Err(anyhow!("not your worktree"))?;
+        }
+
+        let worktree = project
+            .worktrees
+            .remove(&worktree_id)
+            .ok_or_else(|| anyhow!("no such worktree"))?;
+
+        let mut guest_connection_ids = Vec::new();
+        if let Some(share) = &project.share {
+            guest_connection_ids.extend(share.guests.keys());
         }
 
         for authorized_user_id in &worktree.authorized_user_ids {
-            if let Some(visible_worktrees) = self
-                .visible_worktrees_by_user_id
-                .get_mut(&authorized_user_id)
+            if let Some(visible_projects) =
+                self.visible_projects_by_user_id.get_mut(authorized_user_id)
             {
-                visible_worktrees.remove(&worktree_id);
+                if !project.has_authorized_user_id(*authorized_user_id) {
+                    visible_projects.remove(&project_id);
+                }
             }
         }
 
         #[cfg(test)]
         self.check_invariants();
 
-        Ok(worktree)
+        Ok((worktree, guest_connection_ids))
     }
 
-    pub fn share_worktree(
-        &mut self,
-        worktree_id: u64,
-        connection_id: ConnectionId,
-        entries: HashMap<u64, proto::Entry>,
-    ) -> Option<Vec<UserId>> {
-        if let Some(worktree) = self.worktrees.get_mut(&worktree_id) {
-            if worktree.host_connection_id == connection_id {
-                worktree.share = Some(WorktreeShare {
-                    guests: Default::default(),
-                    active_replica_ids: Default::default(),
-                    entries,
-                });
-                return Some(worktree.authorized_user_ids.clone());
+    pub fn share_project(&mut self, project_id: u64, connection_id: ConnectionId) -> bool {
+        if let Some(project) = self.projects.get_mut(&project_id) {
+            if project.host_connection_id == connection_id {
+                project.share = Some(ProjectShare::default());
+                return true;
             }
         }
-        None
+        false
     }
 
-    pub fn unshare_worktree(
+    pub fn unshare_project(
         &mut self,
-        worktree_id: u64,
+        project_id: u64,
         acting_connection_id: ConnectionId,
     ) -> tide::Result<UnsharedWorktree> {
-        let worktree = if let Some(worktree) = self.worktrees.get_mut(&worktree_id) {
-            worktree
+        let project = if let Some(project) = self.projects.get_mut(&project_id) {
+            project
         } else {
-            return Err(anyhow!("no such worktree"))?;
+            return Err(anyhow!("no such project"))?;
         };
 
-        if worktree.host_connection_id != acting_connection_id {
-            return Err(anyhow!("not your worktree"))?;
+        if project.host_connection_id != acting_connection_id {
+            return Err(anyhow!("not your project"))?;
         }
 
-        let connection_ids = worktree.connection_ids();
-        let authorized_user_ids = worktree.authorized_user_ids.clone();
-        if let Some(share) = worktree.share.take() {
+        let connection_ids = project.connection_ids();
+        let authorized_user_ids = project.authorized_user_ids();
+        if let Some(share) = project.share.take() {
             for connection_id in share.guests.into_keys() {
                 if let Some(connection) = self.connections.get_mut(&connection_id) {
-                    connection.worktrees.remove(&worktree_id);
+                    connection.projects.remove(&project_id);
                 }
             }
 
@@ -322,34 +375,51 @@ impl Store {
                 authorized_user_ids,
             })
         } else {
-            Err(anyhow!("worktree is not shared"))?
+            Err(anyhow!("project is not shared"))?
+        }
+    }
+
+    pub fn share_worktree(
+        &mut self,
+        project_id: u64,
+        worktree_id: u64,
+        connection_id: ConnectionId,
+        entries: HashMap<u64, proto::Entry>,
+    ) -> Option<Vec<UserId>> {
+        let project = self.projects.get_mut(&project_id)?;
+        let worktree = project.worktrees.get_mut(&worktree_id)?;
+        if project.host_connection_id == connection_id && project.share.is_some() {
+            worktree.share = Some(WorktreeShare { entries });
+            Some(project.authorized_user_ids())
+        } else {
+            None
         }
     }
 
-    pub fn join_worktree(
+    pub fn join_project(
         &mut self,
         connection_id: ConnectionId,
         user_id: UserId,
-        worktree_id: u64,
-    ) -> tide::Result<JoinedWorktree> {
+        project_id: u64,
+    ) -> tide::Result<JoinedProject> {
         let connection = self
             .connections
             .get_mut(&connection_id)
             .ok_or_else(|| anyhow!("no such connection"))?;
-        let worktree = self
-            .worktrees
-            .get_mut(&worktree_id)
-            .and_then(|worktree| {
-                if worktree.authorized_user_ids.contains(&user_id) {
-                    Some(worktree)
+        let project = self
+            .projects
+            .get_mut(&project_id)
+            .and_then(|project| {
+                if project.has_authorized_user_id(user_id) {
+                    Some(project)
                 } else {
                     None
                 }
             })
-            .ok_or_else(|| anyhow!("no such worktree"))?;
+            .ok_or_else(|| anyhow!("no such project"))?;
 
-        let share = worktree.share_mut()?;
-        connection.worktrees.insert(worktree_id);
+        let share = project.share_mut()?;
+        connection.projects.insert(project_id);
 
         let mut replica_id = 1;
         while share.active_replica_ids.contains(&replica_id) {
@@ -361,33 +431,33 @@ impl Store {
         #[cfg(test)]
         self.check_invariants();
 
-        Ok(JoinedWorktree {
+        Ok(JoinedProject {
             replica_id,
-            worktree: &self.worktrees[&worktree_id],
+            project: &self.projects[&project_id],
         })
     }
 
-    pub fn leave_worktree(
+    pub fn leave_project(
         &mut self,
         connection_id: ConnectionId,
-        worktree_id: u64,
-    ) -> Option<LeftWorktree> {
-        let worktree = self.worktrees.get_mut(&worktree_id)?;
-        let share = worktree.share.as_mut()?;
+        project_id: u64,
+    ) -> Option<LeftProject> {
+        let project = self.projects.get_mut(&project_id)?;
+        let share = project.share.as_mut()?;
         let (replica_id, _) = share.guests.remove(&connection_id)?;
         share.active_replica_ids.remove(&replica_id);
 
         if let Some(connection) = self.connections.get_mut(&connection_id) {
-            connection.worktrees.remove(&worktree_id);
+            connection.projects.remove(&project_id);
         }
 
-        let connection_ids = worktree.connection_ids();
-        let authorized_user_ids = worktree.authorized_user_ids.clone();
+        let connection_ids = project.connection_ids();
+        let authorized_user_ids = project.authorized_user_ids();
 
         #[cfg(test)]
         self.check_invariants();
 
-        Some(LeftWorktree {
+        Some(LeftProject {
             connection_ids,
             authorized_user_ids,
         })
@@ -396,115 +466,75 @@ impl Store {
     pub fn update_worktree(
         &mut self,
         connection_id: ConnectionId,
+        project_id: u64,
         worktree_id: u64,
         removed_entries: &[u64],
         updated_entries: &[proto::Entry],
-    ) -> tide::Result<Vec<ConnectionId>> {
-        let worktree = self.write_worktree(worktree_id, connection_id)?;
-        let share = worktree.share_mut()?;
+    ) -> Option<Vec<ConnectionId>> {
+        let project = self.write_project(project_id, connection_id)?;
+        let share = project.worktrees.get_mut(&worktree_id)?.share.as_mut()?;
         for entry_id in removed_entries {
             share.entries.remove(&entry_id);
         }
         for entry in updated_entries {
             share.entries.insert(entry.id, entry.clone());
         }
-        Ok(worktree.connection_ids())
-    }
-
-    pub fn worktree_host_connection_id(
-        &self,
-        connection_id: ConnectionId,
-        worktree_id: u64,
-    ) -> tide::Result<ConnectionId> {
-        Ok(self
-            .read_worktree(worktree_id, connection_id)?
-            .host_connection_id)
-    }
-
-    pub fn worktree_guest_connection_ids(
-        &self,
-        connection_id: ConnectionId,
-        worktree_id: u64,
-    ) -> tide::Result<Vec<ConnectionId>> {
-        Ok(self
-            .read_worktree(worktree_id, connection_id)?
-            .share()?
-            .guests
-            .keys()
-            .copied()
-            .collect())
+        Some(project.connection_ids())
     }
 
-    pub fn worktree_connection_ids(
+    pub fn project_connection_ids(
         &self,
-        connection_id: ConnectionId,
-        worktree_id: u64,
-    ) -> tide::Result<Vec<ConnectionId>> {
-        Ok(self
-            .read_worktree(worktree_id, connection_id)?
-            .connection_ids())
+        project_id: u64,
+        acting_connection_id: ConnectionId,
+    ) -> Option<Vec<ConnectionId>> {
+        Some(
+            self.read_project(project_id, acting_connection_id)?
+                .connection_ids(),
+        )
     }
 
     pub fn channel_connection_ids(&self, channel_id: ChannelId) -> Option<Vec<ConnectionId>> {
         Some(self.channels.get(&channel_id)?.connection_ids())
     }
 
-    fn read_worktree(
-        &self,
-        worktree_id: u64,
-        connection_id: ConnectionId,
-    ) -> tide::Result<&Worktree> {
-        let worktree = self
-            .worktrees
-            .get(&worktree_id)
-            .ok_or_else(|| anyhow!("worktree not found"))?;
-
-        if worktree.host_connection_id == connection_id
-            || worktree.share()?.guests.contains_key(&connection_id)
+    pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> {
+        let project = self.projects.get(&project_id)?;
+        if project.host_connection_id == connection_id
+            || project.share.as_ref()?.guests.contains_key(&connection_id)
         {
-            Ok(worktree)
+            Some(project)
         } else {
-            Err(anyhow!(
-                "{} is not a member of worktree {}",
-                connection_id,
-                worktree_id
-            ))?
+            None
         }
     }
 
-    fn write_worktree(
+    fn write_project(
         &mut self,
-        worktree_id: u64,
+        project_id: u64,
         connection_id: ConnectionId,
-    ) -> tide::Result<&mut Worktree> {
-        let worktree = self
-            .worktrees
-            .get_mut(&worktree_id)
-            .ok_or_else(|| anyhow!("worktree not found"))?;
-
-        if worktree.host_connection_id == connection_id
-            || worktree
-                .share
-                .as_ref()
-                .map_or(false, |share| share.guests.contains_key(&connection_id))
+    ) -> Option<&mut Project> {
+        let project = self.projects.get_mut(&project_id)?;
+        if project.host_connection_id == connection_id
+            || project.share.as_ref()?.guests.contains_key(&connection_id)
         {
-            Ok(worktree)
+            Some(project)
         } else {
-            Err(anyhow!(
-                "{} is not a member of worktree {}",
-                connection_id,
-                worktree_id
-            ))?
+            None
         }
     }
 
     #[cfg(test)]
     fn check_invariants(&self) {
         for (connection_id, connection) in &self.connections {
-            for worktree_id in &connection.worktrees {
-                let worktree = &self.worktrees.get(&worktree_id).unwrap();
-                if worktree.host_connection_id != *connection_id {
-                    assert!(worktree.share().unwrap().guests.contains_key(connection_id));
+            for project_id in &connection.projects {
+                let project = &self.projects.get(&project_id).unwrap();
+                if project.host_connection_id != *connection_id {
+                    assert!(project
+                        .share
+                        .as_ref()
+                        .unwrap()
+                        .guests
+                        .contains_key(connection_id));
                 }
             }
             for channel_id in &connection.channels {
@@ -527,22 +557,22 @@ impl Store {
             }
         }
 
-        for (worktree_id, worktree) in &self.worktrees {
-            let host_connection = self.connections.get(&worktree.host_connection_id).unwrap();
-            assert!(host_connection.worktrees.contains(worktree_id));
+        for (project_id, project) in &self.projects {
+            let host_connection = self.connections.get(&project.host_connection_id).unwrap();
+            assert!(host_connection.projects.contains(project_id));
 
-            for authorized_user_ids in &worktree.authorized_user_ids {
-                let visible_worktree_ids = self
-                    .visible_worktrees_by_user_id
-                    .get(authorized_user_ids)
+            for authorized_user_ids in project.authorized_user_ids() {
+                let visible_project_ids = self
+                    .visible_projects_by_user_id
+                    .get(&authorized_user_ids)
                     .unwrap();
-                assert!(visible_worktree_ids.contains(worktree_id));
+                assert!(visible_project_ids.contains(project_id));
             }
 
-            if let Some(share) = &worktree.share {
+            if let Some(share) = &project.share {
                 for guest_connection_id in share.guests.keys() {
                     let guest_connection = self.connections.get(guest_connection_id).unwrap();
-                    assert!(guest_connection.worktrees.contains(worktree_id));
+                    assert!(guest_connection.projects.contains(project_id));
                 }
                 assert_eq!(share.active_replica_ids.len(), share.guests.len(),);
                 assert_eq!(
@@ -556,10 +586,10 @@ impl Store {
             }
         }
 
-        for (user_id, visible_worktree_ids) in &self.visible_worktrees_by_user_id {
-            for worktree_id in visible_worktree_ids {
-                let worktree = self.worktrees.get(worktree_id).unwrap();
-                assert!(worktree.authorized_user_ids.contains(user_id));
+        for (user_id, visible_project_ids) in &self.visible_projects_by_user_id {
+            for project_id in visible_project_ids {
+                let project = self.projects.get(project_id).unwrap();
+                assert!(project.authorized_user_ids().contains(user_id));
             }
         }
 
@@ -572,7 +602,33 @@ impl Store {
     }
 }
 
-impl Worktree {
+impl Project {
+    pub fn has_authorized_user_id(&self, user_id: UserId) -> bool {
+        self.worktrees
+            .values()
+            .any(|worktree| worktree.authorized_user_ids.contains(&user_id))
+    }
+
+    pub fn authorized_user_ids(&self) -> Vec<UserId> {
+        let mut ids = self
+            .worktrees
+            .values()
+            .flat_map(|worktree| worktree.authorized_user_ids.iter())
+            .copied()
+            .collect::<Vec<_>>();
+        ids.sort_unstable();
+        ids.dedup();
+        ids
+    }
+
+    pub fn guest_connection_ids(&self) -> Vec<ConnectionId> {
+        if let Some(share) = &self.share {
+            share.guests.keys().copied().collect()
+        } else {
+            Vec::new()
+        }
+    }
+
     pub fn connection_ids(&self) -> Vec<ConnectionId> {
         if let Some(share) = &self.share {
             share
@@ -586,14 +642,14 @@ impl Worktree {
         }
     }
 
-    pub fn share(&self) -> tide::Result<&WorktreeShare> {
+    pub fn share(&self) -> tide::Result<&ProjectShare> {
         Ok(self
             .share
             .as_ref()
             .ok_or_else(|| anyhow!("worktree is not shared"))?)
     }
 
-    fn share_mut(&mut self) -> tide::Result<&mut WorktreeShare> {
+    fn share_mut(&mut self) -> tide::Result<&mut ProjectShare> {
         Ok(self
             .share
             .as_mut()
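
The store now scopes authorization to projects: `read_project` and `write_project` return `None` unless the acting connection is the project's host or one of its guests, and `project_connection_ids` builds on the same check. A minimal sketch of how a server-side handler might lean on that; `relay_to_project` and the message-sending step are hypothetical, only `Store::project_connection_ids` comes from this change:

    // Hypothetical helper: fan a message out to every member of a project,
    // doing nothing if the sender is not itself a member.
    fn relay_to_project(store: &Store, project_id: u64, sender: ConnectionId) -> Option<()> {
        // None here means the sender is neither host nor guest of the project.
        let connection_ids = store.project_connection_ids(project_id, sender)?;
        for connection_id in connection_ids {
            if connection_id != sender {
                // send the message on `connection_id` here
            }
        }
        Some(())
    }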

crates/sum_tree/src/cursor.rs 🔗

@@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> {
     at_end: bool,
 }
 
+pub struct Iter<'a, T: Item> {
+    tree: &'a SumTree<T>,
+    stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
+}
+
 impl<'a, T, D> Cursor<'a, T, D>
 where
     T: Item,
@@ -487,6 +492,71 @@ where
     }
 }
 
+impl<'a, T: Item> Iter<'a, T> {
+    pub(crate) fn new(tree: &'a SumTree<T>) -> Self {
+        Self {
+            tree,
+            stack: Default::default(),
+        }
+    }
+}
+
+impl<'a, T: Item> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let mut descend = false;
+
+        if self.stack.is_empty() {
+            self.stack.push(StackEntry {
+                tree: self.tree,
+                index: 0,
+                position: (),
+            });
+            descend = true;
+        }
+
+        while self.stack.len() > 0 {
+            let new_subtree = {
+                let entry = self.stack.last_mut().unwrap();
+                match entry.tree.0.as_ref() {
+                    Node::Internal { child_trees, .. } => {
+                        if !descend {
+                            entry.index += 1;
+                        }
+                        child_trees.get(entry.index)
+                    }
+                    Node::Leaf { items, .. } => {
+                        if !descend {
+                            entry.index += 1;
+                        }
+
+                        if let Some(next_item) = items.get(entry.index) {
+                            return Some(next_item);
+                        } else {
+                            None
+                        }
+                    }
+                }
+            };
+
+            if let Some(subtree) = new_subtree {
+                descend = true;
+                self.stack.push(StackEntry {
+                    tree: subtree,
+                    index: 0,
+                    position: (),
+                });
+            } else {
+                descend = false;
+                self.stack.pop();
+            }
+        }
+
+        None
+    }
+}
+
 impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
 where
     T: Item<Summary = S>,

crates/sum_tree/src/sum_tree.rs 🔗

@@ -1,10 +1,11 @@
 mod cursor;
+mod tree_map;
 
 use arrayvec::ArrayVec;
-pub use cursor::Cursor;
-pub use cursor::FilterCursor;
+pub use cursor::{Cursor, FilterCursor, Iter};
 use std::marker::PhantomData;
 use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
+pub use tree_map::TreeMap;
 
 #[cfg(test)]
 const TREE_BASE: usize = 2;
@@ -156,6 +157,10 @@ impl<T: Item> SumTree<T> {
         items
     }
 
+    pub fn iter(&self) -> Iter<T> {
+        Iter::new(self)
+    }
+
     pub fn cursor<'a, S>(&'a self) -> Cursor<T, S>
     where
         S: Dimension<'a, T::Summary>,
@@ -722,6 +727,10 @@ mod tests {
                 };
 
                 assert_eq!(tree.items(&()), reference_items);
+                assert_eq!(
+                    tree.iter().collect::<Vec<_>>(),
+                    tree.cursor::<()>().collect::<Vec<_>>()
+                );
 
                 let mut filter_cursor =
                     tree.filter::<_, Count>(|summary| summary.contains_even, &());
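
`SumTree::iter` performs an in-order walk over the items without maintaining any seek dimension, which is exactly what `TreeMap::iter` below needs. A tiny sketch of the equivalence the new randomized assertion checks, assuming an item type `T: Item + Clone`:

    // Collecting via `iter` yields the same sequence as cursoring with the
    // unit dimension, but without accumulating any summary along the way.
    fn collect_items<T: Item + Clone>(tree: &SumTree<T>) -> Vec<T> {
        tree.iter().cloned().collect()
    }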

crates/sum_tree/src/tree_map.rs 🔗

@@ -0,0 +1,152 @@
+use std::{cmp::Ordering, fmt::Debug};
+
+use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary};
+
+#[derive(Clone)]
+pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
+where
+    K: Clone + Debug + Default + Ord,
+    V: Clone + Debug;
+
+#[derive(Clone)]
+pub struct MapEntry<K, V> {
+    key: K,
+    value: V,
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+pub struct MapKey<K>(K);
+
+#[derive(Clone, Debug, Default)]
+pub struct MapKeyRef<'a, K>(Option<&'a K>);
+
+impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
+    pub fn get<'a>(&self, key: &'a K) -> Option<&V> {
+        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
+        let key = MapKeyRef(Some(key));
+        cursor.seek(&key, Bias::Left, &());
+        if key.cmp(cursor.start(), &()) == Ordering::Equal {
+            Some(&cursor.item().unwrap().value)
+        } else {
+            None
+        }
+    }
+
+    pub fn insert(&mut self, key: K, value: V) {
+        self.0.insert_or_replace(MapEntry { key, value }, &());
+    }
+
+    pub fn remove<'a>(&mut self, key: &'a K) -> Option<V> {
+        let mut removed = None;
+        let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
+        let key = MapKeyRef(Some(key));
+        let mut new_tree = cursor.slice(&key, Bias::Left, &());
+        if key.cmp(&cursor.end(&()), &()) == Ordering::Equal {
+            removed = Some(cursor.item().unwrap().value.clone());
+            cursor.next(&());
+        }
+        new_tree.push_tree(cursor.suffix(&()), &());
+        drop(cursor);
+        self.0 = new_tree;
+        removed
+    }
+
+    pub fn iter<'a>(&'a self) -> impl 'a + Iterator<Item = (&'a K, &'a V)> {
+        self.0.iter().map(|entry| (&entry.key, &entry.value))
+    }
+}
+
+impl<K, V> Default for TreeMap<K, V>
+where
+    K: Clone + Debug + Default + Ord,
+    V: Clone + Debug,
+{
+    fn default() -> Self {
+        Self(Default::default())
+    }
+}
+
+impl<K, V> Item for MapEntry<K, V>
+where
+    K: Clone + Debug + Default + Ord,
+    V: Clone,
+{
+    type Summary = MapKey<K>;
+
+    fn summary(&self) -> Self::Summary {
+        self.key()
+    }
+}
+
+impl<K, V> KeyedItem for MapEntry<K, V>
+where
+    K: Clone + Debug + Default + Ord,
+    V: Clone,
+{
+    type Key = MapKey<K>;
+
+    fn key(&self) -> Self::Key {
+        MapKey(self.key.clone())
+    }
+}
+
+impl<K> Summary for MapKey<K>
+where
+    K: Clone + Debug + Default,
+{
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &()) {
+        *self = summary.clone()
+    }
+}
+
+impl<'a, K> Dimension<'a, MapKey<K>> for MapKeyRef<'a, K>
+where
+    K: Clone + Debug + Default + Ord,
+{
+    fn add_summary(&mut self, summary: &'a MapKey<K>, _: &()) {
+        self.0 = Some(&summary.0)
+    }
+}
+
+impl<'a, K> SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>> for MapKeyRef<'_, K>
+where
+    K: Clone + Debug + Default + Ord,
+{
+    fn cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
+        self.0.cmp(&cursor_location.0)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_basic() {
+        let mut map = TreeMap::default();
+        assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
+
+        map.insert(3, "c");
+        assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&3, &"c")]);
+
+        map.insert(1, "a");
+        assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
+
+        map.insert(2, "b");
+        assert_eq!(
+            map.iter().collect::<Vec<_>>(),
+            vec![(&1, &"a"), (&2, &"b"), (&3, &"c")]
+        );
+
+        map.remove(&2);
+        assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
+
+        map.remove(&3);
+        assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a")]);
+
+        map.remove(&1);
+        assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
+    }
+}

crates/text/Cargo.toml 🔗

@@ -15,6 +15,7 @@ collections = { path = "../collections" }
 sum_tree = { path = "../sum_tree" }
 anyhow = "1.0.38"
 arrayvec = "0.7.1"
+lazy_static = "1.4"
 log = "0.4"
 parking_lot = "0.11"
 rand = { version = "0.8.3", optional = true }

crates/text/src/anchor.rs 🔗

@@ -1,105 +1,48 @@
-use crate::{rope::TextDimension, Snapshot};
-
-use super::{Buffer, FromAnchor, FullOffset, Point, ToOffset};
+use super::{Point, ToOffset};
+use crate::{rope::TextDimension, BufferSnapshot};
 use anyhow::Result;
-use std::{
-    cmp::Ordering,
-    fmt::{Debug, Formatter},
-    ops::Range,
-};
-use sum_tree::{Bias, SumTree};
+use std::{cmp::Ordering, fmt::Debug, ops::Range};
+use sum_tree::Bias;
 
 #[derive(Clone, Eq, PartialEq, Debug, Hash)]
 pub struct Anchor {
-    pub full_offset: FullOffset,
+    pub timestamp: clock::Local,
+    pub offset: usize,
     pub bias: Bias,
-    pub version: clock::Global,
-}
-
-#[derive(Clone)]
-pub struct AnchorMap<T> {
-    pub(crate) version: clock::Global,
-    pub(crate) bias: Bias,
-    pub(crate) entries: Vec<(FullOffset, T)>,
-}
-
-#[derive(Clone)]
-pub struct AnchorSet(pub(crate) AnchorMap<()>);
-
-#[derive(Clone)]
-pub struct AnchorRangeMap<T> {
-    pub(crate) version: clock::Global,
-    pub(crate) entries: Vec<(Range<FullOffset>, T)>,
-    pub(crate) start_bias: Bias,
-    pub(crate) end_bias: Bias,
-}
-
-#[derive(Clone)]
-pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>);
-
-#[derive(Clone)]
-pub struct AnchorRangeMultimap<T: Clone> {
-    pub(crate) entries: SumTree<AnchorRangeMultimapEntry<T>>,
-    pub(crate) version: clock::Global,
-    pub(crate) start_bias: Bias,
-    pub(crate) end_bias: Bias,
-}
-
-#[derive(Clone)]
-pub(crate) struct AnchorRangeMultimapEntry<T> {
-    pub(crate) range: FullOffsetRange,
-    pub(crate) value: T,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct FullOffsetRange {
-    pub(crate) start: FullOffset,
-    pub(crate) end: FullOffset,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct AnchorRangeMultimapSummary {
-    start: FullOffset,
-    end: FullOffset,
-    min_start: FullOffset,
-    max_end: FullOffset,
-    count: usize,
 }
 
 impl Anchor {
     pub fn min() -> Self {
         Self {
-            full_offset: FullOffset(0),
+            timestamp: clock::Local::MIN,
+            offset: usize::MIN,
             bias: Bias::Left,
-            version: Default::default(),
         }
     }
 
     pub fn max() -> Self {
         Self {
-            full_offset: FullOffset::MAX,
+            timestamp: clock::Local::MAX,
+            offset: usize::MAX,
             bias: Bias::Right,
-            version: Default::default(),
         }
     }
 
-    pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result<Ordering> {
-        if self == other {
-            return Ok(Ordering::Equal);
-        }
-
-        let offset_comparison = if self.version == other.version {
-            self.full_offset.cmp(&other.full_offset)
+    pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Result<Ordering> {
+        let fragment_id_comparison = if self.timestamp == other.timestamp {
+            Ordering::Equal
         } else {
             buffer
-                .full_offset_for_anchor(self)
-                .cmp(&buffer.full_offset_for_anchor(other))
+                .fragment_id_for_anchor(self)
+                .cmp(&buffer.fragment_id_for_anchor(other))
         };
 
-        Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias)))
+        Ok(fragment_id_comparison
+            .then_with(|| self.offset.cmp(&other.offset))
+            .then_with(|| self.bias.cmp(&other.bias)))
     }
 
-    pub fn bias_left(&self, buffer: &Buffer) -> Anchor {
+    pub fn bias_left(&self, buffer: &BufferSnapshot) -> Anchor {
         if self.bias == Bias::Left {
             self.clone()
         } else {
@@ -107,7 +50,7 @@ impl Anchor {
         }
     }
 
-    pub fn bias_right(&self, buffer: &Buffer) -> Anchor {
+    pub fn bias_right(&self, buffer: &BufferSnapshot) -> Anchor {
         if self.bias == Bias::Right {
             self.clone()
         } else {
@@ -115,464 +58,33 @@ impl Anchor {
         }
     }
 
-    pub fn summary<'a, D>(&self, content: &'a Snapshot) -> D
+    pub fn summary<'a, D>(&self, content: &'a BufferSnapshot) -> D
     where
-        D: TextDimension<'a>,
+        D: TextDimension,
     {
         content.summary_for_anchor(self)
     }
 }
 
-impl<T> AnchorMap<T> {
-    pub fn version(&self) -> &clock::Global {
-        &self.version
-    }
-
-    pub fn len(&self) -> usize {
-        self.entries.len()
-    }
-
-    pub fn iter<'a, D>(&'a self, snapshot: &'a Snapshot) -> impl Iterator<Item = (D, &'a T)> + 'a
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        snapshot
-            .summaries_for_anchors(
-                self.version.clone(),
-                self.bias,
-                self.entries.iter().map(|e| &e.0),
-            )
-            .zip(self.entries.iter().map(|e| &e.1))
-    }
-}
-
-impl AnchorSet {
-    pub fn version(&self) -> &clock::Global {
-        &self.0.version
-    }
-
-    pub fn len(&self) -> usize {
-        self.0.len()
-    }
-
-    pub fn iter<'a, D>(&'a self, content: &'a Snapshot) -> impl Iterator<Item = D> + 'a
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        self.0.iter(content).map(|(position, _)| position)
-    }
-}
-
-impl<T> AnchorRangeMap<T> {
-    pub fn version(&self) -> &clock::Global {
-        &self.version
-    }
-
-    pub fn len(&self) -> usize {
-        self.entries.len()
-    }
-
-    pub fn from_full_offset_ranges(
-        version: clock::Global,
-        start_bias: Bias,
-        end_bias: Bias,
-        entries: Vec<(Range<FullOffset>, T)>,
-    ) -> Self {
-        Self {
-            version,
-            start_bias,
-            end_bias,
-            entries,
-        }
-    }
-
-    pub fn ranges<'a, D>(
-        &'a self,
-        content: &'a Snapshot,
-    ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        content
-            .summaries_for_anchor_ranges(
-                self.version.clone(),
-                self.start_bias,
-                self.end_bias,
-                self.entries.iter().map(|e| &e.0),
-            )
-            .zip(self.entries.iter().map(|e| &e.1))
-    }
-
-    pub fn intersecting_ranges<'a, D, I>(
-        &'a self,
-        range: Range<(I, Bias)>,
-        content: &'a Snapshot,
-    ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
-    where
-        D: 'a + TextDimension<'a>,
-        I: ToOffset,
-    {
-        let range = content.anchor_at(range.start.0, range.start.1)
-            ..content.anchor_at(range.end.0, range.end.1);
-
-        let mut probe_anchor = Anchor {
-            full_offset: Default::default(),
-            bias: self.start_bias,
-            version: self.version.clone(),
-        };
-        let start_ix = self.entries.binary_search_by(|probe| {
-            probe_anchor.full_offset = probe.0.end;
-            probe_anchor.cmp(&range.start, &content).unwrap()
-        });
-
-        match start_ix {
-            Ok(start_ix) | Err(start_ix) => content
-                .summaries_for_anchor_ranges(
-                    self.version.clone(),
-                    self.start_bias,
-                    self.end_bias,
-                    self.entries[start_ix..].iter().map(|e| &e.0),
-                )
-                .zip(self.entries.iter().map(|e| &e.1)),
-        }
-    }
-
-    pub fn full_offset_ranges(&self) -> impl Iterator<Item = &(Range<FullOffset>, T)> {
-        self.entries.iter()
-    }
-
-    pub fn min_by_key<'a, D, F, K>(
-        &self,
-        content: &'a Snapshot,
-        mut extract_key: F,
-    ) -> Option<(Range<D>, &T)>
-    where
-        D: 'a + TextDimension<'a>,
-        F: FnMut(&T) -> K,
-        K: Ord,
-    {
-        self.entries
-            .iter()
-            .min_by_key(|(_, value)| extract_key(value))
-            .map(|(range, value)| (self.resolve_range(range, &content), value))
-    }
-
-    pub fn max_by_key<'a, D, F, K>(
-        &self,
-        content: &'a Snapshot,
-        mut extract_key: F,
-    ) -> Option<(Range<D>, &T)>
-    where
-        D: 'a + TextDimension<'a>,
-        F: FnMut(&T) -> K,
-        K: Ord,
-    {
-        self.entries
-            .iter()
-            .max_by_key(|(_, value)| extract_key(value))
-            .map(|(range, value)| (self.resolve_range(range, &content), value))
-    }
-
-    fn resolve_range<'a, D>(&self, range: &Range<FullOffset>, content: &'a Snapshot) -> Range<D>
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        let mut anchor = Anchor {
-            full_offset: range.start,
-            bias: self.start_bias,
-            version: self.version.clone(),
-        };
-        let start = content.summary_for_anchor(&anchor);
-
-        anchor.full_offset = range.end;
-        anchor.bias = self.end_bias;
-        let end = content.summary_for_anchor(&anchor);
-
-        start..end
-    }
-}
-
-impl<T: PartialEq> PartialEq for AnchorRangeMap<T> {
-    fn eq(&self, other: &Self) -> bool {
-        self.version == other.version && self.entries == other.entries
-    }
-}
-
-impl<T: Eq> Eq for AnchorRangeMap<T> {}
-
-impl<T: Debug> Debug for AnchorRangeMap<T> {
-    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
-        let mut f = f.debug_map();
-        for (range, value) in &self.entries {
-            f.key(range);
-            f.value(value);
-        }
-        f.finish()
-    }
-}
-
-impl Debug for AnchorRangeSet {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        let mut f = f.debug_set();
-        for (range, _) in &self.0.entries {
-            f.entry(range);
-        }
-        f.finish()
-    }
-}
-
-impl AnchorRangeSet {
-    pub fn len(&self) -> usize {
-        self.0.len()
-    }
-
-    pub fn version(&self) -> &clock::Global {
-        self.0.version()
-    }
-
-    pub fn ranges<'a, D>(&'a self, content: &'a Snapshot) -> impl 'a + Iterator<Item = Range<Point>>
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        self.0.ranges(content).map(|(range, _)| range)
-    }
-}
-
-impl<T: Clone> Default for AnchorRangeMultimap<T> {
-    fn default() -> Self {
-        Self {
-            entries: Default::default(),
-            version: Default::default(),
-            start_bias: Bias::Left,
-            end_bias: Bias::Left,
-        }
-    }
-}
-
-impl<T: Clone> AnchorRangeMultimap<T> {
-    pub fn version(&self) -> &clock::Global {
-        &self.version
-    }
-
-    pub fn intersecting_ranges<'a, I, O>(
-        &'a self,
-        range: Range<I>,
-        content: &'a Snapshot,
-        inclusive: bool,
-    ) -> impl Iterator<Item = (usize, Range<O>, &T)> + 'a
-    where
-        I: ToOffset,
-        O: FromAnchor,
-    {
-        let end_bias = if inclusive { Bias::Right } else { Bias::Left };
-        let range = range.start.to_full_offset(&content, Bias::Left)
-            ..range.end.to_full_offset(&content, end_bias);
-        let mut cursor = self.entries.filter::<_, usize>(
-            {
-                let mut endpoint = Anchor {
-                    full_offset: FullOffset(0),
-                    bias: Bias::Right,
-                    version: self.version.clone(),
-                };
-                move |summary: &AnchorRangeMultimapSummary| {
-                    endpoint.full_offset = summary.max_end;
-                    endpoint.bias = self.end_bias;
-                    let max_end = endpoint.to_full_offset(&content, self.end_bias);
-                    let start_cmp = range.start.cmp(&max_end);
-
-                    endpoint.full_offset = summary.min_start;
-                    endpoint.bias = self.start_bias;
-                    let min_start = endpoint.to_full_offset(&content, self.start_bias);
-                    let end_cmp = range.end.cmp(&min_start);
-
-                    if inclusive {
-                        start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
-                    } else {
-                        start_cmp == Ordering::Less && end_cmp == Ordering::Greater
-                    }
-                }
-            },
-            &(),
-        );
-
-        std::iter::from_fn({
-            let mut endpoint = Anchor {
-                full_offset: FullOffset(0),
-                bias: Bias::Left,
-                version: self.version.clone(),
-            };
-            move || {
-                if let Some(item) = cursor.item() {
-                    let ix = *cursor.start();
-                    endpoint.full_offset = item.range.start;
-                    endpoint.bias = self.start_bias;
-                    let start = O::from_anchor(&endpoint, &content);
-                    endpoint.full_offset = item.range.end;
-                    endpoint.bias = self.end_bias;
-                    let end = O::from_anchor(&endpoint, &content);
-                    let value = &item.value;
-                    cursor.next(&());
-                    Some((ix, start..end, value))
-                } else {
-                    None
-                }
-            }
-        })
-    }
-
-    pub fn from_full_offset_ranges(
-        version: clock::Global,
-        start_bias: Bias,
-        end_bias: Bias,
-        entries: impl Iterator<Item = (Range<FullOffset>, T)>,
-    ) -> Self {
-        Self {
-            version,
-            start_bias,
-            end_bias,
-            entries: SumTree::from_iter(
-                entries.map(|(range, value)| AnchorRangeMultimapEntry {
-                    range: FullOffsetRange {
-                        start: range.start,
-                        end: range.end,
-                    },
-                    value,
-                }),
-                &(),
-            ),
-        }
-    }
-
-    pub fn full_offset_ranges(&self) -> impl Iterator<Item = (Range<FullOffset>, &T)> {
-        self.entries
-            .cursor::<()>()
-            .map(|entry| (entry.range.start..entry.range.end, &entry.value))
-    }
-
-    pub fn filter<'a, O, F>(
-        &'a self,
-        content: &'a Snapshot,
-        mut f: F,
-    ) -> impl 'a + Iterator<Item = (usize, Range<O>, &T)>
-    where
-        O: FromAnchor,
-        F: 'a + FnMut(&'a T) -> bool,
-    {
-        let mut endpoint = Anchor {
-            full_offset: FullOffset(0),
-            bias: Bias::Left,
-            version: self.version.clone(),
-        };
-        self.entries
-            .cursor::<()>()
-            .enumerate()
-            .filter_map(move |(ix, entry)| {
-                if f(&entry.value) {
-                    endpoint.full_offset = entry.range.start;
-                    endpoint.bias = self.start_bias;
-                    let start = O::from_anchor(&endpoint, &content);
-                    endpoint.full_offset = entry.range.end;
-                    endpoint.bias = self.end_bias;
-                    let end = O::from_anchor(&endpoint, &content);
-                    Some((ix, start..end, &entry.value))
-                } else {
-                    None
-                }
-            })
-    }
-}
-
-impl<T: Clone> sum_tree::Item for AnchorRangeMultimapEntry<T> {
-    type Summary = AnchorRangeMultimapSummary;
-
-    fn summary(&self) -> Self::Summary {
-        AnchorRangeMultimapSummary {
-            start: self.range.start,
-            end: self.range.end,
-            min_start: self.range.start,
-            max_end: self.range.end,
-            count: 1,
-        }
-    }
-}
-
-impl Default for AnchorRangeMultimapSummary {
-    fn default() -> Self {
-        Self {
-            start: FullOffset(0),
-            end: FullOffset::MAX,
-            min_start: FullOffset::MAX,
-            max_end: FullOffset(0),
-            count: 0,
-        }
-    }
-}
-
-impl sum_tree::Summary for AnchorRangeMultimapSummary {
-    type Context = ();
-
-    fn add_summary(&mut self, other: &Self, _: &Self::Context) {
-        self.min_start = self.min_start.min(other.min_start);
-        self.max_end = self.max_end.max(other.max_end);
-
-        #[cfg(debug_assertions)]
-        {
-            let start_comparison = self.start.cmp(&other.start);
-            assert!(start_comparison <= Ordering::Equal);
-            if start_comparison == Ordering::Equal {
-                assert!(self.end.cmp(&other.end) >= Ordering::Equal);
-            }
-        }
-
-        self.start = other.start;
-        self.end = other.end;
-        self.count += other.count;
-    }
-}
-
-impl Default for FullOffsetRange {
-    fn default() -> Self {
-        Self {
-            start: FullOffset(0),
-            end: FullOffset::MAX,
-        }
-    }
-}
-
-impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize {
-    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
-        *self += summary.count;
-    }
-}
-
-impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange {
-    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
-        self.start = summary.start;
-        self.end = summary.end;
-    }
-}
-
-impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange {
-    fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering {
-        Ord::cmp(&self.start, &cursor_location.start)
-            .then_with(|| Ord::cmp(&cursor_location.end, &self.end))
-    }
-}
-
 pub trait AnchorRangeExt {
-    fn cmp(&self, b: &Range<Anchor>, buffer: &Snapshot) -> Result<Ordering>;
-    fn to_offset(&self, content: &Snapshot) -> Range<usize>;
+    fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
+    fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
+    fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
 }
 
 impl AnchorRangeExt for Range<Anchor> {
-    fn cmp(&self, other: &Range<Anchor>, buffer: &Snapshot) -> Result<Ordering> {
+    fn cmp(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering> {
         Ok(match self.start.cmp(&other.start, buffer)? {
             Ordering::Equal => other.end.cmp(&self.end, buffer)?,
             ord @ _ => ord,
         })
     }
 
-    fn to_offset(&self, content: &Snapshot) -> Range<usize> {
+    fn to_offset(&self, content: &BufferSnapshot) -> Range<usize> {
         self.start.to_offset(&content)..self.end.to_offset(&content)
     }
+
+    fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
+        self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
+    }
 }
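
An anchor now records the local timestamp of the insertion it lives in, an offset within that insertion, and a bias, instead of a full offset paired with a buffer version. Comparison orders the owning fragments first, then the offsets, then the biases, so an anchor keeps tracking "its" text across edits. A rough sketch of that behavior, using `Buffer`, `History`, `anchor_before`/`anchor_after`, and `edit` as the tests later in this diff use them (the concrete offsets below follow from the usual anchor semantics and are not taken from this change):

    let mut buffer = Buffer::new(0, 0, History::new("hello world".into()));
    // Anchor the word "world".
    let range = buffer.anchor_before(6)..buffer.anchor_after(11);
    // Replace "hello"; the anchored range shifts along with the text.
    buffer.edit(vec![0..5], "goodbye");
    assert_eq!(range.to_offset(&buffer), 8..13); // still covers "world"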

crates/text/src/locator.rs 🔗

@@ -0,0 +1,89 @@
+use lazy_static::lazy_static;
+use smallvec::{smallvec, SmallVec};
+use std::iter;
+
+lazy_static! {
+    pub static ref MIN: Locator = Locator::min();
+    pub static ref MAX: Locator = Locator::max();
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Locator(SmallVec<[u64; 4]>);
+
+impl Locator {
+    pub fn min() -> Self {
+        Self(smallvec![u64::MIN])
+    }
+
+    pub fn max() -> Self {
+        Self(smallvec![u64::MAX])
+    }
+
+    pub fn assign(&mut self, other: &Self) {
+        self.0.resize(other.0.len(), 0);
+        self.0.copy_from_slice(&other.0);
+    }
+
+    pub fn between(lhs: &Self, rhs: &Self) -> Self {
+        let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
+        let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
+        let mut location = SmallVec::new();
+        for (lhs, rhs) in lhs.zip(rhs) {
+            let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
+            location.push(mid);
+            if mid > lhs {
+                break;
+            }
+        }
+        Self(location)
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+}
+
+impl Default for Locator {
+    fn default() -> Self {
+        Self::min()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::prelude::*;
+    use std::mem;
+
+    #[gpui::test(iterations = 100)]
+    fn test_locators(mut rng: StdRng) {
+        let mut lhs = Default::default();
+        let mut rhs = Default::default();
+        while lhs == rhs {
+            lhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+            rhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+        }
+
+        if lhs > rhs {
+            mem::swap(&mut lhs, &mut rhs);
+        }
+
+        let middle = Locator::between(&lhs, &rhs);
+        assert!(middle > lhs);
+        assert!(middle < rhs);
+        for ix in 0..middle.0.len() - 1 {
+            assert!(
+                middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
+                    || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
+            );
+        }
+    }
+}
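
Locators are the dense, variable-length identifiers that fragments are ordered by, and they are what anchors fall back to comparing when they come from different insertions (see `fragment_id_for_anchor` above): between any two distinct locators another one can always be generated. An illustration using only the API above:

    let min = Locator::min();
    let max = Locator::max();
    // `between` always produces a locator strictly inside the open interval.
    let mid = Locator::between(&min, &max);
    assert!(min < mid && mid < max);
    // The process can be repeated indefinitely to keep making room.
    let lower = Locator::between(&min, &mid);
    assert!(min < lower && lower < mid);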

crates/text/src/operation_queue.rs 🔗

@@ -1,9 +1,15 @@
-use super::Operation;
 use std::{fmt::Debug, ops::Add};
-use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+
+pub trait Operation: Clone + Debug {
+    fn lamport_timestamp(&self) -> clock::Lamport;
+}
+
+#[derive(Clone, Debug)]
+struct OperationItem<T>(T);
 
 #[derive(Clone, Debug)]
-pub struct OperationQueue(SumTree<Operation>);
+pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);
 
 #[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
 pub struct OperationKey(clock::Lamport);
@@ -20,7 +26,7 @@ impl OperationKey {
     }
 }
 
-impl OperationQueue {
+impl<T: Operation> OperationQueue<T> {
     pub fn new() -> Self {
         OperationQueue(SumTree::new())
     }
@@ -29,11 +35,15 @@ impl OperationQueue {
         self.0.summary().len
     }
 
-    pub fn insert(&mut self, mut ops: Vec<Operation>) {
+    pub fn insert(&mut self, mut ops: Vec<T>) {
         ops.sort_by_key(|op| op.lamport_timestamp());
         ops.dedup_by_key(|op| op.lamport_timestamp());
-        self.0
-            .edit(ops.into_iter().map(Edit::Insert).collect(), &());
+        self.0.edit(
+            ops.into_iter()
+                .map(|op| Edit::Insert(OperationItem(op)))
+                .collect(),
+            &(),
+        );
     }
 
     pub fn drain(&mut self) -> Self {
@@ -42,8 +52,8 @@ impl OperationQueue {
         clone
     }
 
-    pub fn cursor(&self) -> Cursor<Operation, ()> {
-        self.0.cursor()
+    pub fn iter(&self) -> impl Iterator<Item = &T> {
+        self.0.cursor::<()>().map(|i| &i.0)
     }
 }
 
@@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey {
     }
 }
 
-impl Item for Operation {
+impl<T: Operation> Item for OperationItem<T> {
     type Summary = OperationSummary;
 
     fn summary(&self) -> Self::Summary {
         OperationSummary {
-            key: OperationKey::new(self.lamport_timestamp()),
+            key: OperationKey::new(self.0.lamport_timestamp()),
             len: 1,
         }
     }
 }
 
-impl KeyedItem for Operation {
+impl<T: Operation> KeyedItem for OperationItem<T> {
     type Key = OperationKey;
 
     fn key(&self) -> Self::Key {
-        OperationKey::new(self.lamport_timestamp())
+        OperationKey::new(self.0.lamport_timestamp())
     }
 }
 
@@ -107,21 +117,27 @@ mod tests {
         assert_eq!(queue.len(), 0);
 
         queue.insert(vec![
-            Operation::Test(clock.tick()),
-            Operation::Test(clock.tick()),
+            TestOperation(clock.tick()),
+            TestOperation(clock.tick()),
         ]);
         assert_eq!(queue.len(), 2);
 
-        queue.insert(vec![Operation::Test(clock.tick())]);
+        queue.insert(vec![TestOperation(clock.tick())]);
         assert_eq!(queue.len(), 3);
 
         drop(queue.drain());
         assert_eq!(queue.len(), 0);
 
-        queue.insert(vec![Operation::Test(clock.tick())]);
+        queue.insert(vec![TestOperation(clock.tick())]);
         assert_eq!(queue.len(), 1);
     }
 
     #[derive(Clone, Debug, Eq, PartialEq)]
     struct TestOperation(clock::Lamport);
+
+    impl Operation for TestOperation {
+        fn lamport_timestamp(&self) -> clock::Lamport {
+            self.0
+        }
+    }
 }
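
`OperationQueue` is now generic over anything implementing the new `operation_queue::Operation` trait, so the queue no longer depends on the text crate's own operation enum. A minimal sketch of queueing a custom operation type (`MyOperation` and `flush` are illustrative names, not part of this change):

    #[derive(Clone, Debug)]
    struct MyOperation {
        timestamp: clock::Lamport,
    }

    impl Operation for MyOperation {
        fn lamport_timestamp(&self) -> clock::Lamport {
            self.timestamp
        }
    }

    // Insertions are sorted and de-duplicated by Lamport timestamp; `drain`
    // hands back everything queued so far and leaves the queue empty.
    fn flush(queue: &mut OperationQueue<MyOperation>) -> Vec<MyOperation> {
        queue.drain().iter().cloned().collect()
    }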

crates/text/src/patch.rs 🔗

@@ -9,7 +9,8 @@ pub struct Patch<T>(Vec<Edit<T>>);
 
 impl<T> Patch<T>
 where
-    T: Clone
+    T: 'static
+        + Clone
         + Copy
         + Ord
         + Sub<T, Output = T>
@@ -33,13 +34,17 @@ where
         Self(edits)
     }
 
+    pub fn edits(&self) -> &[Edit<T>] {
+        &self.0
+    }
+
     pub fn into_inner(self) -> Vec<Edit<T>> {
         self.0
     }
 
-    pub fn compose(&self, other: &Self) -> Self {
+    pub fn compose(&self, new_edits_iter: impl IntoIterator<Item = Edit<T>>) -> Self {
         let mut old_edits_iter = self.0.iter().cloned().peekable();
-        let mut new_edits_iter = other.0.iter().cloned().peekable();
+        let mut new_edits_iter = new_edits_iter.into_iter().peekable();
         let mut composed = Patch(Vec::new());
 
         let mut old_start = T::default();
@@ -196,6 +201,33 @@ where
     }
 }
 
+impl<T: Clone> IntoIterator for Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::vec::IntoIter<Edit<T>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl<'a, T: Clone> IntoIterator for &'a Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter().cloned()
+    }
+}
+
+impl<'a, T: Clone> IntoIterator for &'a mut Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter().cloned()
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
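
`compose` now accepts any `IntoIterator` of edits rather than only another `Patch`, and the new `IntoIterator` impls let a patch itself be used on either side; this is what allows `Topic::publish` (in subscription.rs below) to fold raw edit batches straight into each subscriber's accumulated patch. Sketched usage, assuming a `Patch::new` constructor and public `old`/`new` range fields on `Edit`, as the tests in this crate use them:

    let patch = Patch::new(vec![Edit { old: 2..4, new: 2..5 }]);
    // The argument can be a Vec, a slice iterator, or another Patch by reference.
    let composed = patch.compose(vec![Edit { old: 0..0, new: 0..1 }]);
    let _edits: &[Edit<i32>] = composed.edits();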

crates/text/src/point.rs 🔗

@@ -35,6 +35,14 @@ impl Point {
     pub fn is_zero(&self) -> bool {
         self.row == 0 && self.column == 0
     }
+
+    pub fn saturating_sub(self, other: Self) -> Self {
+        if self < other {
+            Self::zero()
+        } else {
+            self - other
+        }
+    }
 }
 
 impl<'a> Add<&'a Self> for Point {

crates/text/src/point_utf16.rs 🔗

@@ -26,6 +26,14 @@ impl PointUtf16 {
     pub fn is_zero(&self) -> bool {
         self.row == 0 && self.column == 0
     }
+
+    pub fn saturating_sub(self, other: Self) -> Self {
+        if self < other {
+            Self::zero()
+        } else {
+            self - other
+        }
+    }
 }
 
 impl<'a> Add<&'a Self> for PointUtf16 {
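
Both `Point` and `PointUtf16` gain a `saturating_sub` that clamps at the origin instead of underflowing, so callers no longer need to guarantee the operands are ordered. For example:

    // Underflow clamps to zero rather than misbehaving.
    assert_eq!(Point::new(1, 2).saturating_sub(Point::new(3, 0)), Point::zero());
    // When no underflow occurs, it matches the existing subtraction.
    assert_eq!(Point::new(2, 5).saturating_sub(Point::new(2, 3)), Point::new(0, 2));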

crates/text/src/random_char_iter.rs 🔗

@@ -12,14 +12,22 @@ impl<T: Rng> Iterator for RandomCharIter<T> {
     type Item = char;
 
     fn next(&mut self) -> Option<Self::Item> {
+        if std::env::var("SIMPLE_TEXT").map_or(false, |v| !v.is_empty()) {
+            return if self.0.gen_range(0..100) < 5 {
+                Some('\n')
+            } else {
+                Some(self.0.gen_range(b'a'..b'z' + 1).into())
+            };
+        }
+
         match self.0.gen_range(0..100) {
             // whitespace
             0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(),
             // two-byte greek letters
             20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
             // three-byte characters
             33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
             // four-byte characters
             46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
             // ascii letters
             _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),

crates/text/src/rope.rs 🔗

@@ -205,6 +205,19 @@ impl Rope {
                 .map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot))
     }
 
+    pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point {
+        if point >= self.summary().lines_utf16 {
+            return self.summary().lines;
+        }
+        let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>();
+        cursor.seek(&point, Bias::Left, &());
+        let overshoot = point - cursor.start().0;
+        cursor.start().1
+            + cursor
+                .item()
+                .map_or(Point::zero(), |chunk| chunk.point_utf16_to_point(overshoot))
+    }
+
     pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
         let mut cursor = self.chunks.cursor::<usize>();
         cursor.seek(&offset, Bias::Left, &());
@@ -327,7 +340,7 @@ impl<'a> Cursor<'a> {
         slice
     }
 
-    pub fn summary<D: TextDimension<'a>>(&mut self, end_offset: usize) -> D {
+    pub fn summary<D: TextDimension>(&mut self, end_offset: usize) -> D {
         debug_assert!(end_offset >= self.offset);
 
         let mut summary = D::default();
@@ -583,6 +596,28 @@ impl Chunk {
         offset
     }
 
+    fn point_utf16_to_point(&self, target: PointUtf16) -> Point {
+        let mut point = Point::zero();
+        let mut point_utf16 = PointUtf16::zero();
+        for ch in self.0.chars() {
+            if point_utf16 >= target {
+                if point_utf16 > target {
+                    panic!("point {:?} is inside of character {:?}", target, ch);
+                }
+                break;
+            }
+
+            if ch == '\n' {
+                point_utf16 += PointUtf16::new(1, 0);
+                point += Point::new(1, 0);
+            } else {
+                point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32);
+                point += Point::new(0, ch.len_utf8() as u32);
+            }
+        }
+        point
+    }
+
     fn clip_point(&self, target: Point, bias: Bias) -> Point {
         for (row, line) in self.0.split('\n').enumerate() {
             if row == target.row as usize {
@@ -685,6 +720,15 @@ impl sum_tree::Summary for TextSummary {
     }
 }
 
+impl<'a> std::ops::Add<Self> for TextSummary {
+    type Output = Self;
+
+    fn add(mut self, rhs: Self) -> Self::Output {
+        self.add_assign(&rhs);
+        self
+    }
+}
+
 impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
     fn add_assign(&mut self, other: &'a Self) {
         let joined_chars = self.last_line_chars + other.first_line_chars;
@@ -719,12 +763,12 @@ impl std::ops::AddAssign<Self> for TextSummary {
     }
 }
 
-pub trait TextDimension<'a>: Dimension<'a, TextSummary> {
+pub trait TextDimension: 'static + for<'a> Dimension<'a, TextSummary> {
     fn from_text_summary(summary: &TextSummary) -> Self;
     fn add_assign(&mut self, other: &Self);
 }
 
-impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1, D2) {
+impl<'a, D1: TextDimension, D2: TextDimension> TextDimension for (D1, D2) {
     fn from_text_summary(summary: &TextSummary) -> Self {
         (
             D1::from_text_summary(summary),
@@ -738,7 +782,7 @@ impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1
     }
 }
 
-impl<'a> TextDimension<'a> for TextSummary {
+impl TextDimension for TextSummary {
     fn from_text_summary(summary: &TextSummary) -> Self {
         summary.clone()
     }
@@ -754,7 +798,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for usize {
     }
 }
 
-impl<'a> TextDimension<'a> for usize {
+impl TextDimension for usize {
     fn from_text_summary(summary: &TextSummary) -> Self {
         summary.bytes
     }
@@ -770,7 +814,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for Point {
     }
 }
 
-impl<'a> TextDimension<'a> for Point {
+impl TextDimension for Point {
     fn from_text_summary(summary: &TextSummary) -> Self {
         summary.lines
     }
@@ -786,7 +830,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for PointUtf16 {
     }
 }
 
-impl<'a> TextDimension<'a> for PointUtf16 {
+impl TextDimension for PointUtf16 {
     fn from_text_summary(summary: &TextSummary) -> Self {
         summary.lines_utf16
     }
@@ -949,6 +993,22 @@ mod tests {
                 }
             }
 
+            let mut point_utf16 = PointUtf16::zero();
+            for unit in expected.encode_utf16() {
+                let left_point = actual.clip_point_utf16(point_utf16, Bias::Left);
+                let right_point = actual.clip_point_utf16(point_utf16, Bias::Right);
+                assert!(right_point >= left_point);
+                // Ensure translating UTF-16 points to offsets doesn't panic.
+                actual.point_utf16_to_offset(left_point);
+                actual.point_utf16_to_offset(right_point);
+
+                if unit == b'\n' as u16 {
+                    point_utf16 += PointUtf16::new(1, 0);
+                } else {
+                    point_utf16 += PointUtf16::new(0, 1);
+                }
+            }
+
             for _ in 0..5 {
                 let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
                 let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
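
`point_utf16_to_point` converts a UTF-16 position (the coordinate space language servers report in) to the buffer's native UTF-8 `Point`, seeking chunk by chunk and then walking chars inside the final chunk. A small illustration of the unit mismatch it bridges, assuming the usual `Rope::new`/`push` constructors from this module:

    let mut rope = Rope::new();
    rope.push("a😀b");
    // '😀' is 2 UTF-16 code units but 4 UTF-8 bytes, so UTF-16 column 3
    // lands on byte column 5.
    assert_eq!(rope.point_utf16_to_point(PointUtf16::new(0, 3)), Point::new(0, 5));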

crates/text/src/selection.rs 🔗

@@ -1,12 +1,6 @@
-use sum_tree::Bias;
-
-use crate::{rope::TextDimension, Snapshot};
-
-use super::{AnchorRangeMap, Buffer, Point, ToOffset, ToPoint};
-use std::{cmp::Ordering, ops::Range, sync::Arc};
-
-pub type SelectionSetId = clock::Lamport;
-pub type SelectionsVersion = usize;
+use crate::Anchor;
+use crate::{rope::TextDimension, BufferSnapshot, ToOffset, ToPoint};
+use std::cmp::Ordering;
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum SelectionGoal {
@@ -24,20 +18,6 @@ pub struct Selection<T> {
     pub goal: SelectionGoal,
 }
 
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct SelectionSet {
-    pub id: SelectionSetId,
-    pub active: bool,
-    pub selections: Arc<AnchorRangeMap<SelectionState>>,
-}
-
-#[derive(Debug, Eq, PartialEq)]
-pub struct SelectionState {
-    pub id: usize,
-    pub reversed: bool,
-    pub goal: SelectionGoal,
-}
-
 impl<T: Clone> Selection<T> {
     pub fn head(&self) -> T {
         if self.reversed {
@@ -76,98 +56,19 @@ impl<T: ToOffset + ToPoint + Copy + Ord> Selection<T> {
             self.end = head;
         }
     }
-
-    pub fn point_range(&self, buffer: &Buffer) -> Range<Point> {
-        let start = self.start.to_point(buffer);
-        let end = self.end.to_point(buffer);
-        if self.reversed {
-            end..start
-        } else {
-            start..end
-        }
-    }
-
-    pub fn offset_range(&self, buffer: &Buffer) -> Range<usize> {
-        let start = self.start.to_offset(buffer);
-        let end = self.end.to_offset(buffer);
-        if self.reversed {
-            end..start
-        } else {
-            start..end
-        }
-    }
 }
 
-impl SelectionSet {
-    pub fn len(&self) -> usize {
-        self.selections.len()
-    }
-
-    pub fn selections<'a, D>(
+impl Selection<Anchor> {
+    pub fn resolve<'a, D: 'a + TextDimension>(
         &'a self,
-        content: &'a Snapshot,
-    ) -> impl 'a + Iterator<Item = Selection<D>>
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        self.selections
-            .ranges(content)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
-    }
-
-    pub fn intersecting_selections<'a, D, I>(
-        &'a self,
-        range: Range<(I, Bias)>,
-        content: &'a Snapshot,
-    ) -> impl 'a + Iterator<Item = Selection<D>>
-    where
-        D: 'a + TextDimension<'a>,
-        I: 'a + ToOffset,
-    {
-        self.selections
-            .intersecting_ranges(range, content)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
-    }
-
-    pub fn oldest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option<Selection<D>>
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        self.selections
-            .min_by_key(content, |selection| selection.id)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
-    }
-
-    pub fn newest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option<Selection<D>>
-    where
-        D: 'a + TextDimension<'a>,
-    {
-        self.selections
-            .max_by_key(content, |selection| selection.id)
-            .map(|(range, state)| Selection {
-                id: state.id,
-                start: range.start,
-                end: range.end,
-                reversed: state.reversed,
-                goal: state.goal,
-            })
+        snapshot: &'a BufferSnapshot,
+    ) -> Selection<D> {
+        Selection {
+            id: self.id,
+            start: snapshot.summary_for_anchor(&self.start),
+            end: snapshot.summary_for_anchor(&self.end),
+            reversed: self.reversed,
+            goal: self.goal,
+        }
     }
 }
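
With the per-buffer `SelectionSet` machinery gone, a selection is stored as a pair of anchors and resolved on demand into whatever dimension the caller needs. A rough sketch, assuming a `Buffer` built as in the tests later in this diff and the usual `SelectionGoal::None` variant:

    let selection = Selection {
        id: 0,
        start: buffer.anchor_before(2),
        end: buffer.anchor_after(4),
        reversed: false,
        goal: SelectionGoal::None,
    };
    // Resolve against the snapshot to get concrete offsets (or Points, etc.).
    let resolved: Selection<usize> = selection.resolve(&buffer);
    assert_eq!(resolved.start..resolved.end, 2..4);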

crates/text/src/subscription.rs 🔗

@@ -0,0 +1,48 @@
+use crate::{Edit, Patch};
+use parking_lot::Mutex;
+use std::{
+    mem,
+    sync::{Arc, Weak},
+};
+
+#[derive(Default)]
+pub struct Topic(Mutex<Vec<Weak<Mutex<Patch<usize>>>>>);
+
+pub struct Subscription(Arc<Mutex<Patch<usize>>>);
+
+impl Topic {
+    pub fn subscribe(&mut self) -> Subscription {
+        let subscription = Subscription(Default::default());
+        self.0.get_mut().push(Arc::downgrade(&subscription.0));
+        subscription
+    }
+
+    pub fn publish(&self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+        publish(&mut *self.0.lock(), edits);
+    }
+
+    pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+        publish(self.0.get_mut(), edits);
+    }
+}
+
+impl Subscription {
+    pub fn consume(&self) -> Patch<usize> {
+        mem::take(&mut *self.0.lock())
+    }
+}
+
+fn publish(
+    subscriptions: &mut Vec<Weak<Mutex<Patch<usize>>>>,
+    edits: impl Clone + IntoIterator<Item = Edit<usize>>,
+) {
+    subscriptions.retain(|subscription| {
+        if let Some(subscription) = subscription.upgrade() {
+            let mut patch = subscription.lock();
+            *patch = patch.compose(edits.clone());
+            true
+        } else {
+            false
+        }
+    });
+}
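
`Topic` replaces the raw `Vec<Weak<Mutex<...>>>` field the buffer used to carry (see text.rs below): each `Subscription` accumulates published edits into one composed `Patch` until it is consumed, and dropped subscriptions are pruned lazily on the next publish. Sketched usage, with the same `Edit { old, new }` shape assumed above:

    let mut topic = Topic::default();
    let subscription = topic.subscribe();

    // Publishing composes the edits into every live subscriber's patch.
    topic.publish(vec![Edit { old: 0..0, new: 0..5 }]);
    topic.publish(vec![Edit { old: 5..5, new: 5..6 }]);

    // Consuming takes the accumulated patch and resets it to empty.
    let accumulated = subscription.consume();
    assert!(!accumulated.edits().is_empty());
    assert!(subscription.consume().edits().is_empty());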

crates/text/src/tests.rs 🔗

@@ -78,6 +78,8 @@ fn test_random_edits(mut rng: StdRng) {
             TextSummary::from(&reference_string[range])
         );
 
+        buffer.check_invariants();
+
         if rng.gen_bool(0.3) {
             buffer_versions.push((buffer.clone(), buffer.subscribe()));
         }
@@ -102,6 +104,32 @@ fn test_random_edits(mut rng: StdRng) {
         }
         assert_eq!(text.to_string(), buffer.text());
 
+        for _ in 0..5 {
+            let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right);
+            let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+            let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix);
+            let mut old_text = old_buffer.text_for_range(range.clone()).collect::<String>();
+            let edits = buffer
+                .edits_since_in_range::<usize>(&old_buffer.version, range.clone())
+                .collect::<Vec<_>>();
+            log::info!(
+                "applying edits since version {:?} to old text in range {:?}: {:?}: {:?}",
+                old_buffer.version(),
+                start_ix..end_ix,
+                old_text,
+                edits,
+            );
+
+            let new_text = buffer.text_for_range(range).collect::<String>();
+            for edit in edits {
+                old_text.replace_range(
+                    edit.new.start..edit.new.start + edit.old_len(),
+                    &new_text[edit.new],
+                );
+            }
+            assert_eq!(old_text, new_text);
+        }
+
         let subscription_edits = subscription.consume();
         log::info!(
             "applying subscription edits since version {:?} to old text: {:?}: {:?}",
@@ -432,63 +460,41 @@ fn test_history() {
     let mut now = Instant::now();
     let mut buffer = Buffer::new(0, 0, History::new("123456".into()));
 
-    let set_id = if let Operation::UpdateSelections { set_id, .. } =
-        buffer.add_selection_set(&buffer.selections_from_ranges(vec![4..4]).unwrap())
-    {
-        set_id
-    } else {
-        unreachable!()
-    };
-    buffer.start_transaction_at(Some(set_id), now).unwrap();
+    buffer.start_transaction_at(now);
     buffer.edit(vec![2..4], "cd");
-    buffer.end_transaction_at(Some(set_id), now).unwrap();
+    buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56");
-    assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
 
-    buffer.start_transaction_at(Some(set_id), now).unwrap();
-    buffer
-        .update_selection_set(set_id, &buffer.selections_from_ranges(vec![1..3]).unwrap())
-        .unwrap();
+    buffer.start_transaction_at(now);
     buffer.edit(vec![4..5], "e");
-    buffer.end_transaction_at(Some(set_id), now).unwrap();
+    buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "12cde6");
-    assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
 
     now += buffer.history.group_interval + Duration::from_millis(1);
-    buffer.start_transaction_at(Some(set_id), now).unwrap();
-    buffer
-        .update_selection_set(set_id, &buffer.selections_from_ranges(vec![2..2]).unwrap())
-        .unwrap();
+    buffer.start_transaction_at(now);
     buffer.edit(vec![0..1], "a");
     buffer.edit(vec![1..1], "b");
-    buffer.end_transaction_at(Some(set_id), now).unwrap();
+    buffer.end_transaction_at(now).unwrap();
     assert_eq!(buffer.text(), "ab2cde6");
-    assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
 
-    // Last transaction happened past the group interval, undo it on its
-    // own.
+    // Last transaction happened past the group interval, undo it on its own.
     buffer.undo();
     assert_eq!(buffer.text(), "12cde6");
-    assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
 
-    // First two transactions happened within the group interval, undo them
-    // together.
+    // First two transactions happened within the group interval, undo them together.
     buffer.undo();
     assert_eq!(buffer.text(), "123456");
-    assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]);
 
     // Redo the first two transactions together.
     buffer.redo();
     assert_eq!(buffer.text(), "12cde6");
-    assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]);
 
     // Redo the last transaction on its own.
     buffer.redo();
     assert_eq!(buffer.text(), "ab2cde6");
-    assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]);
 
-    buffer.start_transaction_at(None, now).unwrap();
-    assert!(buffer.end_transaction_at(None, now).is_none());
+    buffer.start_transaction_at(now);
+    assert!(buffer.end_transaction_at(now).is_none());
     buffer.undo();
     assert_eq!(buffer.text(), "12cde6");
 }
@@ -554,8 +560,8 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
         let buffer = &mut buffers[replica_index];
         match rng.gen_range(0..=100) {
             0..=50 if mutation_count != 0 => {
-                let ops = buffer.randomly_mutate(&mut rng);
-                network.broadcast(buffer.replica_id, ops);
+                let op = buffer.randomly_edit(&mut rng, 5).2;
+                network.broadcast(buffer.replica_id, vec![op]);
                 log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
                 mutation_count -= 1;
             }
@@ -577,6 +583,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
             }
             _ => {}
         }
+        buffer.check_invariants();
 
         if mutation_count == 0 && network.is_idle() {
             break;
@@ -591,18 +598,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
             "Replica {} text != Replica 0 text",
             buffer.replica_id
         );
-        assert_eq!(
-            buffer.selection_sets().collect::<HashMap<_, _>>(),
-            first_buffer.selection_sets().collect::<HashMap<_, _>>()
-        );
-        assert_eq!(
-            buffer
-                .all_selection_ranges::<usize>()
-                .collect::<HashMap<_, _>>(),
-            first_buffer
-                .all_selection_ranges::<usize>()
-                .collect::<HashMap<_, _>>()
-        );
+        buffer.check_invariants();
     }
 }
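
The `check_invariants` helper added in the next hunk leans on two properties of the new `Locator` fragment ids: they are comparable with `<`, and `Locator::between` yields an id strictly between its arguments. A minimal sketch of that contract (not part of this diff; it assumes `Locator` is reachable as `text::locator::Locator` and that the only calls used are the ones this diff itself exercises: `min`, `max`, `between`, and ordering comparisons):

```rust
use text::locator::Locator;

fn locator_ordering_sketch() {
    let lo = Locator::min();
    let hi = Locator::max();

    // `between` produces an id strictly inside the given pair, so a fragment can be
    // split, or a new fragment inserted between two neighbors, without renumbering
    // any existing fragment ids.
    let mid = Locator::between(&lo, &hi);
    assert!(lo < mid && mid < hi);

    let quarter = Locator::between(&lo, &mid);
    assert!(lo < quarter && quarter < mid);
}
```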
 
@@ -618,6 +614,39 @@ struct Network<T: Clone, R: rand::Rng> {
     rng: R,
 }
 
+impl Buffer {
+    fn check_invariants(&self) {
+        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
+        // to an insertion fragment in the insertions tree.
+        let mut prev_fragment_id = Locator::min();
+        for fragment in self.snapshot.fragments.items(&None) {
+            assert!(fragment.id > prev_fragment_id);
+            prev_fragment_id = fragment.id.clone();
+
+            let insertion_fragment = self
+                .snapshot
+                .insertions
+                .get(
+                    &InsertionFragmentKey {
+                        timestamp: fragment.insertion_timestamp.local(),
+                        split_offset: fragment.insertion_offset,
+                    },
+                    &(),
+                )
+                .unwrap();
+            assert_eq!(insertion_fragment.fragment_id, fragment.id);
+        }
+
+        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>();
+        for insertion_fragment in self.snapshot.insertions.cursor::<()>() {
+            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
+            let fragment = cursor.item().unwrap();
+            assert_eq!(insertion_fragment.fragment_id, fragment.id);
+            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
+        }
+    }
+}
+
 impl<T: Clone, R: rand::Rng> Network<T, R> {
     fn new(rng: R) -> Self {
         Network {

crates/text/src/text.rs 🔗

@@ -1,5 +1,6 @@
 mod anchor;
-mod operation_queue;
+pub mod locator;
+pub mod operation_queue;
 mod patch;
 mod point;
 mod point_utf16;
@@ -7,15 +8,16 @@ mod point_utf16;
 pub mod random_char_iter;
 pub mod rope;
 mod selection;
+pub mod subscription;
 #[cfg(test)]
 mod tests;
 
 pub use anchor::*;
-use anyhow::{anyhow, Result};
+use anyhow::Result;
 use clock::ReplicaId;
 use collections::{HashMap, HashSet};
+use locator::Locator;
 use operation_queue::OperationQueue;
-use parking_lot::Mutex;
 pub use patch::Patch;
 pub use point::*;
 pub use point_utf16::*;
@@ -25,56 +27,55 @@ use rope::TextDimension;
 pub use rope::{Chunks, Rope, TextSummary};
 pub use selection::*;
 use std::{
-    cmp::{self, Reverse},
+    cmp::{self, Ordering},
     iter::Iterator,
     ops::{self, Deref, Range, Sub},
     str,
-    sync::{Arc, Weak},
+    sync::Arc,
     time::{Duration, Instant},
 };
+pub use subscription::*;
 pub use sum_tree::Bias;
 use sum_tree::{FilterCursor, SumTree};
 
+pub type TransactionId = usize;
+
 pub struct Buffer {
-    snapshot: Snapshot,
+    snapshot: BufferSnapshot,
     last_edit: clock::Local,
     history: History,
-    selections: HashMap<SelectionSetId, SelectionSet>,
-    deferred_ops: OperationQueue,
+    deferred_ops: OperationQueue<Operation>,
     deferred_replicas: HashSet<ReplicaId>,
     replica_id: ReplicaId,
     remote_id: u64,
     local_clock: clock::Local,
-    lamport_clock: clock::Lamport,
-    subscriptions: Vec<Weak<Mutex<Vec<Patch<usize>>>>>,
+    pub lamport_clock: clock::Lamport,
+    subscriptions: Topic,
 }
 
-#[derive(Clone)]
-pub struct Snapshot {
+#[derive(Clone, Debug)]
+pub struct BufferSnapshot {
+    replica_id: ReplicaId,
     visible_text: Rope,
     deleted_text: Rope,
     undo_map: UndoMap,
     fragments: SumTree<Fragment>,
+    insertions: SumTree<InsertionFragment>,
     pub version: clock::Global,
 }
 
 #[derive(Clone, Debug)]
 pub struct Transaction {
+    id: TransactionId,
     start: clock::Global,
     end: clock::Global,
     edits: Vec<clock::Local>,
     ranges: Vec<Range<FullOffset>>,
-    selections_before: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
-    selections_after: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
     first_edit_at: Instant,
     last_edit_at: Instant,
 }
 
 impl Transaction {
-    pub fn starting_selection_set_ids<'a>(&'a self) -> impl Iterator<Item = SelectionSetId> + 'a {
-        self.selections_before.keys().copied()
-    }
-
     fn push_edit(&mut self, edit: &EditOperation) {
         self.edits.push(edit.timestamp.local());
         self.end.observe(edit.timestamp.local());
@@ -131,6 +132,7 @@ pub struct History {
     redo_stack: Vec<Transaction>,
     transaction_depth: usize,
     group_interval: Duration,
+    next_transaction_id: TransactionId,
 }
 
 impl History {
@@ -142,6 +144,7 @@ impl History {
             redo_stack: Vec::new(),
             transaction_depth: 0,
             group_interval: Duration::from_millis(300),
+            next_transaction_id: 0,
         }
     }
 
@@ -149,32 +152,27 @@ impl History {
         self.ops.insert(op.timestamp.local(), op);
     }
 
-    fn start_transaction(
-        &mut self,
-        start: clock::Global,
-        selections_before: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
-        now: Instant,
-    ) {
+    fn start_transaction(&mut self, start: clock::Global, now: Instant) -> Option<TransactionId> {
         self.transaction_depth += 1;
         if self.transaction_depth == 1 {
+            let id = self.next_transaction_id;
+            self.next_transaction_id += 1;
             self.undo_stack.push(Transaction {
+                id,
                 start: start.clone(),
                 end: start,
                 edits: Vec::new(),
                 ranges: Vec::new(),
-                selections_before,
-                selections_after: Default::default(),
                 first_edit_at: now,
                 last_edit_at: now,
             });
+            Some(id)
+        } else {
+            None
         }
     }
 
-    fn end_transaction(
-        &mut self,
-        selections_after: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
-        now: Instant,
-    ) -> Option<&Transaction> {
+    fn end_transaction(&mut self, now: Instant) -> Option<&Transaction> {
         assert_ne!(self.transaction_depth, 0);
         self.transaction_depth -= 1;
         if self.transaction_depth == 0 {
@@ -183,7 +181,6 @@ impl History {
                 None
             } else {
                 let transaction = self.undo_stack.last_mut().unwrap();
-                transaction.selections_after = selections_after;
                 transaction.last_edit_at = now;
                 Some(transaction)
             }
@@ -192,7 +189,7 @@ impl History {
         }
     }
 
-    fn group(&mut self) {
+    fn group(&mut self) -> Option<TransactionId> {
         let mut new_len = self.undo_stack.len();
         let mut transactions = self.undo_stack.iter_mut();
 
@@ -219,14 +216,12 @@ impl History {
 
             if let Some(transaction) = transactions_to_merge.last_mut() {
                 last_transaction.last_edit_at = transaction.last_edit_at;
-                last_transaction
-                    .selections_after
-                    .extend(transaction.selections_after.drain());
                 last_transaction.end = transaction.end.clone();
             }
         }
 
         self.undo_stack.truncate(new_len);
+        self.undo_stack.last().map(|t| t.id)
     }
 
     fn push_undo(&mut self, edit_id: clock::Local) {
@@ -245,6 +240,17 @@ impl History {
         }
     }
 
+    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(transaction_ix) = self.undo_stack.iter().rposition(|t| t.id == transaction_id) {
+            let transaction = self.undo_stack.remove(transaction_ix);
+            self.redo_stack.push(transaction);
+            self.redo_stack.last()
+        } else {
+            None
+        }
+    }
+
     fn pop_redo(&mut self) -> Option<&Transaction> {
         assert_eq!(self.transaction_depth, 0);
         if let Some(transaction) = self.redo_stack.pop() {
@@ -254,6 +260,17 @@ impl History {
             None
         }
     }
+
+    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(transaction_ix) = self.redo_stack.iter().rposition(|t| t.id == transaction_id) {
+            let transaction = self.redo_stack.remove(transaction_ix);
+            self.undo_stack.push(transaction);
+            self.undo_stack.last()
+        } else {
+            None
+        }
+    }
 }
 
 #[derive(Clone, Default, Debug)]
@@ -294,7 +311,7 @@ impl UndoMap {
     }
 }
 
-struct Edits<'a, D: TextDimension<'a>, F: FnMut(&FragmentSummary) -> bool> {
+struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
     visible_cursor: rope::Cursor<'a>,
     deleted_cursor: rope::Cursor<'a>,
     fragments_cursor: Option<FilterCursor<'a, F, Fragment, FragmentTextSummary>>,
@@ -302,6 +319,7 @@ struct Edits<'a, D: TextDimension<'a>, F: FnMut(&FragmentSummary) -> bool> {
     since: &'a clock::Global,
     old_end: D,
     new_end: D,
+    range: Range<(&'a Locator, usize)>,
 }
 
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
@@ -342,21 +360,7 @@ impl<D1, D2> Edit<(D1, D2)> {
     }
 }
 
-#[derive(Clone, Default)]
-pub struct Subscription(Arc<Mutex<Vec<Patch<usize>>>>);
-
-impl Subscription {
-    pub fn consume(&self) -> Patch<usize> {
-        let mut patches = self.0.lock();
-        let mut changes = Patch::default();
-        for patch in patches.drain(..) {
-            changes = changes.compose(&patch);
-        }
-        changes
-    }
-}
-
-#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
+#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)]
 pub struct InsertionTimestamp {
     pub replica_id: ReplicaId,
     pub local: clock::Seq,
@@ -381,7 +385,9 @@ impl InsertionTimestamp {
 
 #[derive(Eq, PartialEq, Clone, Debug)]
 struct Fragment {
-    timestamp: InsertionTimestamp,
+    id: Locator,
+    insertion_timestamp: InsertionTimestamp,
+    insertion_offset: usize,
     len: usize,
     visible: bool,
     deletions: HashSet<clock::Local>,
@@ -391,6 +397,7 @@ struct Fragment {
 #[derive(Eq, PartialEq, Clone, Debug)]
 pub struct FragmentSummary {
     text: FragmentTextSummary,
+    max_id: Locator,
     max_version: clock::Global,
     min_insertion_version: clock::Global,
     max_insertion_version: clock::Global,
@@ -409,6 +416,19 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
     }
 }
 
+#[derive(Eq, PartialEq, Clone, Debug)]
+struct InsertionFragment {
+    timestamp: clock::Local,
+    split_offset: usize,
+    fragment_id: Locator,
+}
+
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct InsertionFragmentKey {
+    timestamp: clock::Local,
+    split_offset: usize,
+}
+
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub enum Operation {
     Edit(EditOperation),
@@ -416,21 +436,6 @@ pub enum Operation {
         undo: UndoOperation,
         lamport_timestamp: clock::Lamport,
     },
-    UpdateSelections {
-        set_id: SelectionSetId,
-        selections: Arc<AnchorRangeMap<SelectionState>>,
-        lamport_timestamp: clock::Lamport,
-    },
-    RemoveSelections {
-        set_id: SelectionSetId,
-        lamport_timestamp: clock::Lamport,
-    },
-    SetActiveSelections {
-        set_id: Option<SelectionSetId>,
-        lamport_timestamp: clock::Lamport,
-    },
-    #[cfg(test)]
-    Test(clock::Lamport),
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
@@ -452,43 +457,47 @@ pub struct UndoOperation {
 impl Buffer {
     pub fn new(replica_id: u16, remote_id: u64, history: History) -> Buffer {
         let mut fragments = SumTree::new();
+        let mut insertions = SumTree::new();
 
         let mut local_clock = clock::Local::new(replica_id);
         let mut lamport_clock = clock::Lamport::new(replica_id);
         let mut version = clock::Global::new();
         let visible_text = Rope::from(history.base_text.as_ref());
         if visible_text.len() > 0 {
-            let timestamp = InsertionTimestamp {
+            let insertion_timestamp = InsertionTimestamp {
                 replica_id: 0,
                 local: 1,
                 lamport: 1,
             };
-            local_clock.observe(timestamp.local());
-            lamport_clock.observe(timestamp.lamport());
-            version.observe(timestamp.local());
-            fragments.push(
-                Fragment {
-                    timestamp,
-                    len: visible_text.len(),
-                    visible: true,
-                    deletions: Default::default(),
-                    max_undos: Default::default(),
-                },
-                &None,
-            );
+            local_clock.observe(insertion_timestamp.local());
+            lamport_clock.observe(insertion_timestamp.lamport());
+            version.observe(insertion_timestamp.local());
+            let fragment_id = Locator::between(&Locator::min(), &Locator::max());
+            let fragment = Fragment {
+                id: fragment_id,
+                insertion_timestamp,
+                insertion_offset: 0,
+                len: visible_text.len(),
+                visible: true,
+                deletions: Default::default(),
+                max_undos: Default::default(),
+            };
+            insertions.push(InsertionFragment::new(&fragment), &());
+            fragments.push(fragment, &None);
         }
 
         Buffer {
-            snapshot: Snapshot {
+            snapshot: BufferSnapshot {
+                replica_id,
                 visible_text,
                 deleted_text: Rope::new(),
                 fragments,
+                insertions,
                 version,
                 undo_map: Default::default(),
             },
             last_edit: clock::Local::default(),
             history,
-            selections: Default::default(),
             deferred_ops: OperationQueue::new(),
             deferred_replicas: HashSet::default(),
             replica_id,
@@ -503,14 +512,8 @@ impl Buffer {
         self.version.clone()
     }
 
-    pub fn snapshot(&self) -> Snapshot {
-        Snapshot {
-            visible_text: self.visible_text.clone(),
-            deleted_text: self.deleted_text.clone(),
-            undo_map: self.undo_map.clone(),
-            fragments: self.fragments.clone(),
-            version: self.version.clone(),
-        }
+    pub fn snapshot(&self) -> BufferSnapshot {
+        self.snapshot.clone()
     }
 
     pub fn replica_id(&self) -> ReplicaId {
@@ -525,6 +528,10 @@ impl Buffer {
         self.deferred_ops.len()
     }
 
+    pub fn transaction_group_interval(&self) -> Duration {
+        self.history.group_interval
+    }
+
     pub fn edit<R, I, S, T>(&mut self, ranges: R, new_text: T) -> EditOperation
     where
         R: IntoIterator<IntoIter = I>,
@@ -540,7 +547,7 @@ impl Buffer {
             None
         };
 
-        self.start_transaction(None).unwrap();
+        self.start_transaction();
         let timestamp = InsertionTimestamp {
             replica_id: self.replica_id,
             local: self.local_clock.tick().value,
@@ -552,7 +559,7 @@ impl Buffer {
         self.history.push_undo(edit.timestamp.local());
         self.last_edit = edit.timestamp.local();
         self.snapshot.version.observe(edit.timestamp.local());
-        self.end_transaction(None);
+        self.end_transaction();
         edit
     }
 
@@ -569,6 +576,8 @@ impl Buffer {
             ranges: Vec::with_capacity(ranges.len()),
             new_text: None,
         };
+        let mut new_insertions = Vec::new();
+        let mut insertion_offset = 0;
 
         let mut ranges = ranges
             .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self))
@@ -594,6 +603,8 @@ impl Buffer {
                     if fragment_end > fragment_start {
                         let mut suffix = old_fragments.item().unwrap().clone();
                         suffix.len = fragment_end - fragment_start;
+                        suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                         new_ropes.push_fragment(&suffix, suffix.visible);
                         new_fragments.push(suffix, &None);
                     }
@@ -612,6 +623,9 @@ impl Buffer {
             if fragment_start < range.start {
                 let mut prefix = old_fragments.item().unwrap().clone();
                 prefix.len = range.start - fragment_start;
+                prefix.insertion_offset += fragment_start - old_fragments.start().visible;
+                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+                new_insertions.push(InsertionFragment::insert_new(&prefix));
                 new_ropes.push_fragment(&prefix, prefix.visible);
                 new_fragments.push(prefix, &None);
                 fragment_start = range.start;
@@ -624,17 +638,24 @@ impl Buffer {
                     old: fragment_start..fragment_start,
                     new: new_start..new_start + new_text.len(),
                 });
+                let fragment = Fragment {
+                    id: Locator::between(
+                        &new_fragments.summary().max_id,
+                        old_fragments
+                            .item()
+                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+                    ),
+                    insertion_timestamp: timestamp,
+                    insertion_offset,
+                    len: new_text.len(),
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                    visible: true,
+                };
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
                 new_ropes.push_str(new_text);
-                new_fragments.push(
-                    Fragment {
-                        timestamp,
-                        len: new_text.len(),
-                        deletions: Default::default(),
-                        max_undos: Default::default(),
-                        visible: true,
-                    },
-                    &None,
-                );
+                new_fragments.push(fragment, &None);
+                insertion_offset += new_text.len();
             }
 
             // Advance through every fragment that intersects this range, marking the intersecting
@@ -646,6 +667,9 @@ impl Buffer {
                 let intersection_end = cmp::min(range.end, fragment_end);
                 if fragment.visible {
                     intersection.len = intersection_end - fragment_start;
+                    intersection.insertion_offset += fragment_start - old_fragments.start().visible;
+                    intersection.id =
+                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                     intersection.deletions.insert(timestamp.local());
                     intersection.visible = false;
                 }
@@ -657,6 +681,7 @@ impl Buffer {
                             new: new_start..new_start,
                         });
                     }
+                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                     new_ropes.push_fragment(&intersection, fragment.visible);
                     new_fragments.push(intersection, &None);
                     fragment_start = intersection_end;
@@ -677,6 +702,8 @@ impl Buffer {
             if fragment_end > fragment_start {
                 let mut suffix = old_fragments.item().unwrap().clone();
                 suffix.len = fragment_end - fragment_start;
+                suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                new_insertions.push(InsertionFragment::insert_new(&suffix));
                 new_ropes.push_fragment(&suffix, suffix.visible);
                 new_fragments.push(suffix, &None);
             }
@@ -690,9 +717,10 @@ impl Buffer {
         drop(old_fragments);
 
         self.snapshot.fragments = new_fragments;
+        self.snapshot.insertions.edit(new_insertions, &());
         self.snapshot.visible_text = visible_text;
         self.snapshot.deleted_text = deleted_text;
-        self.update_subscriptions(edits);
+        self.subscriptions.publish_mut(&edits);
         edit_op.new_text = new_text;
         edit_op
     }
@@ -736,49 +764,6 @@ impl Buffer {
                     self.lamport_clock.observe(lamport_timestamp);
                 }
             }
-            Operation::UpdateSelections {
-                set_id,
-                selections,
-                lamport_timestamp,
-            } => {
-                if let Some(set) = self.selections.get_mut(&set_id) {
-                    set.selections = selections;
-                } else {
-                    self.selections.insert(
-                        set_id,
-                        SelectionSet {
-                            id: set_id,
-                            selections,
-                            active: false,
-                        },
-                    );
-                }
-                self.lamport_clock.observe(lamport_timestamp);
-            }
-            Operation::RemoveSelections {
-                set_id,
-                lamport_timestamp,
-            } => {
-                self.selections.remove(&set_id);
-                self.lamport_clock.observe(lamport_timestamp);
-            }
-            Operation::SetActiveSelections {
-                set_id,
-                lamport_timestamp,
-            } => {
-                for (id, set) in &mut self.selections {
-                    if id.replica_id == lamport_timestamp.replica_id {
-                        if Some(*id) == set_id {
-                            set.active = true;
-                        } else {
-                            set.active = false;
-                        }
-                    }
-                }
-                self.lamport_clock.observe(lamport_timestamp);
-            }
-            #[cfg(test)]
-            Operation::Test(_) => {}
         }
         Ok(())
     }
@@ -796,6 +781,8 @@ impl Buffer {
 
         let mut edits = Patch::default();
         let cx = Some(version.clone());
+        let mut new_insertions = Vec::new();
+        let mut insertion_offset = 0;
         let mut new_ropes =
             RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
         let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>();
@@ -819,6 +806,9 @@ impl Buffer {
                     if fragment_end > fragment_start {
                         let mut suffix = old_fragments.item().unwrap().clone();
                         suffix.len = fragment_end.0 - fragment_start.0;
+                        suffix.insertion_offset +=
+                            fragment_start - old_fragments.start().0.full_offset();
+                        new_insertions.push(InsertionFragment::insert_new(&suffix));
                         new_ropes.push_fragment(&suffix, suffix.visible);
                         new_fragments.push(suffix, &None);
                     }
@@ -837,6 +827,8 @@ impl Buffer {
             if fragment_end == range.start && fragment_end > fragment_start {
                 let mut fragment = old_fragments.item().unwrap().clone();
                 fragment.len = fragment_end.0 - fragment_start.0;
+                fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
                 new_ropes.push_fragment(&fragment, fragment.visible);
                 new_fragments.push(fragment, &None);
                 old_fragments.next(&cx);
@@ -847,7 +839,7 @@ impl Buffer {
             // timestamp.
             while let Some(fragment) = old_fragments.item() {
                 if fragment_start == range.start
-                    && fragment.timestamp.lamport() > timestamp.lamport()
+                    && fragment.insertion_timestamp.lamport() > timestamp.lamport()
                 {
                     new_ropes.push_fragment(fragment, fragment.visible);
                     new_fragments.push(fragment.clone(), &None);
@@ -863,6 +855,9 @@ impl Buffer {
             if fragment_start < range.start {
                 let mut prefix = old_fragments.item().unwrap().clone();
                 prefix.len = range.start.0 - fragment_start.0;
+                prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+                new_insertions.push(InsertionFragment::insert_new(&prefix));
                 fragment_start = range.start;
                 new_ropes.push_fragment(&prefix, prefix.visible);
                 new_fragments.push(prefix, &None);
@@ -879,17 +874,24 @@ impl Buffer {
                     old: old_start..old_start,
                     new: new_start..new_start + new_text.len(),
                 });
+                let fragment = Fragment {
+                    id: Locator::between(
+                        &new_fragments.summary().max_id,
+                        old_fragments
+                            .item()
+                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+                    ),
+                    insertion_timestamp: timestamp,
+                    insertion_offset,
+                    len: new_text.len(),
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                    visible: true,
+                };
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
                 new_ropes.push_str(new_text);
-                new_fragments.push(
-                    Fragment {
-                        timestamp,
-                        len: new_text.len(),
-                        deletions: Default::default(),
-                        max_undos: Default::default(),
-                        visible: true,
-                    },
-                    &None,
-                );
+                new_fragments.push(fragment, &None);
+                insertion_offset += new_text.len();
             }
 
             // Advance through every fragment that intersects this range, marking the intersecting
@@ -901,6 +903,10 @@ impl Buffer {
                 let intersection_end = cmp::min(range.end, fragment_end);
                 if fragment.was_visible(version, &self.undo_map) {
                     intersection.len = intersection_end.0 - fragment_start.0;
+                    intersection.insertion_offset +=
+                        fragment_start - old_fragments.start().0.full_offset();
+                    intersection.id =
+                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
                     intersection.deletions.insert(timestamp.local());
                     intersection.visible = false;
                 }
@@ -914,6 +920,7 @@ impl Buffer {
                             new: new_start..new_start,
                         });
                     }
+                    new_insertions.push(InsertionFragment::insert_new(&intersection));
                     new_ropes.push_fragment(&intersection, fragment.visible);
                     new_fragments.push(intersection, &None);
                     fragment_start = intersection_end;
@@ -931,6 +938,8 @@ impl Buffer {
             if fragment_end > fragment_start {
                 let mut suffix = old_fragments.item().unwrap().clone();
                 suffix.len = fragment_end.0 - fragment_start.0;
+                suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                new_insertions.push(InsertionFragment::insert_new(&suffix));
                 new_ropes.push_fragment(&suffix, suffix.visible);
                 new_fragments.push(suffix, &None);
             }
@@ -946,9 +955,10 @@ impl Buffer {
         self.snapshot.fragments = new_fragments;
         self.snapshot.visible_text = visible_text;
         self.snapshot.deleted_text = deleted_text;
+        self.snapshot.insertions.edit(new_insertions, &());
         self.local_clock.observe(timestamp.local());
         self.lamport_clock.observe(timestamp.lamport());
-        self.update_subscriptions(edits);
+        self.subscriptions.publish_mut(&edits);
     }
 
     fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> {
@@ -990,7 +1000,9 @@ impl Buffer {
                     let fragment_was_visible = fragment.visible;
 
                     if fragment.was_visible(&undo.version, &self.undo_map)
-                        || undo.counts.contains_key(&fragment.timestamp.local())
+                        || undo
+                            .counts
+                            .contains_key(&fragment.insertion_timestamp.local())
                     {
                         fragment.visible = fragment.is_visible(&self.undo_map);
                         fragment.max_undos.observe(undo.id);
@@ -1038,14 +1050,14 @@ impl Buffer {
         self.snapshot.fragments = new_fragments;
         self.snapshot.visible_text = visible_text;
         self.snapshot.deleted_text = deleted_text;
-        self.update_subscriptions(edits);
+        self.subscriptions.publish_mut(&edits);
         Ok(())
     }
 
     fn flush_deferred_ops(&mut self) -> Result<()> {
         self.deferred_replicas.clear();
         let mut deferred_ops = Vec::new();
-        for op in self.deferred_ops.drain().cursor().cloned() {
+        for op in self.deferred_ops.drain().iter().cloned() {
             if self.can_apply_op(&op) {
                 self.apply_op(op)?;
             } else {
@@ -1064,84 +1076,42 @@ impl Buffer {
             match op {
                 Operation::Edit(edit) => self.version.ge(&edit.version),
                 Operation::Undo { undo, .. } => self.version.ge(&undo.version),
-                Operation::UpdateSelections { selections, .. } => {
-                    self.version.ge(selections.version())
-                }
-                Operation::RemoveSelections { .. } => true,
-                Operation::SetActiveSelections { set_id, .. } => {
-                    set_id.map_or(true, |set_id| self.selections.contains_key(&set_id))
-                }
-                #[cfg(test)]
-                Operation::Test(_) => true,
             }
         }
     }
 
+    pub fn can_resolve(&self, anchor: &Anchor) -> bool {
+        *anchor == Anchor::min()
+            || *anchor == Anchor::max()
+            || self.version.observed(anchor.timestamp)
+    }
+
     pub fn peek_undo_stack(&self) -> Option<&Transaction> {
         self.history.undo_stack.last()
     }
 
-    pub fn start_transaction(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-    ) -> Result<()> {
-        self.start_transaction_at(selection_set_ids, Instant::now())
+    pub fn start_transaction(&mut self) -> Option<TransactionId> {
+        self.start_transaction_at(Instant::now())
     }
 
-    pub fn start_transaction_at(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-        now: Instant,
-    ) -> Result<()> {
-        let selections = selection_set_ids
-            .into_iter()
-            .map(|set_id| {
-                let set = self
-                    .selections
-                    .get(&set_id)
-                    .expect("invalid selection set id");
-                (set_id, set.selections.clone())
-            })
-            .collect();
-        self.history
-            .start_transaction(self.version.clone(), selections, now);
-        Ok(())
+    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
+        self.history.start_transaction(self.version.clone(), now)
     }
 
-    pub fn end_transaction(&mut self, selection_set_ids: impl IntoIterator<Item = SelectionSetId>) {
-        self.end_transaction_at(selection_set_ids, Instant::now());
+    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
+        self.end_transaction_at(Instant::now())
     }
 
-    pub fn end_transaction_at(
-        &mut self,
-        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
-        now: Instant,
-    ) -> Option<clock::Global> {
-        let selections = selection_set_ids
-            .into_iter()
-            .map(|set_id| {
-                let set = self
-                    .selections
-                    .get(&set_id)
-                    .expect("invalid selection set id");
-                (set_id, set.selections.clone())
-            })
-            .collect();
-
-        if let Some(transaction) = self.history.end_transaction(selections, now) {
+    pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
+        if let Some(transaction) = self.history.end_transaction(now) {
             let since = transaction.start.clone();
-            self.history.group();
-            Some(since)
+            let id = self.history.group().unwrap();
+            Some((id, since))
         } else {
             None
         }
     }
 
-    pub fn remove_peer(&mut self, replica_id: ReplicaId) {
-        self.selections
-            .retain(|set_id, _| set_id.replica_id != replica_id)
-    }
-
     pub fn base_text(&self) -> &Arc<str> {
         &self.history.base_text
     }
@@ -1150,28 +1120,42 @@ impl Buffer {
         self.history.ops.values()
     }
 
-    pub fn undo(&mut self) -> Vec<Operation> {
-        let mut ops = Vec::new();
+    pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
         if let Some(transaction) = self.history.pop_undo().cloned() {
-            let selections = transaction.selections_before.clone();
-            ops.push(self.undo_or_redo(transaction).unwrap());
-            for (set_id, selections) in selections {
-                ops.extend(self.restore_selection_set(set_id, selections));
-            }
+            let transaction_id = transaction.id;
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some((transaction_id, op))
+        } else {
+            None
         }
-        ops
     }
 
-    pub fn redo(&mut self) -> Vec<Operation> {
-        let mut ops = Vec::new();
+    pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
+        if let Some(transaction) = self.history.remove_from_undo(transaction_id).cloned() {
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some(op)
+        } else {
+            None
+        }
+    }
+
+    pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
         if let Some(transaction) = self.history.pop_redo().cloned() {
-            let selections = transaction.selections_after.clone();
-            ops.push(self.undo_or_redo(transaction).unwrap());
-            for (set_id, selections) in selections {
-                ops.extend(self.restore_selection_set(set_id, selections));
-            }
+            let transaction_id = transaction.id;
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some((transaction_id, op))
+        } else {
+            None
+        }
+    }
+
+    pub fn redo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
+        if let Some(transaction) = self.history.remove_from_redo(transaction_id).cloned() {
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some(op)
+        } else {
+            None
         }
-        ops
     }
 
     fn undo_or_redo(&mut self, transaction: Transaction) -> Result<Operation> {
@@ -1196,142 +1180,7 @@ impl Buffer {
     }
 
     pub fn subscribe(&mut self) -> Subscription {
-        let subscription = Subscription(Default::default());
-        self.subscriptions.push(Arc::downgrade(&subscription.0));
-        subscription
-    }
-
-    fn update_subscriptions(&mut self, edits: Patch<usize>) {
-        self.subscriptions.retain(|subscription| {
-            if let Some(subscription) = subscription.upgrade() {
-                subscription.lock().push(edits.clone());
-                true
-            } else {
-                false
-            }
-        });
-    }
-
-    pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> {
-        self.selections
-            .get(&set_id)
-            .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))
-    }
-
-    pub fn selection_sets(&self) -> impl Iterator<Item = (&SelectionSetId, &SelectionSet)> {
-        self.selections.iter()
-    }
-
-    fn build_selection_anchor_range_map<T: ToOffset>(
-        &self,
-        selections: &[Selection<T>],
-    ) -> Arc<AnchorRangeMap<SelectionState>> {
-        Arc::new(self.anchor_range_map(
-            Bias::Left,
-            Bias::Left,
-            selections.iter().map(|selection| {
-                let start = selection.start.to_offset(self);
-                let end = selection.end.to_offset(self);
-                let range = start..end;
-                let state = SelectionState {
-                    id: selection.id,
-                    reversed: selection.reversed,
-                    goal: selection.goal,
-                };
-                (range, state)
-            }),
-        ))
-    }
-
-    pub fn update_selection_set<T: ToOffset>(
-        &mut self,
-        set_id: SelectionSetId,
-        selections: &[Selection<T>],
-    ) -> Result<Operation> {
-        let selections = self.build_selection_anchor_range_map(selections);
-        let set = self
-            .selections
-            .get_mut(&set_id)
-            .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?;
-        set.selections = selections.clone();
-        Ok(Operation::UpdateSelections {
-            set_id,
-            selections,
-            lamport_timestamp: self.lamport_clock.tick(),
-        })
-    }
-
-    pub fn restore_selection_set(
-        &mut self,
-        set_id: SelectionSetId,
-        selections: Arc<AnchorRangeMap<SelectionState>>,
-    ) -> Result<Operation> {
-        let set = self
-            .selections
-            .get_mut(&set_id)
-            .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?;
-        set.selections = selections.clone();
-        Ok(Operation::UpdateSelections {
-            set_id,
-            selections,
-            lamport_timestamp: self.lamport_clock.tick(),
-        })
-    }
-
-    pub fn add_selection_set<T: ToOffset>(&mut self, selections: &[Selection<T>]) -> Operation {
-        let selections = self.build_selection_anchor_range_map(selections);
-        let set_id = self.lamport_clock.tick();
-        self.selections.insert(
-            set_id,
-            SelectionSet {
-                id: set_id,
-                selections: selections.clone(),
-                active: false,
-            },
-        );
-        Operation::UpdateSelections {
-            set_id,
-            selections,
-            lamport_timestamp: set_id,
-        }
-    }
-
-    pub fn add_raw_selection_set(&mut self, id: SelectionSetId, selections: SelectionSet) {
-        self.selections.insert(id, selections);
-    }
-
-    pub fn set_active_selection_set(
-        &mut self,
-        set_id: Option<SelectionSetId>,
-    ) -> Result<Operation> {
-        if let Some(set_id) = set_id {
-            assert_eq!(set_id.replica_id, self.replica_id());
-        }
-
-        for (id, set) in &mut self.selections {
-            if id.replica_id == self.local_clock.replica_id {
-                if Some(*id) == set_id {
-                    set.active = true;
-                } else {
-                    set.active = false;
-                }
-            }
-        }
-
-        Ok(Operation::SetActiveSelections {
-            set_id,
-            lamport_timestamp: self.lamport_clock.tick(),
-        })
-    }
-
-    pub fn remove_selection_set(&mut self, set_id: SelectionSetId) -> Result<Operation> {
-        self.selections
-            .remove(&set_id)
-            .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?;
-        Ok(Operation::RemoveSelections {
-            set_id,
-            lamport_timestamp: self.lamport_clock.tick(),
-        })
+        self.subscriptions.subscribe()
     }
 }
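
For reference, a minimal sketch (not part of this diff) of how the reworked, selection-free transaction API above composes. It assumes a `text::Buffer` has already been constructed, and uses only the signatures declared in this hunk: `start_transaction_at`, `end_transaction_at`, `undo_transaction`, and `TransactionId`.

```rust
use std::time::Instant;

// Group an edit into a transaction and return its id. Only the outermost
// `start_transaction_at` call yields an id; nested calls just bump the
// transaction depth and return None.
fn edit_once(buffer: &mut text::Buffer) -> Option<text::TransactionId> {
    let now = Instant::now();
    buffer.start_transaction_at(now);
    buffer.edit(vec![0..0], "hello");
    buffer
        .end_transaction_at(now)
        .map(|(id, _start_version)| id)
}

// Later, a specific transaction can be undone by id without popping newer
// entries off the undo stack (`remove_from_undo` handles this internally).
fn undo_that_edit(buffer: &mut text::Buffer, id: text::TransactionId) {
    if let Some(_op) = buffer.undo_transaction(id) {
        // The returned operation would be broadcast to other replicas.
    }
}
```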
 

crates/theme/src/theme.rs 🔗

@@ -155,10 +155,10 @@ pub struct ContactsPanel {
     pub host_username: ContainedText,
     pub tree_branch_width: f32,
     pub tree_branch_color: Color,
-    pub shared_worktree: WorktreeRow,
-    pub hovered_shared_worktree: WorktreeRow,
-    pub unshared_worktree: WorktreeRow,
-    pub hovered_unshared_worktree: WorktreeRow,
+    pub shared_project: WorktreeRow,
+    pub hovered_shared_project: WorktreeRow,
+    pub unshared_project: WorktreeRow,
+    pub hovered_unshared_project: WorktreeRow,
 }
 
 #[derive(Deserialize, Default)]

crates/theme_selector/src/theme_selector.rs 🔗

@@ -11,7 +11,7 @@ use parking_lot::Mutex;
 use postage::watch;
 use std::{cmp, sync::Arc};
 use theme::ThemeRegistry;
-use workspace::{Settings, Workspace, AppState};
+use workspace::{AppState, Settings, Workspace};
 
 #[derive(Clone)]
 pub struct ThemeSelectorParams {
@@ -64,14 +64,14 @@ impl ThemeSelector {
             Editor::single_line(
                 {
                     let settings = settings.clone();
-                    move |_| {
+                    Arc::new(move |_| {
                         let settings = settings.borrow();
                         EditorSettings {
                             tab_size: settings.tab_size,
                             style: settings.theme.selector.input_editor.as_editor(),
                             soft_wrap: editor::SoftWrap::None,
                         }
-                    }
+                    })
                 },
                 cx,
             )

crates/util/src/test.rs 🔗

@@ -35,3 +35,16 @@ fn write_tree(path: &Path, tree: serde_json::Value) {
         panic!("You must pass a JSON object to this helper")
     }
 }
+
+pub fn sample_text(rows: usize, cols: usize, start_char: char) -> String {
+    let mut text = String::new();
+    for row in 0..rows {
+        let c: char = (start_char as u32 + row as u32) as u8 as char;
+        let mut line = c.to_string().repeat(cols);
+        if row < rows - 1 {
+            line.push('\n');
+        }
+        text += &line;
+    }
+    text
+}
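
A quick illustration (not part of the diff) of what the new `sample_text` helper returns: each row repeats one character `cols` times, the character advances per row, and newlines separate rows with none after the last.

```rust
assert_eq!(sample_text(3, 4, 'a'), "aaaa\nbbbb\ncccc");
```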

crates/workspace/Cargo.toml 🔗

@@ -11,6 +11,7 @@ test-support = ["client/test-support", "project/test-support"]
 
 [dependencies]
 client = { path = "../client" }
+clock = { path = "../clock" }
 gpui = { path = "../gpui" }
 language = { path = "../language" }
 project = { path = "../project" }

crates/workspace/src/workspace.rs 🔗

@@ -6,6 +6,7 @@ mod status_bar;
 
 use anyhow::{anyhow, Result};
 use client::{Authenticate, ChannelList, Client, User, UserStore};
+use clock::ReplicaId;
 use gpui::{
     action,
     color::Color,
@@ -30,7 +31,6 @@ use sidebar::{Side, Sidebar, SidebarItemId, ToggleSidebarItem, ToggleSidebarItem
 use status_bar::StatusBar;
 pub use status_bar::StatusItemView;
 use std::{
-    collections::{hash_map::Entry, HashMap},
     future::Future,
     path::{Path, PathBuf},
     sync::Arc,
@@ -40,18 +40,24 @@ use theme::{Theme, ThemeRegistry};
 action!(Open, Arc<AppState>);
 action!(OpenNew, Arc<AppState>);
 action!(OpenPaths, OpenParams);
+action!(ToggleShare);
+action!(JoinProject, JoinProjectParams);
 action!(Save);
 action!(DebugElements);
 
 pub fn init(cx: &mut MutableAppContext) {
     cx.add_global_action(open);
     cx.add_global_action(move |action: &OpenPaths, cx: &mut MutableAppContext| {
-        open_paths(&action.0.paths, &action.0.app_state, cx).detach()
+        open_paths(&action.0.paths, &action.0.app_state, cx).detach();
     });
     cx.add_global_action(move |action: &OpenNew, cx: &mut MutableAppContext| {
         open_new(&action.0, cx)
     });
+    cx.add_global_action(move |action: &JoinProject, cx: &mut MutableAppContext| {
+        join_project(action.0.project_id, &action.0.app_state, cx).detach();
+    });
 
+    cx.add_action(Workspace::toggle_share);
     cx.add_action(Workspace::save_active_item);
     cx.add_action(Workspace::debug_elements);
     cx.add_action(Workspace::toggle_sidebar_item);
@@ -90,8 +96,11 @@ pub struct AppState {
     pub channel_list: ModelHandle<client::ChannelList>,
     pub entry_openers: Arc<[Box<dyn EntryOpener>]>,
     pub build_window_options: &'static dyn Fn() -> WindowOptions<'static>,
-    pub build_workspace:
-        &'static dyn Fn(&WorkspaceParams, &mut ViewContext<Workspace>) -> Workspace,
+    pub build_workspace: &'static dyn Fn(
+        ModelHandle<Project>,
+        &Arc<AppState>,
+        &mut ViewContext<Workspace>,
+    ) -> Workspace,
 }
 
 #[derive(Clone)]
@@ -100,6 +109,12 @@ pub struct OpenParams {
     pub app_state: Arc<AppState>,
 }
 
+#[derive(Clone)]
+pub struct JoinProjectParams {
+    pub project_id: u64,
+    pub app_state: Arc<AppState>,
+}
+
 pub trait EntryOpener {
     fn open(
         &self,
@@ -136,7 +151,9 @@ pub trait ItemView: View {
     fn has_conflict(&self, _: &AppContext) -> bool {
         false
     }
+    fn can_save(&self, cx: &AppContext) -> bool;
     fn save(&mut self, cx: &mut ViewContext<Self>) -> Result<Task<Result<()>>>;
+    fn can_save_as(&self, cx: &AppContext) -> bool;
     fn save_as(
         &mut self,
         worktree: ModelHandle<Worktree>,
@@ -180,6 +197,8 @@ pub trait ItemViewHandle {
     fn to_any(&self) -> AnyViewHandle;
     fn is_dirty(&self, cx: &AppContext) -> bool;
     fn has_conflict(&self, cx: &AppContext) -> bool;
+    fn can_save(&self, cx: &AppContext) -> bool;
+    fn can_save_as(&self, cx: &AppContext) -> bool;
     fn save(&self, cx: &mut MutableAppContext) -> Result<Task<Result<()>>>;
     fn save_as(
         &self,
@@ -310,6 +329,14 @@ impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
     fn to_any(&self) -> AnyViewHandle {
         self.into()
     }
+
+    fn can_save(&self, cx: &AppContext) -> bool {
+        self.read(cx).can_save(cx)
+    }
+
+    fn can_save_as(&self, cx: &AppContext) -> bool {
+        self.read(cx).can_save_as(cx)
+    }
 }
 
 impl Clone for Box<dyn ItemViewHandle> {
@@ -326,6 +353,7 @@ impl Clone for Box<dyn ItemHandle> {
 
 #[derive(Clone)]
 pub struct WorkspaceParams {
+    pub project: ModelHandle<Project>,
     pub client: Arc<Client>,
     pub fs: Arc<dyn Fs>,
     pub languages: Arc<LanguageRegistry>,
@@ -338,7 +366,8 @@ pub struct WorkspaceParams {
 impl WorkspaceParams {
     #[cfg(any(test, feature = "test-support"))]
     pub fn test(cx: &mut MutableAppContext) -> Self {
-        let languages = LanguageRegistry::new();
+        let fs = Arc::new(project::FakeFs::new());
+        let languages = Arc::new(LanguageRegistry::new());
         let client = Client::new();
         let http_client = client::test::FakeHttpClient::new(|_| async move {
             Ok(client::http::ServerResponse::new(404))
@@ -347,17 +376,45 @@ impl WorkspaceParams {
             gpui::fonts::with_font_cache(cx.font_cache().clone(), || theme::Theme::default());
         let settings = Settings::new("Courier", cx.font_cache(), Arc::new(theme)).unwrap();
         let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
+        let project = Project::local(
+            client.clone(),
+            user_store.clone(),
+            languages.clone(),
+            fs.clone(),
+            cx,
+        );
         Self {
+            project,
             channel_list: cx
                 .add_model(|cx| ChannelList::new(user_store.clone(), client.clone(), cx)),
             client,
-            fs: Arc::new(project::FakeFs::new()),
-            languages: Arc::new(languages),
+            fs,
+            languages,
             settings: watch::channel_with(settings).1,
             user_store,
             entry_openers: Arc::from([]),
         }
     }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn local(app_state: &Arc<AppState>, cx: &mut MutableAppContext) -> Self {
+        Self {
+            project: Project::local(
+                app_state.client.clone(),
+                app_state.user_store.clone(),
+                app_state.languages.clone(),
+                app_state.fs.clone(),
+                cx,
+            ),
+            client: app_state.client.clone(),
+            fs: app_state.fs.clone(),
+            languages: app_state.languages.clone(),
+            settings: app_state.settings.clone(),
+            user_store: app_state.user_store.clone(),
+            channel_list: app_state.channel_list.clone(),
+            entry_openers: app_state.entry_openers.clone(),
+        }
+    }
 }
 
 pub struct Workspace {
@@ -375,24 +432,12 @@ pub struct Workspace {
     project: ModelHandle<Project>,
     entry_openers: Arc<[Box<dyn EntryOpener>]>,
     items: Vec<Box<dyn WeakItemHandle>>,
-    loading_items: HashMap<
-        ProjectPath,
-        postage::watch::Receiver<Option<Result<Box<dyn ItemHandle>, Arc<anyhow::Error>>>>,
-    >,
     _observe_current_user: Task<()>,
 }
 
 impl Workspace {
     pub fn new(params: &WorkspaceParams, cx: &mut ViewContext<Self>) -> Self {
-        let project = cx.add_model(|_| {
-            Project::new(
-                params.languages.clone(),
-                params.client.clone(),
-                params.user_store.clone(),
-                params.fs.clone(),
-            )
-        });
-        cx.observe(&project, |_, _, cx| cx.notify()).detach();
+        cx.observe(&params.project, |_, _, cx| cx.notify()).detach();
 
         let pane = cx.add_view(|_| Pane::new(params.settings.clone()));
         let pane_id = pane.id();
@@ -438,10 +483,9 @@ impl Workspace {
             fs: params.fs.clone(),
             left_sidebar: Sidebar::new(Side::Left),
             right_sidebar: Sidebar::new(Side::Right),
-            project,
+            project: params.project.clone(),
             entry_openers: params.entry_openers.clone(),
             items: Default::default(),
-            loading_items: Default::default(),
             _observe_current_user,
         }
     }
@@ -636,7 +680,7 @@ impl Workspace {
         let worktree = match self
             .project
             .read(cx)
-            .worktree_for_id(project_path.worktree_id)
+            .worktree_for_id(project_path.worktree_id, cx)
         {
             Some(worktree) => worktree,
             None => {
@@ -645,47 +689,27 @@ impl Workspace {
             }
         };
 
-        if let Entry::Vacant(entry) = self.loading_items.entry(project_path.clone()) {
-            let (mut tx, rx) = postage::watch::channel();
-            entry.insert(rx);
-
-            let project_path = project_path.clone();
-            let entry_openers = self.entry_openers.clone();
-            cx.as_mut()
-                .spawn(|mut cx| async move {
-                    let item = worktree.update(&mut cx, move |worktree, cx| {
-                        for opener in entry_openers.iter() {
-                            if let Some(task) = opener.open(worktree, project_path.clone(), cx) {
-                                return task;
-                            }
-                        }
-
-                        cx.spawn(|_, _| async move {
-                            Err(anyhow!("no opener for path {:?} found", project_path))
-                        })
-                    });
-                    *tx.borrow_mut() = Some(item.await.map_err(Arc::new));
-                })
-                .detach();
-        }
+        let project_path = project_path.clone();
+        let entry_openers = self.entry_openers.clone();
+        let task = worktree.update(cx, |worktree, cx| {
+            for opener in entry_openers.iter() {
+                if let Some(task) = opener.open(worktree, project_path.clone(), cx) {
+                    return Some(task);
+                }
+            }
+            log::error!("no opener for path {:?} found", project_path);
+            None
+        })?;
 
         let pane = pane.downgrade();
-        let mut watch = self.loading_items.get(&project_path).unwrap().clone();
-
         Some(cx.spawn(|this, mut cx| async move {
-            let load_result = loop {
-                if let Some(load_result) = watch.borrow().as_ref() {
-                    break load_result.clone();
-                }
-                watch.recv().await;
-            };
-
+            let load_result = task.await;
             this.update(&mut cx, |this, cx| {
-                this.loading_items.remove(&project_path);
                 let pane = pane
                     .upgrade(&cx)
                     .ok_or_else(|| anyhow!("could not upgrade pane reference"))?;
                 let item = load_result?;
+
                 // By the time loading finishes, the entry could have been already added
                 // to the pane. If it was, we activate it, otherwise we'll store the
                 // item and add a new view for it.
@@ -694,7 +718,7 @@ impl Workspace {
                 {
                     Ok(existing)
                 } else {
-                    Ok(this.add_item(item.boxed_clone(), cx))
+                    Ok(this.add_item(item, cx))
                 }
             })
         }))
@@ -752,7 +776,43 @@ impl Workspace {
     pub fn save_active_item(&mut self, _: &Save, cx: &mut ViewContext<Self>) {
         if let Some(item) = self.active_item(cx) {
             let handle = cx.handle();
-            if item.project_path(cx.as_ref()).is_none() {
+            if item.can_save(cx) {
+                if item.has_conflict(cx.as_ref()) {
+                    const CONFLICT_MESSAGE: &'static str = "This file has changed on disk since you started editing it. Do you want to overwrite it?";
+
+                    cx.prompt(
+                        PromptLevel::Warning,
+                        CONFLICT_MESSAGE,
+                        &["Overwrite", "Cancel"],
+                        move |answer, cx| {
+                            if answer == 0 {
+                                cx.spawn(|mut cx| async move {
+                                    if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await
+                                    {
+                                        error!("failed to save item: {:?}, ", error);
+                                    }
+
+                                    handle.update(&mut cx, |this, cx| {
+                                        this.project.update(cx, |project, cx| project.diagnose(cx))
+                                    });
+                                })
+                                .detach();
+                            }
+                        },
+                    );
+                } else {
+                    cx.spawn(|this, mut cx| async move {
+                        if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await {
+                            error!("failed to save item: {:?}, ", error);
+                        }
+
+                        this.update(&mut cx, |this, cx| {
+                            this.project.update(cx, |project, cx| project.diagnose(cx))
+                        });
+                    })
+                    .detach();
+                }
+            } else if item.can_save_as(cx) {
                 let worktree = self.worktrees(cx).first();
                 let start_abs_path = worktree
                     .and_then(|w| w.read(cx).as_local())
@@ -780,36 +840,14 @@ impl Workspace {
                             if let Err(error) = result {
                                 error!("failed to save item: {:?}, ", error);
                             }
+
+                            handle.update(&mut cx, |this, cx| {
+                                this.project.update(cx, |project, cx| project.diagnose(cx))
+                            });
                         })
                         .detach()
                     }
                 });
-                return;
-            } else if item.has_conflict(cx.as_ref()) {
-                const CONFLICT_MESSAGE: &'static str = "This file has changed on disk since you started editing it. Do you want to overwrite it?";
-
-                cx.prompt(
-                    PromptLevel::Warning,
-                    CONFLICT_MESSAGE,
-                    &["Overwrite", "Cancel"],
-                    move |answer, cx| {
-                        if answer == 0 {
-                            cx.spawn(|mut cx| async move {
-                                if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await {
-                                    error!("failed to save item: {:?}, ", error);
-                                }
-                            })
-                            .detach();
-                        }
-                    },
-                );
-            } else {
-                cx.spawn(|_, mut cx| async move {
-                    if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await {
-                        error!("failed to save item: {:?}, ", error);
-                    }
-                })
-                .detach();
             }
         }
     }
@@ -968,6 +1006,18 @@ impl Workspace {
         &self.active_pane
     }
 
+    fn toggle_share(&mut self, _: &ToggleShare, cx: &mut ViewContext<Self>) {
+        self.project.update(cx, |project, cx| {
+            if project.is_local() {
+                if project.is_shared() {
+                    project.unshare(cx).detach();
+                } else {
+                    project.share(cx).detach();
+                }
+            }
+        });
+    }
+
     fn render_connection_status(&self) -> Option<ElementBox> {
         let theme = &self.settings.borrow().theme;
         match &*self.client.status().borrow() {
@@ -1019,18 +1069,14 @@ impl Workspace {
                     .with_child(
                         Align::new(
                             Flex::row()
+                                .with_children(self.render_share_icon(cx))
                                 .with_children(self.render_collaborators(theme, cx))
-                                .with_child(
-                                    self.render_avatar(
-                                        self.user_store.read(cx).current_user().as_ref(),
-                                        self.project
-                                            .read(cx)
-                                            .active_worktree()
-                                            .map(|worktree| worktree.read(cx).replica_id()),
-                                        theme,
-                                        cx,
-                                    ),
-                                )
+                                .with_child(self.render_avatar(
+                                    self.user_store.read(cx).current_user().as_ref(),
+                                    self.project.read(cx).replica_id(),
+                                    theme,
+                                    cx,
+                                ))
                                 .with_children(self.render_connection_status())
                                 .boxed(),
                         )
@@ -1047,30 +1093,26 @@ impl Workspace {
     }
 
     fn render_collaborators(&self, theme: &Theme, cx: &mut RenderContext<Self>) -> Vec<ElementBox> {
-        let mut elements = Vec::new();
-        if let Some(active_worktree) = self.project.read(cx).active_worktree() {
-            let collaborators = active_worktree
-                .read(cx)
-                .collaborators()
-                .values()
-                .cloned()
-                .collect::<Vec<_>>();
-            for collaborator in collaborators {
-                elements.push(self.render_avatar(
-                    Some(&collaborator.user),
-                    Some(collaborator.replica_id),
-                    theme,
-                    cx,
-                ));
-            }
-        }
-        elements
+        let mut collaborators = self
+            .project
+            .read(cx)
+            .collaborators()
+            .values()
+            .cloned()
+            .collect::<Vec<_>>();
+        collaborators.sort_unstable_by_key(|collaborator| collaborator.replica_id);
+        collaborators
+            .into_iter()
+            .map(|collaborator| {
+                self.render_avatar(Some(&collaborator.user), collaborator.replica_id, theme, cx)
+            })
+            .collect()
     }
 
     fn render_avatar(
         &self,
         user: Option<&Arc<User>>,
-        replica_id: Option<u16>,
+        replica_id: ReplicaId,
         theme: &Theme,
         cx: &mut RenderContext<Self>,
     ) -> ElementBox {
@@ -1088,15 +1130,13 @@ impl Workspace {
                         .boxed(),
                     )
                     .with_child(
-                        AvatarRibbon::new(replica_id.map_or(Default::default(), |id| {
-                            theme.editor.replica_selection_style(id).cursor
-                        }))
-                        .constrained()
-                        .with_width(theme.workspace.titlebar.avatar_ribbon.width)
-                        .with_height(theme.workspace.titlebar.avatar_ribbon.height)
-                        .aligned()
-                        .bottom()
-                        .boxed(),
+                        AvatarRibbon::new(theme.editor.replica_selection_style(replica_id).cursor)
+                            .constrained()
+                            .with_width(theme.workspace.titlebar.avatar_ribbon.width)
+                            .with_height(theme.workspace.titlebar.avatar_ribbon.height)
+                            .aligned()
+                            .bottom()
+                            .boxed(),
                     )
                     .boxed(),
             )
@@ -1120,6 +1160,35 @@ impl Workspace {
             .boxed()
         }
     }
+
+    fn render_share_icon(&self, cx: &mut RenderContext<Self>) -> Option<ElementBox> {
+        if self.project().read(cx).is_local() && self.client.user_id().is_some() {
+            enum Share {}
+
+            let color = if self.project().read(cx).is_shared() {
+                Color::green()
+            } else {
+                Color::red()
+            };
+            Some(
+                MouseEventHandler::new::<Share, _, _, _>(0, cx, |_, _| {
+                    Align::new(
+                        ConstrainedBox::new(
+                            Svg::new("icons/broadcast-24.svg").with_color(color).boxed(),
+                        )
+                        .with_width(24.)
+                        .boxed(),
+                    )
+                    .boxed()
+                })
+                .with_cursor_style(CursorStyle::PointingHand)
+                .on_click(|cx| cx.dispatch_action(ToggleShare))
+                .boxed(),
+            )
+        } else {
+            None
+        }
+    }
 }
 
 impl Entity for Workspace {
@@ -1281,20 +1350,6 @@ impl std::fmt::Debug for OpenParams {
     }
 }
 
-impl<'a> From<&'a AppState> for WorkspaceParams {
-    fn from(state: &'a AppState) -> Self {
-        Self {
-            client: state.client.clone(),
-            fs: state.fs.clone(),
-            languages: state.languages.clone(),
-            settings: state.settings.clone(),
-            user_store: state.user_store.clone(),
-            channel_list: state.channel_list.clone(),
-            entry_openers: state.entry_openers.clone(),
-        }
-    }
-}
-
 fn open(action: &Open, cx: &mut MutableAppContext) {
     let app_state = action.0.clone();
     cx.prompt_for_paths(
@@ -1337,7 +1392,14 @@ pub fn open_paths(
 
     let workspace = existing.unwrap_or_else(|| {
         cx.add_window((app_state.build_window_options)(), |cx| {
-            (app_state.build_workspace)(&WorkspaceParams::from(app_state.as_ref()), cx)
+            let project = Project::local(
+                app_state.client.clone(),
+                app_state.user_store.clone(),
+                app_state.languages.clone(),
+                app_state.fs.clone(),
+                cx,
+            );
+            (app_state.build_workspace)(project, &app_state, cx)
         })
         .1
     });
@@ -1349,9 +1411,49 @@ pub fn open_paths(
     })
 }
 
+pub fn join_project(
+    project_id: u64,
+    app_state: &Arc<AppState>,
+    cx: &mut MutableAppContext,
+) -> Task<Result<ViewHandle<Workspace>>> {
+    for window_id in cx.window_ids().collect::<Vec<_>>() {
+        if let Some(workspace) = cx.root_view::<Workspace>(window_id) {
+            if workspace.read(cx).project().read(cx).remote_id() == Some(project_id) {
+                return Task::ready(Ok(workspace));
+            }
+        }
+    }
+
+    let app_state = app_state.clone();
+    cx.spawn(|mut cx| async move {
+        let project = Project::remote(
+            project_id,
+            app_state.client.clone(),
+            app_state.user_store.clone(),
+            app_state.languages.clone(),
+            app_state.fs.clone(),
+            &mut cx,
+        )
+        .await?;
+        let (_, workspace) = cx.update(|cx| {
+            cx.add_window((app_state.build_window_options)(), |cx| {
+                (app_state.build_workspace)(project, &app_state, cx)
+            })
+        });
+        Ok(workspace)
+    })
+}
+
 fn open_new(app_state: &Arc<AppState>, cx: &mut MutableAppContext) {
     let (window_id, workspace) = cx.add_window((app_state.build_window_options)(), |cx| {
-        (app_state.build_workspace)(&app_state.as_ref().into(), cx)
+        let project = Project::local(
+            app_state.client.clone(),
+            app_state.user_store.clone(),
+            app_state.languages.clone(),
+            app_state.fs.clone(),
+            cx,
+        );
+        (app_state.build_workspace)(project, &app_state, cx)
     });
     cx.dispatch_action(window_id, vec![workspace.id()], &OpenNew(app_state.clone()));
 }

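Editor's note: the reworked `save_active_item` above now dispatches on three cases — a conflicted item prompts before overwriting, a savable item saves directly, and an untitled item falls back to "save as" — and re-runs `project.diagnose` after each successful save. The following standalone sketch is illustrative only (not Zed/gpui code; the enum and function names are made up) and just models that dispatch order:

```rust
/// Illustrative-only model of the dispatch order in `save_active_item`:
/// a conflicted item prompts before overwriting, a savable item saves
/// directly, and anything else falls back to "save as".
#[derive(Debug, PartialEq)]
enum SaveAction {
    PromptToOverwrite,
    Save,
    SaveAs,
    Nothing,
}

fn decide(can_save: bool, has_conflict: bool, can_save_as: bool) -> SaveAction {
    if can_save {
        if has_conflict {
            SaveAction::PromptToOverwrite
        } else {
            SaveAction::Save
        }
    } else if can_save_as {
        SaveAction::SaveAs
    } else {
        SaveAction::Nothing
    }
}

fn main() {
    assert_eq!(decide(true, true, false), SaveAction::PromptToOverwrite);
    assert_eq!(decide(true, false, false), SaveAction::Save);
    assert_eq!(decide(false, false, true), SaveAction::SaveAs);
    assert_eq!(decide(false, false, false), SaveAction::Nothing);
}
```
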
crates/zed/Cargo.toml 🔗

@@ -29,9 +29,11 @@ test-support = [
 
 [dependencies]
 chat_panel = { path = "../chat_panel" }
+collections = { path = "../collections" }
 client = { path = "../client" }
 clock = { path = "../clock" }
 contacts_panel = { path = "../contacts_panel" }
+diagnostics = { path = "../diagnostics" }
 editor = { path = "../editor" }
 file_finder = { path = "../file_finder" }
 fsevent = { path = "../fsevent" }

crates/zed/assets/icons/broadcast-24.svg 🔗

@@ -0,0 +1,6 @@
+<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M6.87348 15.1266C4.04217 12.2953 4.04217 7.70484 6.87348 4.87354M17.1265 4.87354C19.9578 7.70484 19.9578 12.2953 17.1265 15.1266" stroke="#636B78" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M8.9948 13.0052C7.33507 11.3454 7.33507 8.65448 8.9948 6.99475M15.0052 6.99475C16.6649 8.65448 16.6649 11.3454 15.0052 13.0052" stroke="#636B78" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M12.5 10C12.5 10.2761 12.2761 10.5 12 10.5C11.7239 10.5 11.5 10.2761 11.5 10C11.5 9.72386 11.7239 9.5 12 9.5C12.2761 9.5 12.5 9.72386 12.5 10Z" stroke="#636B78" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M12 13.75V19.25" stroke="#636B78" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>

crates/zed/assets/themes/_base.toml 🔗

@@ -156,30 +156,30 @@ host_username = { extends = "$text.0", padding.left = 8 }
 tree_branch_width = 1
 tree_branch_color = "$surface.2"
 
-[contacts_panel.worktree]
+[contacts_panel.project]
 height = 24
 padding = { left = 8 }
 guest_avatar = { corner_radius = 8, width = 14 }
 guest_avatar_spacing = 4
 
-[contacts_panel.worktree.name]
+[contacts_panel.project.name]
 extends = "$text.1"
 margin = { right = 6 }
 
-[contacts_panel.unshared_worktree]
-extends = "$contacts_panel.worktree"
+[contacts_panel.unshared_project]
+extends = "$contacts_panel.project"
 
-[contacts_panel.hovered_unshared_worktree]
-extends = "$contacts_panel.unshared_worktree"
+[contacts_panel.hovered_unshared_project]
+extends = "$contacts_panel.unshared_project"
 background = "$state.hover"
 corner_radius = 6
 
-[contacts_panel.shared_worktree]
-extends = "$contacts_panel.worktree"
+[contacts_panel.shared_project]
+extends = "$contacts_panel.project"
 name.color = "$text.0.color"
 
-[contacts_panel.hovered_shared_worktree]
-extends = "$contacts_panel.shared_worktree"
+[contacts_panel.hovered_shared_project]
+extends = "$contacts_panel.shared_project"
 background = "$state.hover"
 corner_radius = 6
 

crates/zed/src/language.rs 🔗

@@ -7,6 +7,184 @@ use std::{str, sync::Arc};
 #[folder = "languages"]
 struct LanguageDir;
 
+mod rust {
+    use anyhow::Result;
+    use async_trait::async_trait;
+    use collections::{HashMap, HashSet};
+    use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity};
+    use parking_lot::Mutex;
+    use serde::Deserialize;
+    use serde_json::Deserializer;
+    use smol::process::Command;
+    use std::path::{Path, PathBuf};
+    use std::sync::Arc;
+
+    #[derive(Default)]
+    pub struct DiagnosticProvider {
+        reported_paths: Mutex<HashSet<Arc<Path>>>,
+    }
+
+    #[derive(Debug, Deserialize)]
+    struct Check {
+        message: CompilerMessage,
+    }
+
+    #[derive(Debug, Deserialize)]
+    struct CompilerMessage {
+        code: Option<ErrorCode>,
+        spans: Vec<Span>,
+        message: String,
+        level: ErrorLevel,
+        children: Vec<CompilerMessage>,
+    }
+
+    #[derive(Debug, Deserialize)]
+    enum ErrorLevel {
+        #[serde(rename = "warning")]
+        Warning,
+        #[serde(rename = "error")]
+        Error,
+        #[serde(rename = "help")]
+        Help,
+        #[serde(rename = "note")]
+        Note,
+    }
+
+    #[derive(Debug, Deserialize)]
+    struct ErrorCode {
+        code: String,
+    }
+
+    #[derive(Clone, Debug, Deserialize)]
+    struct Span {
+        is_primary: bool,
+        file_name: PathBuf,
+        byte_start: usize,
+        byte_end: usize,
+        expansion: Option<Box<Expansion>>,
+    }
+
+    #[derive(Clone, Debug, Deserialize)]
+    struct Expansion {
+        span: Span,
+    }
+
+    #[async_trait]
+    impl language::DiagnosticProvider for DiagnosticProvider {
+        async fn diagnose(
+            &self,
+            root_path: Arc<Path>,
+        ) -> Result<HashMap<Arc<Path>, Vec<DiagnosticEntry<usize>>>> {
+            let output = Command::new("cargo")
+                .arg("check")
+                .args(["--message-format", "json"])
+                .current_dir(&root_path)
+                .output()
+                .await?;
+
+            let mut group_id = 0;
+            let mut diagnostics_by_path = HashMap::default();
+            let mut new_reported_paths = HashSet::default();
+            for value in
+                Deserializer::from_slice(&output.stdout).into_iter::<&serde_json::value::RawValue>()
+            {
+                if let Ok(check) = serde_json::from_str::<Check>(value?.get()) {
+                    let check_severity = match check.message.level {
+                        ErrorLevel::Warning => DiagnosticSeverity::WARNING,
+                        ErrorLevel::Error => DiagnosticSeverity::ERROR,
+                        ErrorLevel::Help => DiagnosticSeverity::HINT,
+                        ErrorLevel::Note => DiagnosticSeverity::INFORMATION,
+                    };
+
+                    let mut primary_span = None;
+                    for mut span in check.message.spans {
+                        if let Some(mut expansion) = span.expansion {
+                            expansion.span.is_primary = span.is_primary;
+                            span = expansion.span;
+                        }
+
+                        let span_path: Arc<Path> = span.file_name.as_path().into();
+                        new_reported_paths.insert(span_path.clone());
+                        diagnostics_by_path
+                            .entry(span_path)
+                            .or_insert(Vec::new())
+                            .push(DiagnosticEntry {
+                                range: span.byte_start..span.byte_end,
+                                diagnostic: Diagnostic {
+                                    code: check.message.code.as_ref().map(|c| c.code.clone()),
+                                    severity: check_severity,
+                                    message: check.message.message.clone(),
+                                    group_id,
+                                    is_valid: true,
+                                    is_primary: span.is_primary,
+                                    is_disk_based: true,
+                                },
+                            });
+
+                        if span.is_primary {
+                            primary_span = Some(span);
+                        }
+                    }
+
+                    for mut child in check.message.children {
+                        if child.spans.is_empty() {
+                            if let Some(primary_span) = primary_span.clone() {
+                                child.spans.push(primary_span);
+                            }
+                        } else {
+                            // TODO
+                            continue;
+                        }
+
+                        let child_severity = match child.level {
+                            ErrorLevel::Warning => DiagnosticSeverity::WARNING,
+                            ErrorLevel::Error => DiagnosticSeverity::ERROR,
+                            ErrorLevel::Help => DiagnosticSeverity::HINT,
+                            ErrorLevel::Note => DiagnosticSeverity::INFORMATION,
+                        };
+
+                        for mut span in child.spans {
+                            if let Some(expansion) = span.expansion {
+                                span = expansion.span;
+                            }
+
+                            let span_path: Arc<Path> = span.file_name.as_path().into();
+                            new_reported_paths.insert(span_path.clone());
+                            diagnostics_by_path
+                                .entry(span_path)
+                                .or_insert(Vec::new())
+                                .push(DiagnosticEntry {
+                                    range: span.byte_start..span.byte_end,
+                                    diagnostic: Diagnostic {
+                                        code: child.code.as_ref().map(|c| c.code.clone()),
+                                        severity: child_severity,
+                                        message: child.message.clone(),
+                                        group_id,
+                                        is_valid: true,
+                                        is_primary: false,
+                                        is_disk_based: true,
+                                    },
+                                });
+                        }
+                    }
+
+                    group_id += 1;
+                }
+            }
+
+            let reported_paths = &mut *self.reported_paths.lock();
+            for old_reported_path in reported_paths.iter() {
+                if !diagnostics_by_path.contains_key(old_reported_path) {
+                    diagnostics_by_path.insert(old_reported_path.clone(), Default::default());
+                }
+            }
+            *reported_paths = new_reported_paths;
+
+            Ok(diagnostics_by_path)
+        }
+    }
+}
+
 pub fn build_language_registry() -> LanguageRegistry {
     let mut languages = LanguageRegistry::default();
     languages.add(Arc::new(rust()));
@@ -24,6 +202,7 @@ fn rust() -> Language {
         .unwrap()
         .with_indents_query(load_query("rust/indents.scm").as_ref())
         .unwrap()
+        .with_diagnostic_provider(rust::DiagnosticProvider::default())
 }
 
 fn markdown() -> Language {

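Editor's note: a standalone sketch of the streaming-JSON technique the `DiagnosticProvider` above relies on — run `cargo check --message-format json`, then lazily decode each emitted value with `serde_json`'s `Deserializer` and `RawValue`, skipping records that are not compiler messages. Illustrative only (assumes `serde` with the `derive` feature and `serde_json` as dependencies); the struct names are simplified stand-ins for the ones in the diff:

```rust
use serde::Deserialize;
use serde_json::Deserializer;
use std::process::Command;

// Simplified stand-ins for the `Check`/`CompilerMessage` structs in the diff;
// unknown JSON fields are ignored by serde's defaults.
#[derive(Debug, Deserialize)]
struct Check {
    message: CompilerMessage,
}

#[derive(Debug, Deserialize)]
struct CompilerMessage {
    level: String,
    message: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `cargo check` emits one JSON value per line; only some of them are
    // compiler messages, and the rest (artifact records, build-finished
    // markers) simply fail to parse as `Check` and are skipped.
    let output = Command::new("cargo")
        .arg("check")
        .args(["--message-format", "json"])
        .output()?;

    for value in
        Deserializer::from_slice(&output.stdout).into_iter::<&serde_json::value::RawValue>()
    {
        if let Ok(check) = serde_json::from_str::<Check>(value?.get()) {
            println!("[{}] {}", check.message.level, check.message.message);
        }
    }
    Ok(())
}
```
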
crates/zed/src/main.rs 🔗

@@ -58,9 +58,9 @@ fn main() {
         editor::init(cx, &mut entry_openers);
         go_to_line::init(cx);
         file_finder::init(cx);
-        contacts_panel::init(cx);
         chat_panel::init(cx);
         project_panel::init(cx);
+        diagnostics::init(cx);
 
         let app_state = Arc::new(AppState {
             languages: languages.clone(),

crates/zed/src/zed.rs 🔗

@@ -14,9 +14,10 @@ use gpui::{
     geometry::vector::vec2f,
     keymap::Binding,
     platform::{WindowBounds, WindowOptions},
-    ViewContext,
+    ModelHandle, ViewContext,
 };
 pub use lsp;
+use project::Project;
 pub use project::{self, fs};
 use project_panel::ProjectPanel;
 use std::sync::Arc;
@@ -48,27 +49,39 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut gpui::MutableAppContext) {
     ])
 }
 
-pub fn build_workspace(params: &WorkspaceParams, cx: &mut ViewContext<Workspace>) -> Workspace {
-    let mut workspace = Workspace::new(params, cx);
+pub fn build_workspace(
+    project: ModelHandle<Project>,
+    app_state: &Arc<AppState>,
+    cx: &mut ViewContext<Workspace>,
+) -> Workspace {
+    let workspace_params = WorkspaceParams {
+        project,
+        client: app_state.client.clone(),
+        fs: app_state.fs.clone(),
+        languages: app_state.languages.clone(),
+        settings: app_state.settings.clone(),
+        user_store: app_state.user_store.clone(),
+        channel_list: app_state.channel_list.clone(),
+        entry_openers: app_state.entry_openers.clone(),
+    };
+    let mut workspace = Workspace::new(&workspace_params, cx);
     let project = workspace.project().clone();
     workspace.left_sidebar_mut().add_item(
         "icons/folder-tree-16.svg",
-        ProjectPanel::new(project, params.settings.clone(), cx).into(),
+        ProjectPanel::new(project, app_state.settings.clone(), cx).into(),
     );
     workspace.right_sidebar_mut().add_item(
         "icons/user-16.svg",
-        cx.add_view(|cx| {
-            ContactsPanel::new(params.user_store.clone(), params.settings.clone(), cx)
-        })
-        .into(),
+        cx.add_view(|cx| ContactsPanel::new(app_state.clone(), cx))
+            .into(),
     );
     workspace.right_sidebar_mut().add_item(
         "icons/comment-16.svg",
         cx.add_view(|cx| {
             ChatPanel::new(
-                params.client.clone(),
-                params.channel_list.clone(),
-                params.settings.clone(),
+                app_state.client.clone(),
+                app_state.channel_list.clone(),
+                app_state.settings.clone(),
                 cx,
             )
         })
@@ -76,9 +89,9 @@ pub fn build_workspace(params: &WorkspaceParams, cx: &mut ViewContext<Workspace>
     );
 
     let diagnostic =
-        cx.add_view(|_| editor::items::DiagnosticMessage::new(params.settings.clone()));
+        cx.add_view(|_| editor::items::DiagnosticMessage::new(app_state.settings.clone()));
     let cursor_position =
-        cx.add_view(|_| editor::items::CursorPosition::new(params.settings.clone()));
+        cx.add_view(|_| editor::items::CursorPosition::new(app_state.settings.clone()));
     workspace.status_bar().update(cx, |status_bar, cx| {
         status_bar.add_left_item(diagnostic, cx);
         status_bar.add_right_item(cursor_position, cx);
@@ -225,8 +238,8 @@ mod tests {
                 }),
             )
             .await;
-
-        let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx));
+        let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
+        let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
         workspace
             .update(&mut cx, |workspace, cx| {
                 workspace.add_worktree(Path::new("/root"), cx)
@@ -340,7 +353,8 @@ mod tests {
         fs.insert_file("/dir1/a.txt", "".into()).await.unwrap();
         fs.insert_file("/dir2/b.txt", "".into()).await.unwrap();
 
-        let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx));
+        let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
+        let (_, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
         workspace
             .update(&mut cx, |workspace, cx| {
                 workspace.add_worktree("/dir1".as_ref(), cx)
@@ -378,7 +392,7 @@ mod tests {
                 .read(cx)
                 .worktrees(cx)
                 .iter()
-                .map(|w| w.read(cx).as_local().unwrap().abs_path())
+                .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
                 .collect::<HashSet<_>>();
             assert_eq!(
                 worktree_roots,
@@ -406,8 +420,8 @@ mod tests {
         let fs = app_state.fs.as_fake();
         fs.insert_tree("/root", json!({ "a.txt": "" })).await;
 
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx));
+        let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
+        let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
         workspace
             .update(&mut cx, |workspace, cx| {
                 workspace.add_worktree(Path::new("/root"), cx)
@@ -453,7 +467,7 @@ mod tests {
     async fn test_open_and_save_new_file(mut cx: gpui::TestAppContext) {
         let app_state = cx.update(test_app_state);
         app_state.fs.as_fake().insert_dir("/root").await.unwrap();
-        let params = app_state.as_ref().into();
+        let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
         let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
         workspace
             .update(&mut cx, |workspace, cx| {
@@ -570,7 +584,7 @@ mod tests {
     ) {
         let app_state = cx.update(test_app_state);
         app_state.fs.as_fake().insert_dir("/root").await.unwrap();
-        let params = app_state.as_ref().into();
+        let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
         let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
 
         // Create a new untitled buffer
@@ -628,8 +642,8 @@ mod tests {
             )
             .await;
 
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx));
+        let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx));
+        let (window_id, workspace) = cx.add_window(|cx| Workspace::new(&params, cx));
         workspace
             .update(&mut cx, |workspace, cx| {
                 workspace.add_worktree(Path::new("/root"), cx)

script/seed-db 🔗

@@ -1,5 +1,4 @@
 #!/bin/bash
 
 set -e
-cd server
-cargo run --features seed-support --bin seed
+cargo run --package=zed-server --features seed-support --bin seed