Merge pull request #218 from zed-industries/lsp

Created by Antonio Scandurra

Integrate rust-analyzer and highlight diagnostics
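For context, the language server protocol that rust-analyzer speaks reports diagnostics using zero-based positions whose `character` field counts UTF-16 code units, which is why this change also adds a `PointUtf16` type to `crates/buffer`. The sketch below only illustrates the `lsp-types` crate (version 0.91) that this PR pulls in; it is not code from the new `crates/lsp` crate, and the diagnostic contents are made up.

```rust
use lsp_types::{Diagnostic, Position, Range};

fn main() {
    // LSP positions are zero-based; `character` counts UTF-16 code units.
    let range = Range::new(Position::new(2, 4), Position::new(2, 9));

    // A minimal diagnostic, roughly the shape a server such as
    // rust-analyzer would publish via `textDocument/publishDiagnostics`.
    let diagnostic = Diagnostic::new_simple(range, "unused variable: `foo`".into());

    assert_eq!(diagnostic.range.start.character, 4);
    println!("{}", diagnostic.message);
}
```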

Change summary

.github/workflows/ci.yml                  |   8 
Cargo.lock                                |  61 +
README.md                                 |   8 
crates/buffer/Cargo.toml                  |   3 
crates/buffer/src/anchor.rs               | 246 +++++++
crates/buffer/src/lib.rs                  | 763 ++++++++++--------------
crates/buffer/src/point.rs                |  28 
crates/buffer/src/point_utf16.rs          | 111 +++
crates/buffer/src/rope.rs                 | 261 +++++++
crates/buffer/src/selection.rs            |  54 -
crates/buffer/src/tests.rs                |  17 
crates/editor/src/display_map.rs          |  22 
crates/editor/src/display_map/fold_map.rs | 144 ++--
crates/editor/src/display_map/tab_map.rs  |  40 
crates/editor/src/display_map/wrap_map.rs |  34 
crates/editor/src/element.rs              |  35 
crates/editor/src/lib.rs                  |  46 
crates/gpui/Cargo.toml                    |   1 
crates/gpui/examples/text.rs              |   4 
crates/gpui/src/app.rs                    | 107 +++
crates/gpui/src/elements/label.rs         |   4 
crates/gpui/src/executor.rs               |  11 
crates/gpui/src/fonts.rs                  |  39 
crates/gpui/src/platform.rs               |   3 
crates/gpui/src/platform/mac/fonts.rs     |  12 
crates/gpui/src/platform/mac/platform.rs  |  44 +
crates/gpui/src/platform/test.rs          |   8 
crates/gpui/src/text_layout.rs            |  34 
crates/language/Cargo.toml                |  11 
crates/language/build.rs                  |   6 
crates/language/src/language.rs           |  69 ++
crates/language/src/lib.rs                | 648 +++++++++++++++++---
crates/language/src/proto.rs              | 315 ++++++++++
crates/language/src/tests.rs              | 316 +++++++++-
crates/lsp/Cargo.toml                     |  28 
crates/lsp/src/lib.rs                     | 710 +++++++++++++++++++++++
crates/project/Cargo.toml                 |  10 
crates/project/src/worktree.rs            | 398 ++++++++++--
crates/rpc/proto/zed.proto                |  23 
crates/rpc/src/peer.rs                    |   4 
crates/server/src/rpc.rs                  | 136 ++++
crates/sum_tree/src/cursor.rs             |  18 
crates/sum_tree/src/lib.rs                |   2 
crates/theme/src/lib.rs                   |  10 
crates/workspace/src/items.rs             |  19 
crates/zed/Cargo.toml                     |   5 
crates/zed/assets/themes/_base.toml       |   5 
crates/zed/assets/themes/light.toml       |   2 
crates/zed/languages/rust/config.toml     |   4 
crates/zed/src/language.rs                |   2 
crates/zed/src/lib.rs                     |   1 
script/bundle                             |   6 
script/download-rust-analyzer             |  19 
script/server                             |   2 
script/sqlx                               |   2 
55 files changed, 3,916 insertions(+), 1,003 deletions(-)

Detailed changes

.github/workflows/ci.yml 🔗

@@ -32,6 +32,11 @@ jobs:
         with:
           clean: false
 
+      - name: Download rust-analyzer
+        run: |
+          script/download-rust-analyzer
+          echo "$PWD/vendor/bin" >> $GITHUB_PATH
+
       - name: Run tests
         run: cargo test --workspace --no-fail-fast
 
@@ -63,6 +68,9 @@ jobs:
         with:
           clean: false
 
+      - name: Download rust-analyzer
+        run: script/download-rust-analyzer
+
       - name: Create app bundle
         run: script/bundle
 

Cargo.lock 🔗

@@ -328,6 +328,15 @@ dependencies = [
  "futures-lite",
 ]
 
+[[package]]
+name = "async-pipe"
+version = "0.1.3"
+source = "git+https://github.com/routerify/async-pipe-rs?rev=feeb77e83142a9ff837d0767652ae41bfc5d8e47#feeb77e83142a9ff837d0767652ae41bfc5d8e47"
+dependencies = [
+ "futures",
+ "log",
+]
+
 [[package]]
 name = "async-process"
 version = "1.0.2"
@@ -752,7 +761,6 @@ dependencies = [
  "gpui",
  "log",
  "rand 0.8.3",
- "rpc",
  "seahash",
  "smallvec",
  "sum_tree",
@@ -2300,6 +2308,7 @@ dependencies = [
  "etagere",
  "font-kit",
  "foreign-types",
+ "futures",
  "gpui_macros",
  "image 0.23.14",
  "lazy_static",
@@ -2819,7 +2828,9 @@ dependencies = [
  "gpui",
  "lazy_static",
  "log",
+ "lsp",
  "parking_lot",
+ "postage",
  "rand 0.8.3",
  "rpc",
  "serde 1.0.125",
@@ -2966,6 +2977,39 @@ dependencies = [
  "scoped-tls",
 ]
 
+[[package]]
+name = "lsp"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-pipe",
+ "futures",
+ "gpui",
+ "log",
+ "lsp-types",
+ "parking_lot",
+ "postage",
+ "serde 1.0.125",
+ "serde_json 1.0.64",
+ "simplelog",
+ "smol",
+ "unindent",
+ "util",
+]
+
+[[package]]
+name = "lsp-types"
+version = "0.91.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be7801b458592d0998af808d97f6a85a6057af3aaf2a2a5c3c677702bbeb4ed7"
+dependencies = [
+ "bitflags 1.2.1",
+ "serde 1.0.125",
+ "serde_json 1.0.64",
+ "serde_repr",
+ "url",
+]
+
 [[package]]
 name = "lzw"
 version = "0.10.0"
@@ -3780,16 +3824,19 @@ dependencies = [
  "lazy_static",
  "libc",
  "log",
+ "lsp",
  "parking_lot",
  "postage",
  "rand 0.8.3",
  "rpc",
  "serde 1.0.125",
  "serde_json 1.0.64",
+ "simplelog",
  "smol",
  "sum_tree",
  "tempdir",
  "toml 0.5.8",
+ "unindent",
  "util",
 ]
 
@@ -4589,6 +4636,17 @@ dependencies = [
  "thiserror",
 ]
 
+[[package]]
+name = "serde_repr"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "98d0516900518c29efa217c298fa1f4e6c6ffc85ae29fd7f4ee48f176e1a9ed5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "serde_urlencoded"
 version = "0.7.0"
@@ -6199,6 +6257,7 @@ dependencies = [
  "libc",
  "log",
  "log-panics",
+ "lsp",
  "num_cpus",
  "parking_lot",
  "people_panel",

README.md 🔗

@@ -6,6 +6,14 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea
 
 ## Development tips
 
+### Compiling on macOS Monterey
+
+The Zed server uses libcurl, which currently triggers [a bug](https://github.com/rust-lang/rust/issues/90342) in `rustc`. To work around this bug, export the following environment variable:
+
+```
+export MACOSX_DEPLOYMENT_TARGET=10.7
+```
+
 ### Dump element JSON
 
 If you trigger `cmd-shift-i`, Zed will copy a JSON representation of the current window contents to the clipboard. You can paste this in a tool like [DJSON](https://chrome.google.com/webstore/detail/djson-json-viewer-formatt/chaeijjekipecdajnijdldjjipaegdjc?hl=en) to navigate the state of on-screen elements in a structured way.

crates/buffer/Cargo.toml 🔗

@@ -1,14 +1,13 @@
 [package]
 name = "buffer"
 version = "0.1.0"
-edition = "2018"
+edition = "2021"
 
 [features]
 test-support = ["rand", "seahash"]
 
 [dependencies]
 clock = { path = "../clock" }
-rpc = { path = "../rpc" }
 sum_tree = { path = "../sum_tree" }
 anyhow = "1.0.38"
 arrayvec = "0.7.1"

crates/buffer/src/anchor.rs 🔗

@@ -1,15 +1,15 @@
-use super::{Buffer, Content, Point};
+use super::{Buffer, Content, FromAnchor, FullOffset, Point, ToOffset};
 use anyhow::Result;
 use std::{
     cmp::Ordering,
     fmt::{Debug, Formatter},
     ops::Range,
 };
-use sum_tree::Bias;
+use sum_tree::{Bias, SumTree};
 
 #[derive(Clone, Eq, PartialEq, Debug, Hash)]
 pub struct Anchor {
-    pub offset: usize,
+    pub full_offset: FullOffset,
     pub bias: Bias,
     pub version: clock::Global,
 }
@@ -17,7 +17,7 @@ pub struct Anchor {
 #[derive(Clone)]
 pub struct AnchorMap<T> {
     pub(crate) version: clock::Global,
-    pub(crate) entries: Vec<((usize, Bias), T)>,
+    pub(crate) entries: Vec<((FullOffset, Bias), T)>,
 }
 
 #[derive(Clone)]
@@ -26,16 +26,45 @@ pub struct AnchorSet(pub(crate) AnchorMap<()>);
 #[derive(Clone)]
 pub struct AnchorRangeMap<T> {
     pub(crate) version: clock::Global,
-    pub(crate) entries: Vec<(Range<(usize, Bias)>, T)>,
+    pub(crate) entries: Vec<(Range<(FullOffset, Bias)>, T)>,
 }
 
 #[derive(Clone)]
 pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>);
 
+#[derive(Clone)]
+pub struct AnchorRangeMultimap<T: Clone> {
+    pub(crate) entries: SumTree<AnchorRangeMultimapEntry<T>>,
+    pub(crate) version: clock::Global,
+    pub(crate) start_bias: Bias,
+    pub(crate) end_bias: Bias,
+}
+
+#[derive(Clone)]
+pub(crate) struct AnchorRangeMultimapEntry<T> {
+    pub(crate) range: FullOffsetRange,
+    pub(crate) value: T,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct FullOffsetRange {
+    pub(crate) start: FullOffset,
+    pub(crate) end: FullOffset,
+}
+
+#[derive(Clone, Debug)]
+pub(crate) struct AnchorRangeMultimapSummary {
+    start: FullOffset,
+    end: FullOffset,
+    min_start: FullOffset,
+    max_end: FullOffset,
+    count: usize,
+}
+
 impl Anchor {
     pub fn min() -> Self {
         Self {
-            offset: 0,
+            full_offset: FullOffset(0),
             bias: Bias::Left,
             version: Default::default(),
         }
@@ -43,7 +72,7 @@ impl Anchor {
 
     pub fn max() -> Self {
         Self {
-            offset: usize::MAX,
+            full_offset: FullOffset::MAX,
             bias: Bias::Right,
             version: Default::default(),
         }
@@ -57,7 +86,7 @@ impl Anchor {
         }
 
         let offset_comparison = if self.version == other.version {
-            self.offset.cmp(&other.offset)
+            self.full_offset.cmp(&other.full_offset)
         } else {
             buffer
                 .full_offset_for_anchor(self)
@@ -147,12 +176,17 @@ impl<T> AnchorRangeMap<T> {
         self.entries.len()
     }
 
-    pub fn from_raw(version: clock::Global, entries: Vec<(Range<(usize, Bias)>, T)>) -> Self {
+    pub fn from_full_offset_ranges(
+        version: clock::Global,
+        entries: Vec<(Range<(FullOffset, Bias)>, T)>,
+    ) -> Self {
         Self { version, entries }
     }
 
-    pub fn raw_entries(&self) -> &[(Range<(usize, Bias)>, T)] {
-        &self.entries
+    pub fn full_offset_ranges(&self) -> impl Iterator<Item = (Range<FullOffset>, &T)> {
+        self.entries
+            .iter()
+            .map(|(range, value)| (range.start.0..range.end.0, value))
     }
 
     pub fn point_ranges<'a>(
@@ -229,6 +263,196 @@ impl AnchorRangeSet {
     }
 }
 
+impl<T: Clone> Default for AnchorRangeMultimap<T> {
+    fn default() -> Self {
+        Self {
+            entries: Default::default(),
+            version: Default::default(),
+            start_bias: Bias::Left,
+            end_bias: Bias::Left,
+        }
+    }
+}
+
+impl<T: Clone> AnchorRangeMultimap<T> {
+    pub fn version(&self) -> &clock::Global {
+        &self.version
+    }
+
+    pub fn intersecting_ranges<'a, I, O>(
+        &'a self,
+        range: Range<I>,
+        content: Content<'a>,
+        inclusive: bool,
+    ) -> impl Iterator<Item = (usize, Range<O>, &T)> + 'a
+    where
+        I: ToOffset,
+        O: FromAnchor,
+    {
+        let end_bias = if inclusive { Bias::Right } else { Bias::Left };
+        let range = range.start.to_full_offset(&content, Bias::Left)
+            ..range.end.to_full_offset(&content, end_bias);
+        let mut cursor = self.entries.filter::<_, usize>(
+            {
+                let content = content.clone();
+                let mut endpoint = Anchor {
+                    full_offset: FullOffset(0),
+                    bias: Bias::Right,
+                    version: self.version.clone(),
+                };
+                move |summary: &AnchorRangeMultimapSummary| {
+                    endpoint.full_offset = summary.max_end;
+                    endpoint.bias = self.end_bias;
+                    let max_end = endpoint.to_full_offset(&content, self.end_bias);
+                    let start_cmp = range.start.cmp(&max_end);
+
+                    endpoint.full_offset = summary.min_start;
+                    endpoint.bias = self.start_bias;
+                    let min_start = endpoint.to_full_offset(&content, self.start_bias);
+                    let end_cmp = range.end.cmp(&min_start);
+
+                    if inclusive {
+                        start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
+                    } else {
+                        start_cmp == Ordering::Less && end_cmp == Ordering::Greater
+                    }
+                }
+            },
+            &(),
+        );
+
+        std::iter::from_fn({
+            let mut endpoint = Anchor {
+                full_offset: FullOffset(0),
+                bias: Bias::Left,
+                version: self.version.clone(),
+            };
+            move || {
+                if let Some(item) = cursor.item() {
+                    let ix = *cursor.start();
+                    endpoint.full_offset = item.range.start;
+                    endpoint.bias = self.start_bias;
+                    let start = O::from_anchor(&endpoint, &content);
+                    endpoint.full_offset = item.range.end;
+                    endpoint.bias = self.end_bias;
+                    let end = O::from_anchor(&endpoint, &content);
+                    let value = &item.value;
+                    cursor.next(&());
+                    Some((ix, start..end, value))
+                } else {
+                    None
+                }
+            }
+        })
+    }
+
+    pub fn from_full_offset_ranges(
+        version: clock::Global,
+        start_bias: Bias,
+        end_bias: Bias,
+        entries: impl Iterator<Item = (Range<FullOffset>, T)>,
+    ) -> Self {
+        Self {
+            version,
+            start_bias,
+            end_bias,
+            entries: SumTree::from_iter(
+                entries.map(|(range, value)| AnchorRangeMultimapEntry {
+                    range: FullOffsetRange {
+                        start: range.start,
+                        end: range.end,
+                    },
+                    value,
+                }),
+                &(),
+            ),
+        }
+    }
+
+    pub fn full_offset_ranges(&self) -> impl Iterator<Item = (Range<FullOffset>, &T)> {
+        self.entries
+            .cursor::<()>()
+            .map(|entry| (entry.range.start..entry.range.end, &entry.value))
+    }
+}
+
+impl<T: Clone> sum_tree::Item for AnchorRangeMultimapEntry<T> {
+    type Summary = AnchorRangeMultimapSummary;
+
+    fn summary(&self) -> Self::Summary {
+        AnchorRangeMultimapSummary {
+            start: self.range.start,
+            end: self.range.end,
+            min_start: self.range.start,
+            max_end: self.range.end,
+            count: 1,
+        }
+    }
+}
+
+impl Default for AnchorRangeMultimapSummary {
+    fn default() -> Self {
+        Self {
+            start: FullOffset(0),
+            end: FullOffset::MAX,
+            min_start: FullOffset::MAX,
+            max_end: FullOffset(0),
+            count: 0,
+        }
+    }
+}
+
+impl sum_tree::Summary for AnchorRangeMultimapSummary {
+    type Context = ();
+
+    fn add_summary(&mut self, other: &Self, _: &Self::Context) {
+        self.min_start = self.min_start.min(other.min_start);
+        self.max_end = self.max_end.max(other.max_end);
+
+        #[cfg(debug_assertions)]
+        {
+            let start_comparison = self.start.cmp(&other.start);
+            assert!(start_comparison <= Ordering::Equal);
+            if start_comparison == Ordering::Equal {
+                assert!(self.end.cmp(&other.end) >= Ordering::Equal);
+            }
+        }
+
+        self.start = other.start;
+        self.end = other.end;
+        self.count += other.count;
+    }
+}
+
+impl Default for FullOffsetRange {
+    fn default() -> Self {
+        Self {
+            start: FullOffset(0),
+            end: FullOffset::MAX,
+        }
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize {
+    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
+        *self += summary.count;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange {
+    fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
+        self.start = summary.start;
+        self.end = summary.end;
+    }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange {
+    fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering {
+        Ord::cmp(&self.start, &cursor_location.start)
+            .then_with(|| Ord::cmp(&cursor_location.end, &self.end))
+    }
+}
+
 pub trait AnchorRangeExt {
     fn cmp<'a>(&self, b: &Range<Anchor>, buffer: impl Into<Content<'a>>) -> Result<Ordering>;
 }
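To give a sense of how the new `AnchorRangeMultimap` above is meant to be used, here is a rough usage sketch, not code from this PR: values (for example diagnostic messages) are keyed by ranges that are stored as anchors, so they keep tracking the same text as the buffer is edited. The sketch assumes `usize` implements the `ToOffset` and `FromAnchor` traits, that `History` and `Bias` are importable as shown, and that the example offsets and messages are made up.

```rust
use buffer::{Buffer, History};
use sum_tree::Bias;

fn main() {
    // Illustrative only; assumes `usize: ToOffset + FromAnchor`.
    let buffer = Buffer::new(0, 0, History::new("fn main() { let foo = 1; }".into()));
    let snapshot = buffer.snapshot();

    // Anchor a made-up diagnostic range so it survives later edits.
    let diagnostics = snapshot.content().anchor_range_multimap(
        Bias::Left,
        Bias::Right,
        vec![(16..19, "unused variable: `foo`")],
    );

    // Resolve the anchored ranges against the current content and find
    // the entries intersecting a query range.
    for (ix, range, message) in
        diagnostics.intersecting_ranges::<_, usize>(0..26, snapshot.content(), false)
    {
        println!("{}: {} at {:?}", ix, message, range);
    }
}
```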

crates/buffer/src/lib.rs 🔗

@@ -1,6 +1,7 @@
 mod anchor;
 mod operation_queue;
 mod point;
+mod point_utf16;
 #[cfg(any(test, feature = "test-support"))]
 pub mod random_char_iter;
 pub mod rope;
@@ -13,16 +14,16 @@ use anyhow::{anyhow, Result};
 use clock::ReplicaId;
 use operation_queue::OperationQueue;
 pub use point::*;
+pub use point_utf16::*;
 #[cfg(any(test, feature = "test-support"))]
 pub use random_char_iter::*;
+use rope::TextDimension;
 pub use rope::{Chunks, Rope, TextSummary};
-use rpc::proto;
 pub use selection::*;
 use std::{
-    cmp,
-    convert::TryFrom,
+    cmp::{self, Reverse},
     iter::Iterator,
-    ops::Range,
+    ops::{self, Range},
     str,
     sync::Arc,
     time::{Duration, Instant},
@@ -32,7 +33,7 @@ use sum_tree::{FilterCursor, SumTree};
 
 #[cfg(any(test, feature = "test-support"))]
 #[derive(Clone, Default)]
-struct DeterministicState;
+pub struct DeterministicState;
 
 #[cfg(any(test, feature = "test-support"))]
 impl std::hash::BuildHasher for DeterministicState {
@@ -78,7 +79,7 @@ pub struct Transaction {
     start: clock::Global,
     end: clock::Global,
     edits: Vec<clock::Local>,
-    ranges: Vec<Range<usize>>,
+    ranges: Vec<Range<FullOffset>>,
     selections_before: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
     selections_after: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
     first_edit_at: Instant,
@@ -95,7 +96,7 @@ impl Transaction {
         self.end.observe(edit.timestamp.local());
 
         let mut other_ranges = edit.ranges.iter().peekable();
-        let mut new_ranges: Vec<Range<usize>> = Vec::new();
+        let mut new_ranges = Vec::new();
         let insertion_len = edit.new_text.as_ref().map_or(0, |t| t.len());
         let mut delta = 0;
 
@@ -309,48 +310,42 @@ impl UndoMap {
     }
 }
 
-struct Edits<'a, F: Fn(&FragmentSummary) -> bool> {
-    visible_text: &'a Rope,
-    deleted_text: &'a Rope,
-    cursor: Option<FilterCursor<'a, F, Fragment, FragmentTextSummary>>,
+struct Edits<'a, D: TextDimension<'a>, F: FnMut(&FragmentSummary) -> bool> {
+    visible_cursor: rope::Cursor<'a>,
+    deleted_cursor: rope::Cursor<'a>,
+    fragments_cursor: Option<FilterCursor<'a, F, Fragment, FragmentTextSummary>>,
     undos: &'a UndoMap,
-    since: clock::Global,
-    old_offset: usize,
-    new_offset: usize,
-    old_point: Point,
-    new_point: Point,
+    since: &'a clock::Global,
+    old_end: D,
+    new_end: D,
 }
 
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
-pub struct Edit {
-    pub old_bytes: Range<usize>,
-    pub new_bytes: Range<usize>,
-    pub old_lines: Range<Point>,
+pub struct Edit<D> {
+    pub old: Range<D>,
+    pub new: Range<D>,
 }
 
-impl Edit {
-    pub fn delta(&self) -> isize {
-        self.inserted_bytes() as isize - self.deleted_bytes() as isize
-    }
-
-    pub fn deleted_bytes(&self) -> usize {
-        self.old_bytes.end - self.old_bytes.start
-    }
-
-    pub fn inserted_bytes(&self) -> usize {
-        self.new_bytes.end - self.new_bytes.start
-    }
-
-    pub fn deleted_lines(&self) -> Point {
-        self.old_lines.end - self.old_lines.start
+impl<D1, D2> Edit<(D1, D2)> {
+    pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
+        (
+            Edit {
+                old: self.old.start.0..self.old.end.0,
+                new: self.new.start.0..self.new.end.0,
+            },
+            Edit {
+                old: self.old.start.1..self.old.end.1,
+                new: self.new.start.1..self.new.end.1,
+            },
+        )
     }
 }
 
 #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
-struct InsertionTimestamp {
-    replica_id: ReplicaId,
-    local: clock::Seq,
-    lamport: clock::Seq,
+pub struct InsertionTimestamp {
+    pub replica_id: ReplicaId,
+    pub local: clock::Seq,
+    pub lamport: clock::Seq,
 }
 
 impl InsertionTimestamp {
@@ -425,18 +420,18 @@ pub enum Operation {
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct EditOperation {
-    timestamp: InsertionTimestamp,
-    version: clock::Global,
-    ranges: Vec<Range<usize>>,
-    new_text: Option<String>,
+    pub timestamp: InsertionTimestamp,
+    pub version: clock::Global,
+    pub ranges: Vec<Range<FullOffset>>,
+    pub new_text: Option<String>,
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct UndoOperation {
-    id: clock::Local,
-    counts: HashMap<clock::Local, u32>,
-    ranges: Vec<Range<usize>>,
-    version: clock::Global,
+    pub id: clock::Local,
+    pub counts: HashMap<clock::Local, u32>,
+    pub ranges: Vec<Range<FullOffset>>,
+    pub version: clock::Global,
 }
 
 impl Buffer {
@@ -475,34 +470,6 @@ impl Buffer {
         }
     }
 
-    pub fn from_proto(replica_id: u16, message: proto::Buffer) -> Result<Self> {
-        let mut buffer = Buffer::new(replica_id, message.id, History::new(message.content.into()));
-        let ops = message
-            .history
-            .into_iter()
-            .map(|op| Operation::Edit(op.into()));
-        buffer.apply_ops(ops)?;
-        buffer.selections = message
-            .selections
-            .into_iter()
-            .map(|set| {
-                let set = SelectionSet::try_from(set)?;
-                Result::<_, anyhow::Error>::Ok((set.id, set))
-            })
-            .collect::<Result<_, _>>()?;
-        Ok(buffer)
-    }
-
-    pub fn to_proto(&self) -> proto::Buffer {
-        let ops = self.history.ops.values().map(Into::into).collect();
-        proto::Buffer {
-            id: self.remote_id,
-            content: self.history.base_text.to_string(),
-            history: ops,
-            selections: self.selections.iter().map(|(_, set)| set.into()).collect(),
-        }
-    }
-
     pub fn version(&self) -> clock::Global {
         self.version.clone()
     }
@@ -510,6 +477,8 @@ impl Buffer {
     pub fn snapshot(&self) -> Snapshot {
         Snapshot {
             visible_text: self.visible_text.clone(),
+            deleted_text: self.deleted_text.clone(),
+            undo_map: self.undo_map.clone(),
             fragments: self.fragments.clone(),
             version: self.version.clone(),
         }
@@ -551,7 +520,7 @@ impl Buffer {
     }
 
     pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
-        self.visible_text.clip_point(point, bias)
+        self.content().clip_point(point, bias)
     }
 
     pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
@@ -717,7 +686,7 @@ impl Buffer {
                 fragment_start = old_fragments.start().visible;
             }
 
-            let full_range_start = range.start + old_fragments.start().deleted;
+            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
 
             // Preserve any portion of the current fragment that precedes this range.
             if fragment_start < range.start {
@@ -765,7 +734,7 @@ impl Buffer {
                 }
             }
 
-            let full_range_end = range.end + old_fragments.start().deleted;
+            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
             edit.ranges.push(full_range_start..full_range_end);
         }
 
@@ -884,7 +853,7 @@ impl Buffer {
     fn apply_remote_edit(
         &mut self,
         version: &clock::Global,
-        ranges: &[Range<usize>],
+        ranges: &[Range<FullOffset>],
         new_text: Option<&str>,
         timestamp: InsertionTimestamp,
     ) {
@@ -895,24 +864,27 @@ impl Buffer {
         let cx = Some(version.clone());
         let mut new_ropes =
             RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
-        let mut old_fragments = self.fragments.cursor::<VersionedOffset>();
-        let mut new_fragments =
-            old_fragments.slice(&VersionedOffset::Offset(ranges[0].start), Bias::Left, &cx);
+        let mut old_fragments = self.fragments.cursor::<VersionedFullOffset>();
+        let mut new_fragments = old_fragments.slice(
+            &VersionedFullOffset::Offset(ranges[0].start),
+            Bias::Left,
+            &cx,
+        );
         new_ropes.push_tree(new_fragments.summary().text);
 
-        let mut fragment_start = old_fragments.start().offset();
+        let mut fragment_start = old_fragments.start().full_offset();
         for range in ranges {
-            let fragment_end = old_fragments.end(&cx).offset();
+            let fragment_end = old_fragments.end(&cx).full_offset();
 
             // If the current fragment ends before this range, then jump ahead to the first fragment
             // that extends past the start of this range, reusing any intervening fragments.
             if fragment_end < range.start {
                 // If the current fragment has been partially consumed, then consume the rest of it
                 // and advance to the next fragment before slicing.
-                if fragment_start > old_fragments.start().offset() {
+                if fragment_start > old_fragments.start().full_offset() {
                     if fragment_end > fragment_start {
                         let mut suffix = old_fragments.item().unwrap().clone();
-                        suffix.len = fragment_end - fragment_start;
+                        suffix.len = fragment_end.0 - fragment_start.0;
                         new_ropes.push_fragment(&suffix, suffix.visible);
                         new_fragments.push(suffix, &None);
                     }
@@ -920,21 +892,21 @@ impl Buffer {
                 }
 
                 let slice =
-                    old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Left, &cx);
+                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
                 new_ropes.push_tree(slice.summary().text);
                 new_fragments.push_tree(slice, &None);
-                fragment_start = old_fragments.start().offset();
+                fragment_start = old_fragments.start().full_offset();
             }
 
             // If we are at the end of a non-concurrent fragment, advance to the next one.
-            let fragment_end = old_fragments.end(&cx).offset();
+            let fragment_end = old_fragments.end(&cx).full_offset();
             if fragment_end == range.start && fragment_end > fragment_start {
                 let mut fragment = old_fragments.item().unwrap().clone();
-                fragment.len = fragment_end - fragment_start;
+                fragment.len = fragment_end.0 - fragment_start.0;
                 new_ropes.push_fragment(&fragment, fragment.visible);
                 new_fragments.push(fragment, &None);
                 old_fragments.next(&cx);
-                fragment_start = old_fragments.start().offset();
+                fragment_start = old_fragments.start().full_offset();
             }
 
             // Skip over insertions that are concurrent to this edit, but have a lower lamport
@@ -956,7 +928,7 @@ impl Buffer {
             // Preserve any portion of the current fragment that precedes this range.
             if fragment_start < range.start {
                 let mut prefix = old_fragments.item().unwrap().clone();
-                prefix.len = range.start - fragment_start;
+                prefix.len = range.start.0 - fragment_start.0;
                 fragment_start = range.start;
                 new_ropes.push_fragment(&prefix, prefix.visible);
                 new_fragments.push(prefix, &None);
@@ -981,11 +953,11 @@ impl Buffer {
             // portions as deleted.
             while fragment_start < range.end {
                 let fragment = old_fragments.item().unwrap();
-                let fragment_end = old_fragments.end(&cx).offset();
+                let fragment_end = old_fragments.end(&cx).full_offset();
                 let mut intersection = fragment.clone();
                 let intersection_end = cmp::min(range.end, fragment_end);
                 if fragment.was_visible(version, &self.undo_map) {
-                    intersection.len = intersection_end - fragment_start;
+                    intersection.len = intersection_end.0 - fragment_start.0;
                     intersection.deletions.insert(timestamp.local());
                     intersection.visible = false;
                 }
@@ -1002,11 +974,11 @@ impl Buffer {
 
         // If the current fragment has been partially consumed, then consume the rest of it
         // and advance to the next fragment before slicing.
-        if fragment_start > old_fragments.start().offset() {
-            let fragment_end = old_fragments.end(&cx).offset();
+        if fragment_start > old_fragments.start().full_offset() {
+            let fragment_end = old_fragments.end(&cx).full_offset();
             if fragment_end > fragment_start {
                 let mut suffix = old_fragments.item().unwrap().clone();
-                suffix.len = fragment_end - fragment_start;
+                suffix.len = fragment_end.0 - fragment_start.0;
                 new_ropes.push_fragment(&suffix, suffix.visible);
                 new_fragments.push(suffix, &None);
             }
@@ -1035,9 +1007,9 @@ impl Buffer {
         }
         let cx = Some(cx);
 
-        let mut old_fragments = self.fragments.cursor::<VersionedOffset>();
+        let mut old_fragments = self.fragments.cursor::<VersionedFullOffset>();
         let mut new_fragments = old_fragments.slice(
-            &VersionedOffset::Offset(undo.ranges[0].start),
+            &VersionedFullOffset::Offset(undo.ranges[0].start),
             Bias::Right,
             &cx,
         );
@@ -1046,11 +1018,14 @@ impl Buffer {
         new_ropes.push_tree(new_fragments.summary().text);
 
         for range in &undo.ranges {
-            let mut end_offset = old_fragments.end(&cx).offset();
+            let mut end_offset = old_fragments.end(&cx).full_offset();
 
             if end_offset < range.start {
-                let preceding_fragments =
-                    old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Right, &cx);
+                let preceding_fragments = old_fragments.slice(
+                    &VersionedFullOffset::Offset(range.start),
+                    Bias::Right,
+                    &cx,
+                );
                 new_ropes.push_tree(preceding_fragments.summary().text);
                 new_fragments.push_tree(preceding_fragments, &None);
             }
@@ -1070,16 +1045,16 @@ impl Buffer {
                     new_fragments.push(fragment, &None);
 
                     old_fragments.next(&cx);
-                    if end_offset == old_fragments.end(&cx).offset() {
+                    if end_offset == old_fragments.end(&cx).full_offset() {
                         let unseen_fragments = old_fragments.slice(
-                            &VersionedOffset::Offset(end_offset),
+                            &VersionedFullOffset::Offset(end_offset),
                             Bias::Right,
                             &cx,
                         );
                         new_ropes.push_tree(unseen_fragments.summary().text);
                         new_fragments.push_tree(unseen_fragments, &None);
                     }
-                    end_offset = old_fragments.end(&cx).offset();
+                    end_offset = old_fragments.end(&cx).full_offset();
                 } else {
                     break;
                 }
@@ -1198,6 +1173,14 @@ impl Buffer {
             .retain(|set_id, _| set_id.replica_id != replica_id)
     }
 
+    pub fn base_text(&self) -> &Arc<str> {
+        &self.history.base_text
+    }
+
+    pub fn history(&self) -> impl Iterator<Item = &EditOperation> {
+        self.history.ops.values()
+    }
+
     pub fn undo(&mut self) -> Vec<Operation> {
         let mut ops = Vec::new();
         if let Some(transaction) = self.history.pop_undo().cloned() {
@@ -1326,6 +1309,10 @@ impl Buffer {
         }
     }
 
+    pub fn add_raw_selection_set(&mut self, id: SelectionSetId, selections: SelectionSet) {
+        self.selections.insert(id, selections);
+    }
+
     pub fn set_active_selection_set(
         &mut self,
         set_id: Option<SelectionSetId>,
@@ -1360,28 +1347,14 @@ impl Buffer {
         })
     }
 
-    pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator<Item = Edit> {
-        let since_2 = since.clone();
-        let cursor = if since == self.version {
-            None
-        } else {
-            Some(self.fragments.filter(
-                move |summary| summary.max_version.changed_since(&since_2),
-                &None,
-            ))
-        };
-
-        Edits {
-            visible_text: &self.visible_text,
-            deleted_text: &self.deleted_text,
-            cursor,
-            undos: &self.undo_map,
-            since,
-            old_offset: 0,
-            new_offset: 0,
-            old_point: Point::zero(),
-            new_point: Point::zero(),
-        }
+    pub fn edits_since<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+    ) -> impl 'a + Iterator<Item = Edit<D>>
+    where
+        D: 'a + TextDimension<'a> + Ord,
+    {
+        self.content().edits_since(since)
     }
 }
 
@@ -1539,6 +1512,8 @@ impl Buffer {
 #[derive(Clone)]
 pub struct Snapshot {
     visible_text: Rope,
+    deleted_text: Rope,
+    undo_map: UndoMap,
     fragments: SumTree<Fragment>,
     version: clock::Global,
 }
@@ -1598,11 +1573,11 @@ impl Snapshot {
     }
 
     pub fn to_offset(&self, point: Point) -> usize {
-        self.visible_text.to_offset(point)
+        self.visible_text.point_to_offset(point)
     }
 
     pub fn to_point(&self, offset: usize) -> Point {
-        self.visible_text.to_point(offset)
+        self.visible_text.offset_to_point(offset)
     }
 
     pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
@@ -1613,13 +1588,30 @@ impl Snapshot {
         self.content().anchor_at(position, Bias::Right)
     }
 
+    pub fn edits_since<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+    ) -> impl 'a + Iterator<Item = Edit<D>>
+    where
+        D: 'a + TextDimension<'a> + Ord,
+    {
+        self.content().edits_since(since)
+    }
+
+    pub fn version(&self) -> &clock::Global {
+        &self.version
+    }
+
     pub fn content(&self) -> Content {
         self.into()
     }
 }
 
+#[derive(Clone)]
 pub struct Content<'a> {
     visible_text: &'a Rope,
+    deleted_text: &'a Rope,
+    undo_map: &'a UndoMap,
     fragments: &'a SumTree<Fragment>,
     version: &'a clock::Global,
 }
@@ -1628,6 +1620,8 @@ impl<'a> From<&'a Snapshot> for Content<'a> {
     fn from(snapshot: &'a Snapshot) -> Self {
         Self {
             visible_text: &snapshot.visible_text,
+            deleted_text: &snapshot.deleted_text,
+            undo_map: &snapshot.undo_map,
             fragments: &snapshot.fragments,
             version: &snapshot.version,
         }
@@ -1638,6 +1632,8 @@ impl<'a> From<&'a Buffer> for Content<'a> {
     fn from(buffer: &'a Buffer) -> Self {
         Self {
             visible_text: &buffer.visible_text,
+            deleted_text: &buffer.deleted_text,
+            undo_map: &buffer.undo_map,
             fragments: &buffer.fragments,
             version: &buffer.version,
         }
@@ -1648,6 +1644,8 @@ impl<'a> From<&'a mut Buffer> for Content<'a> {
     fn from(buffer: &'a mut Buffer) -> Self {
         Self {
             visible_text: &buffer.visible_text,
+            deleted_text: &buffer.deleted_text,
+            undo_map: &buffer.undo_map,
             fragments: &buffer.fragments,
             version: &buffer.version,
         }
@@ -1658,6 +1656,8 @@ impl<'a> From<&'a Content<'a>> for Content<'a> {
     fn from(content: &'a Content) -> Self {
         Self {
             visible_text: &content.visible_text,
+            deleted_text: &content.deleted_text,
+            undo_map: &content.undo_map,
             fragments: &content.fragments,
             version: &content.version,
         }
@@ -1713,10 +1713,14 @@ impl<'a> Content<'a> {
 
     fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary {
         let cx = Some(anchor.version.clone());
-        let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>();
-        cursor.seek(&VersionedOffset::Offset(anchor.offset), anchor.bias, &cx);
+        let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>();
+        cursor.seek(
+            &VersionedFullOffset::Offset(anchor.full_offset),
+            anchor.bias,
+            &cx,
+        );
         let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
-            anchor.offset - cursor.start().0.offset()
+            anchor.full_offset - cursor.start().0.full_offset()
         } else {
             0
         };
@@ -1734,15 +1738,15 @@ impl<'a> Content<'a> {
         let cx = Some(map.version.clone());
         let mut summary = TextSummary::default();
         let mut rope_cursor = self.visible_text.cursor(0);
-        let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>();
+        let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>();
         map.entries.iter().map(move |((offset, bias), value)| {
-            cursor.seek_forward(&VersionedOffset::Offset(*offset), *bias, &cx);
+            cursor.seek_forward(&VersionedFullOffset::Offset(*offset), *bias, &cx);
             let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
-                offset - cursor.start().0.offset()
+                *offset - cursor.start().0.full_offset()
             } else {
                 0
             };
-            summary += rope_cursor.summary(cursor.start().1 + overshoot);
+            summary += rope_cursor.summary::<TextSummary>(cursor.start().1 + overshoot);
             (summary.clone(), value)
         })
     }
@@ -1754,29 +1758,33 @@ impl<'a> Content<'a> {
         let cx = Some(map.version.clone());
         let mut summary = TextSummary::default();
         let mut rope_cursor = self.visible_text.cursor(0);
-        let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>();
+        let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>();
         map.entries.iter().map(move |(range, value)| {
             let Range {
                 start: (start_offset, start_bias),
                 end: (end_offset, end_bias),
             } = range;
 
-            cursor.seek_forward(&VersionedOffset::Offset(*start_offset), *start_bias, &cx);
+            cursor.seek_forward(
+                &VersionedFullOffset::Offset(*start_offset),
+                *start_bias,
+                &cx,
+            );
             let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
-                start_offset - cursor.start().0.offset()
+                *start_offset - cursor.start().0.full_offset()
             } else {
                 0
             };
-            summary += rope_cursor.summary(cursor.start().1 + overshoot);
+            summary += rope_cursor.summary::<TextSummary>(cursor.start().1 + overshoot);
             let start_summary = summary.clone();
 
-            cursor.seek_forward(&VersionedOffset::Offset(*end_offset), *end_bias, &cx);
+            cursor.seek_forward(&VersionedFullOffset::Offset(*end_offset), *end_bias, &cx);
             let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
-                end_offset - cursor.start().0.offset()
+                *end_offset - cursor.start().0.full_offset()
             } else {
                 0
             };
-            summary += rope_cursor.summary(cursor.start().1 + overshoot);
+            summary += rope_cursor.summary::<TextSummary>(cursor.start().1 + overshoot);
             let end_summary = summary.clone();
 
             (start_summary..end_summary, value)
@@ -1784,13 +1792,8 @@ impl<'a> Content<'a> {
     }
 
     fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
-        let offset = position.to_offset(self);
-        let max_offset = self.len();
-        assert!(offset <= max_offset, "offset is out of range");
-        let mut cursor = self.fragments.cursor::<FragmentTextSummary>();
-        cursor.seek(&offset, bias, &None);
         Anchor {
-            offset: offset + cursor.start().deleted,
+            full_offset: position.to_full_offset(self, bias),
             bias,
             version: self.version.clone(),
         }
@@ -1806,7 +1809,7 @@ impl<'a> Content<'a> {
             .into_iter()
             .map(|((offset, bias), value)| {
                 cursor.seek_forward(&offset, bias, &None);
-                let full_offset = cursor.start().deleted + offset;
+                let full_offset = FullOffset(cursor.start().deleted + offset);
                 ((full_offset, bias), value)
             })
             .collect();
@@ -1828,9 +1831,9 @@ impl<'a> Content<'a> {
                     end: (end_offset, end_bias),
                 } = range;
                 cursor.seek_forward(&start_offset, start_bias, &None);
-                let full_start_offset = cursor.start().deleted + start_offset;
+                let full_start_offset = FullOffset(cursor.start().deleted + start_offset);
                 cursor.seek_forward(&end_offset, end_bias, &None);
-                let full_end_offset = cursor.start().deleted + end_offset;
+                let full_end_offset = FullOffset(cursor.start().deleted + end_offset);
                 (
                     (full_start_offset, start_bias)..(full_end_offset, end_bias),
                     value,
@@ -1855,19 +1858,61 @@ impl<'a> Content<'a> {
         AnchorRangeSet(self.anchor_range_map(entries.into_iter().map(|range| (range, ()))))
     }
 
-    fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize {
+    pub fn anchor_range_multimap<T, E, O>(
+        &self,
+        start_bias: Bias,
+        end_bias: Bias,
+        entries: E,
+    ) -> AnchorRangeMultimap<T>
+    where
+        T: Clone,
+        E: IntoIterator<Item = (Range<O>, T)>,
+        O: ToOffset,
+    {
+        let mut entries = entries
+            .into_iter()
+            .map(|(range, value)| AnchorRangeMultimapEntry {
+                range: FullOffsetRange {
+                    start: range.start.to_full_offset(self, start_bias),
+                    end: range.end.to_full_offset(self, end_bias),
+                },
+                value,
+            })
+            .collect::<Vec<_>>();
+        entries.sort_unstable_by_key(|i| (i.range.start, Reverse(i.range.end)));
+        AnchorRangeMultimap {
+            entries: SumTree::from_iter(entries, &()),
+            version: self.version.clone(),
+            start_bias,
+            end_bias,
+        }
+    }
+
+    fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset {
         let cx = Some(anchor.version.clone());
         let mut cursor = self
             .fragments
-            .cursor::<(VersionedOffset, FragmentTextSummary)>();
-        cursor.seek(&VersionedOffset::Offset(anchor.offset), anchor.bias, &cx);
+            .cursor::<(VersionedFullOffset, FragmentTextSummary)>();
+        cursor.seek(
+            &VersionedFullOffset::Offset(anchor.full_offset),
+            anchor.bias,
+            &cx,
+        );
         let overshoot = if cursor.item().is_some() {
-            anchor.offset - cursor.start().0.offset()
+            anchor.full_offset - cursor.start().0.full_offset()
         } else {
             0
         };
         let summary = cursor.start().1;
-        summary.visible + summary.deleted + overshoot
+        FullOffset(summary.visible + summary.deleted + overshoot)
+    }
+
+    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
+        self.visible_text.clip_point(point, bias)
+    }
+
+    pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+        self.visible_text.clip_point_utf16(point, bias)
     }
 
     fn point_for_offset(&self, offset: usize) -> Result<Point> {
@@ -1877,6 +1922,30 @@ impl<'a> Content<'a> {
             Err(anyhow!("offset out of bounds"))
         }
     }
+
+    pub fn edits_since<D>(&self, since: &'a clock::Global) -> impl 'a + Iterator<Item = Edit<D>>
+    where
+        D: 'a + TextDimension<'a> + Ord,
+    {
+        let fragments_cursor = if since == self.version {
+            None
+        } else {
+            Some(self.fragments.filter(
+                move |summary| summary.max_version.changed_since(since),
+                &None,
+            ))
+        };
+
+        Edits {
+            visible_cursor: self.visible_text.cursor(0),
+            deleted_cursor: self.deleted_text.cursor(0),
+            fragments_cursor,
+            undos: &self.undo_map,
+            since,
+            old_end: Default::default(),
+            new_end: Default::default(),
+        }
+    }
 }
 
 struct RopeBuilder<'a> {
@@ -1932,67 +2001,61 @@ impl<'a> RopeBuilder<'a> {
     }
 }
 
-impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> {
-    type Item = Edit;
+impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator
+    for Edits<'a, D, F>
+{
+    type Item = Edit<D>;
 
     fn next(&mut self) -> Option<Self::Item> {
-        let mut change: Option<Edit> = None;
-        let cursor = self.cursor.as_mut()?;
+        let mut pending_edit: Option<Edit<D>> = None;
+        let cursor = self.fragments_cursor.as_mut()?;
 
         while let Some(fragment) = cursor.item() {
-            let bytes = cursor.start().visible - self.new_offset;
-            let lines = self.visible_text.to_point(cursor.start().visible) - self.new_point;
-            self.old_offset += bytes;
-            self.old_point += &lines;
-            self.new_offset += bytes;
-            self.new_point += &lines;
+            let summary = self.visible_cursor.summary(cursor.start().visible);
+            self.old_end.add_assign(&summary);
+            self.new_end.add_assign(&summary);
+            if pending_edit
+                .as_ref()
+                .map_or(false, |change| change.new.end < self.new_end)
+            {
+                break;
+            }
 
             if !fragment.was_visible(&self.since, &self.undos) && fragment.visible {
-                let fragment_lines =
-                    self.visible_text.to_point(self.new_offset + fragment.len) - self.new_point;
-                if let Some(ref mut change) = change {
-                    if change.new_bytes.end == self.new_offset {
-                        change.new_bytes.end += fragment.len;
-                    } else {
-                        break;
-                    }
+                let fragment_summary = self.visible_cursor.summary(cursor.end(&None).visible);
+                let mut new_end = self.new_end.clone();
+                new_end.add_assign(&fragment_summary);
+                if let Some(pending_edit) = pending_edit.as_mut() {
+                    pending_edit.new.end = new_end.clone();
                 } else {
-                    change = Some(Edit {
-                        old_bytes: self.old_offset..self.old_offset,
-                        new_bytes: self.new_offset..self.new_offset + fragment.len,
-                        old_lines: self.old_point..self.old_point,
+                    pending_edit = Some(Edit {
+                        old: self.old_end.clone()..self.old_end.clone(),
+                        new: self.new_end.clone()..new_end.clone(),
                     });
                 }
 
-                self.new_offset += fragment.len;
-                self.new_point += &fragment_lines;
+                self.new_end = new_end;
             } else if fragment.was_visible(&self.since, &self.undos) && !fragment.visible {
-                let deleted_start = cursor.start().deleted;
-                let fragment_lines = self.deleted_text.to_point(deleted_start + fragment.len)
-                    - self.deleted_text.to_point(deleted_start);
-                if let Some(ref mut change) = change {
-                    if change.new_bytes.end == self.new_offset {
-                        change.old_bytes.end += fragment.len;
-                        change.old_lines.end += &fragment_lines;
-                    } else {
-                        break;
-                    }
+                self.deleted_cursor.seek_forward(cursor.start().deleted);
+                let fragment_summary = self.deleted_cursor.summary(cursor.end(&None).deleted);
+                let mut old_end = self.old_end.clone();
+                old_end.add_assign(&fragment_summary);
+                if let Some(pending_edit) = pending_edit.as_mut() {
+                    pending_edit.old.end = old_end.clone();
                 } else {
-                    change = Some(Edit {
-                        old_bytes: self.old_offset..self.old_offset + fragment.len,
-                        new_bytes: self.new_offset..self.new_offset,
-                        old_lines: self.old_point..self.old_point + &fragment_lines,
+                    pending_edit = Some(Edit {
+                        old: self.old_end.clone()..old_end.clone(),
+                        new: self.new_end.clone()..self.new_end.clone(),
                     });
                 }
 
-                self.old_offset += fragment.len;
-                self.old_point += &fragment_lines;
+                self.old_end = old_end;
             }
 
             cursor.next(&None);
         }
 
-        change
+        pending_edit
     }
 }
 
@@ -2075,12 +2138,48 @@ impl Default for FragmentSummary {
     }
 }
 
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct FullOffset(pub usize);
+
+impl FullOffset {
+    const MAX: Self = FullOffset(usize::MAX);
+}
+
+impl ops::AddAssign<usize> for FullOffset {
+    fn add_assign(&mut self, rhs: usize) {
+        self.0 += rhs;
+    }
+}
+
+impl ops::Add<usize> for FullOffset {
+    type Output = Self;
+
+    fn add(mut self, rhs: usize) -> Self::Output {
+        self += rhs;
+        self
+    }
+}
+
+impl ops::Sub for FullOffset {
+    type Output = usize;
+
+    fn sub(self, rhs: Self) -> Self::Output {
+        self.0 - rhs.0
+    }
+}
+
 impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize {
     fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
         *self += summary.text.visible;
     }
 }
 
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset {
+    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
+        self.0 += summary.text.visible + summary.text.deleted;
+    }
+}
+
 impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize {
     fn cmp(
         &self,
@@ -2092,28 +2191,28 @@ impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usiz
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
-enum VersionedOffset {
-    Offset(usize),
-    InvalidVersion,
+enum VersionedFullOffset {
+    Offset(FullOffset),
+    Invalid,
 }
 
-impl VersionedOffset {
-    fn offset(&self) -> usize {
-        if let Self::Offset(offset) = self {
-            *offset
+impl VersionedFullOffset {
+    fn full_offset(&self) -> FullOffset {
+        if let Self::Offset(position) = self {
+            *position
         } else {
             panic!("invalid version")
         }
     }
 }
 
-impl Default for VersionedOffset {
+impl Default for VersionedFullOffset {
     fn default() -> Self {
-        Self::Offset(0)
+        Self::Offset(Default::default())
     }
 }
 
-impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset {
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
     fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
         if let Self::Offset(offset) = self {
             let version = cx.as_ref().unwrap();
@@ -2124,18 +2223,18 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset {
                 .iter()
                 .all(|t| !version.observed(*t))
             {
-                *self = Self::InvalidVersion;
+                *self = Self::Invalid;
             }
         }
     }
 }
 
-impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedOffset {
-    fn cmp(&self, other: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
-        match (self, other) {
+impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset {
+    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
+        match (self, cursor_position) {
             (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
-            (Self::Offset(_), Self::InvalidVersion) => cmp::Ordering::Less,
-            (Self::InvalidVersion, _) => unreachable!(),
+            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
+            (Self::Invalid, _) => unreachable!(),
         }
     }
 }
@@ -2173,239 +2272,33 @@ impl Operation {
     }
 }
 
-impl<'a> Into<proto::Operation> for &'a Operation {
-    fn into(self) -> proto::Operation {
-        proto::Operation {
-            variant: Some(match self {
-                Operation::Edit(edit) => proto::operation::Variant::Edit(edit.into()),
-                Operation::Undo {
-                    undo,
-                    lamport_timestamp,
-                } => proto::operation::Variant::Undo(proto::operation::Undo {
-                    replica_id: undo.id.replica_id as u32,
-                    local_timestamp: undo.id.value,
-                    lamport_timestamp: lamport_timestamp.value,
-                    ranges: undo
-                        .ranges
-                        .iter()
-                        .map(|r| proto::Range {
-                            start: r.start as u64,
-                            end: r.end as u64,
-                        })
-                        .collect(),
-                    counts: undo
-                        .counts
-                        .iter()
-                        .map(|(edit_id, count)| proto::operation::UndoCount {
-                            replica_id: edit_id.replica_id as u32,
-                            local_timestamp: edit_id.value,
-                            count: *count,
-                        })
-                        .collect(),
-                    version: From::from(&undo.version),
-                }),
-                Operation::UpdateSelections {
-                    set_id,
-                    selections,
-                    lamport_timestamp,
-                } => proto::operation::Variant::UpdateSelections(
-                    proto::operation::UpdateSelections {
-                        replica_id: set_id.replica_id as u32,
-                        local_timestamp: set_id.value,
-                        lamport_timestamp: lamport_timestamp.value,
-                        version: selections.version().into(),
-                        selections: selections
-                            .raw_entries()
-                            .iter()
-                            .map(|(range, state)| proto::Selection {
-                                id: state.id as u64,
-                                start: range.start.0 as u64,
-                                end: range.end.0 as u64,
-                                reversed: state.reversed,
-                            })
-                            .collect(),
-                    },
-                ),
-                Operation::RemoveSelections {
-                    set_id,
-                    lamport_timestamp,
-                } => proto::operation::Variant::RemoveSelections(
-                    proto::operation::RemoveSelections {
-                        replica_id: set_id.replica_id as u32,
-                        local_timestamp: set_id.value,
-                        lamport_timestamp: lamport_timestamp.value,
-                    },
-                ),
-                Operation::SetActiveSelections {
-                    set_id,
-                    lamport_timestamp,
-                } => proto::operation::Variant::SetActiveSelections(
-                    proto::operation::SetActiveSelections {
-                        replica_id: lamport_timestamp.replica_id as u32,
-                        local_timestamp: set_id.map(|set_id| set_id.value),
-                        lamport_timestamp: lamport_timestamp.value,
-                    },
-                ),
-                #[cfg(test)]
-                Operation::Test(_) => unimplemented!(),
-            }),
-        }
-    }
-}
-
-impl<'a> Into<proto::operation::Edit> for &'a EditOperation {
-    fn into(self) -> proto::operation::Edit {
-        let ranges = self
-            .ranges
-            .iter()
-            .map(|range| proto::Range {
-                start: range.start as u64,
-                end: range.end as u64,
-            })
-            .collect();
-        proto::operation::Edit {
-            replica_id: self.timestamp.replica_id as u32,
-            local_timestamp: self.timestamp.local,
-            lamport_timestamp: self.timestamp.lamport,
-            version: From::from(&self.version),
-            ranges,
-            new_text: self.new_text.clone(),
-        }
-    }
-}
-
-impl TryFrom<proto::Operation> for Operation {
-    type Error = anyhow::Error;
+pub trait ToOffset {
+    fn to_offset<'a>(&self, content: impl Into<Content<'a>>) -> usize;
 
-    fn try_from(message: proto::Operation) -> Result<Self, Self::Error> {
-        Ok(
-            match message
-                .variant
-                .ok_or_else(|| anyhow!("missing operation variant"))?
-            {
-                proto::operation::Variant::Edit(edit) => Operation::Edit(edit.into()),
-                proto::operation::Variant::Undo(undo) => Operation::Undo {
-                    lamport_timestamp: clock::Lamport {
-                        replica_id: undo.replica_id as ReplicaId,
-                        value: undo.lamport_timestamp,
-                    },
-                    undo: UndoOperation {
-                        id: clock::Local {
-                            replica_id: undo.replica_id as ReplicaId,
-                            value: undo.local_timestamp,
-                        },
-                        counts: undo
-                            .counts
-                            .into_iter()
-                            .map(|c| {
-                                (
-                                    clock::Local {
-                                        replica_id: c.replica_id as ReplicaId,
-                                        value: c.local_timestamp,
-                                    },
-                                    c.count,
-                                )
-                            })
-                            .collect(),
-                        ranges: undo
-                            .ranges
-                            .into_iter()
-                            .map(|r| r.start as usize..r.end as usize)
-                            .collect(),
-                        version: undo.version.into(),
-                    },
-                },
-                proto::operation::Variant::UpdateSelections(message) => {
-                    let version = message.version.into();
-                    let entries = message
-                        .selections
-                        .iter()
-                        .map(|selection| {
-                            let range = (selection.start as usize, Bias::Left)
-                                ..(selection.end as usize, Bias::Right);
-                            let state = SelectionState {
-                                id: selection.id as usize,
-                                reversed: selection.reversed,
-                                goal: SelectionGoal::None,
-                            };
-                            (range, state)
-                        })
-                        .collect();
-                    let selections = AnchorRangeMap::from_raw(version, entries);
-
-                    Operation::UpdateSelections {
-                        set_id: clock::Lamport {
-                            replica_id: message.replica_id as ReplicaId,
-                            value: message.local_timestamp,
-                        },
-                        lamport_timestamp: clock::Lamport {
-                            replica_id: message.replica_id as ReplicaId,
-                            value: message.lamport_timestamp,
-                        },
-                        selections: Arc::from(selections),
-                    }
-                }
-                proto::operation::Variant::RemoveSelections(message) => {
-                    Operation::RemoveSelections {
-                        set_id: clock::Lamport {
-                            replica_id: message.replica_id as ReplicaId,
-                            value: message.local_timestamp,
-                        },
-                        lamport_timestamp: clock::Lamport {
-                            replica_id: message.replica_id as ReplicaId,
-                            value: message.lamport_timestamp,
-                        },
-                    }
-                }
-                proto::operation::Variant::SetActiveSelections(message) => {
-                    Operation::SetActiveSelections {
-                        set_id: message.local_timestamp.map(|value| clock::Lamport {
-                            replica_id: message.replica_id as ReplicaId,
-                            value,
-                        }),
-                        lamport_timestamp: clock::Lamport {
-                            replica_id: message.replica_id as ReplicaId,
-                            value: message.lamport_timestamp,
-                        },
-                    }
-                }
-            },
-        )
+    fn to_full_offset<'a>(&self, content: impl Into<Content<'a>>, bias: Bias) -> FullOffset {
+        let content = content.into();
+        let offset = self.to_offset(&content);
+        let mut cursor = content.fragments.cursor::<FragmentTextSummary>();
+        cursor.seek(&offset, bias, &None);
+        FullOffset(offset + cursor.start().deleted)
     }
 }
 
-impl From<proto::operation::Edit> for EditOperation {
-    fn from(edit: proto::operation::Edit) -> Self {
-        let ranges = edit
-            .ranges
-            .into_iter()
-            .map(|range| range.start as usize..range.end as usize)
-            .collect();
-        EditOperation {
-            timestamp: InsertionTimestamp {
-                replica_id: edit.replica_id as ReplicaId,
-                local: edit.local_timestamp,
-                lamport: edit.lamport_timestamp,
-            },
-            version: edit.version.into(),
-            ranges,
-            new_text: edit.new_text,
-        }
+impl ToOffset for Point {
+    fn to_offset<'a>(&self, content: impl Into<Content<'a>>) -> usize {
+        content.into().visible_text.point_to_offset(*self)
     }
 }
 
-pub trait ToOffset {
-    fn to_offset<'a>(&self, content: impl Into<Content<'a>>) -> usize;
-}
-
-impl ToOffset for Point {
+impl ToOffset for PointUtf16 {
     fn to_offset<'a>(&self, content: impl Into<Content<'a>>) -> usize {
-        content.into().visible_text.to_offset(*self)
+        content.into().visible_text.point_utf16_to_offset(*self)
     }
 }
 
 impl ToOffset for usize {
-    fn to_offset<'a>(&self, _: impl Into<Content<'a>>) -> usize {
+    fn to_offset<'a>(&self, content: impl Into<Content<'a>>) -> usize {
+        assert!(*self <= content.into().len(), "offset is out of range");
         *self
     }
 }
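
Note: the `ToOffset` trait above is what lets byte offsets, `Point`s, and the new `PointUtf16` be used interchangeably when addressing buffer contents. A minimal standalone sketch of the same pattern, using a hypothetical `Snapshot` type in place of the crate's `Content`:

    // Minimal sketch of the coordinate-resolution pattern used above; `Snapshot`
    // is a hypothetical stand-in for the crate's `Content` type.
    #[derive(Clone, Copy)]
    struct Point {
        row: u32,
        column: u32,
    }

    struct Snapshot {
        text: String,
    }

    trait ToOffset {
        fn to_offset(&self, snapshot: &Snapshot) -> usize;
    }

    // Byte offsets resolve to themselves, but are validated against the text
    // length, mirroring the new assertion in the `usize` impl above.
    impl ToOffset for usize {
        fn to_offset(&self, snapshot: &Snapshot) -> usize {
            assert!(*self <= snapshot.text.len(), "offset is out of range");
            *self
        }
    }

    // Row/column coordinates resolve by finding the byte offset of the row start.
    impl ToOffset for Point {
        fn to_offset(&self, snapshot: &Snapshot) -> usize {
            let mut row = 0;
            let mut offset = 0;
            for line in snapshot.text.split_inclusive('\n') {
                if row == self.row {
                    break;
                }
                row += 1;
                offset += line.len();
            }
            offset + self.column as usize
        }
    }

    fn main() {
        let snapshot = Snapshot { text: "hello\nworld".into() };
        assert_eq!(6usize.to_offset(&snapshot), 6);
        assert_eq!(Point { row: 1, column: 2 }.to_offset(&snapshot), 8);
        println!("ok");
    }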

crates/buffer/src/point.rs 🔗

@@ -32,11 +32,7 @@ impl<'a> Add<&'a Self> for Point {
     type Output = Point;
 
     fn add(self, other: &'a Self) -> Self::Output {
-        if other.row == 0 {
-            Point::new(self.row, self.column + other.column)
-        } else {
-            Point::new(self.row + other.row, other.column)
-        }
+        self + *other
     }
 }
 
@@ -44,7 +40,11 @@ impl Add for Point {
     type Output = Point;
 
     fn add(self, other: Self) -> Self::Output {
-        self + &other
+        if other.row == 0 {
+            Point::new(self.row, self.column + other.column)
+        } else {
+            Point::new(self.row + other.row, other.column)
+        }
     }
 }
 
@@ -52,13 +52,7 @@ impl<'a> Sub<&'a Self> for Point {
     type Output = Point;
 
     fn sub(self, other: &'a Self) -> Self::Output {
-        debug_assert!(*other <= self);
-
-        if self.row == other.row {
-            Point::new(0, self.column - other.column)
-        } else {
-            Point::new(self.row - other.row, self.column)
-        }
+        self - *other
     }
 }
 
@@ -66,7 +60,13 @@ impl Sub for Point {
     type Output = Point;
 
     fn sub(self, other: Self) -> Self::Output {
-        self - &other
+        debug_assert!(other <= self);
+
+        if self.row == other.row {
+            Point::new(0, self.column - other.column)
+        } else {
+            Point::new(self.row - other.row, self.column)
+        }
     }
 }
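
The reshuffling above makes the by-value operators the canonical implementations and has the by-reference ones forward to them, which is the natural direction now that `Point` is `Copy`. A small self-contained sketch of the row-relative addition rule, for reference:

    use std::ops::Add;

    // Standalone sketch of the `Add` semantics in the hunk above: adding a point
    // that spans one or more rows replaces the column instead of accumulating it.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Point {
        row: u32,
        column: u32,
    }

    impl Point {
        fn new(row: u32, column: u32) -> Self {
            Point { row, column }
        }
    }

    impl Add for Point {
        type Output = Point;
        fn add(self, other: Self) -> Self::Output {
            if other.row == 0 {
                Point::new(self.row, self.column + other.column)
            } else {
                Point::new(self.row + other.row, other.column)
            }
        }
    }

    // The by-reference impl just forwards to the by-value one.
    impl<'a> Add<&'a Point> for Point {
        type Output = Point;
        fn add(self, other: &'a Point) -> Self::Output {
            self + *other
        }
    }

    fn main() {
        // Same-row addition extends the column...
        assert_eq!(Point::new(2, 3) + Point::new(0, 4), Point::new(2, 7));
        // ...while crossing rows resets it to the other point's column.
        assert_eq!(Point::new(2, 3) + Point::new(1, 4), Point::new(3, 4));
        println!("ok");
    }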
 

crates/buffer/src/point_utf16.rs 🔗

@@ -0,0 +1,111 @@
+use std::{
+    cmp::Ordering,
+    ops::{Add, AddAssign, Sub},
+};
+
+#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash)]
+pub struct PointUtf16 {
+    pub row: u32,
+    pub column: u32,
+}
+
+impl PointUtf16 {
+    pub const MAX: Self = Self {
+        row: u32::MAX,
+        column: u32::MAX,
+    };
+
+    pub fn new(row: u32, column: u32) -> Self {
+        PointUtf16 { row, column }
+    }
+
+    pub fn zero() -> Self {
+        PointUtf16::new(0, 0)
+    }
+
+    pub fn is_zero(&self) -> bool {
+        self.row == 0 && self.column == 0
+    }
+}
+
+impl<'a> Add<&'a Self> for PointUtf16 {
+    type Output = PointUtf16;
+
+    fn add(self, other: &'a Self) -> Self::Output {
+        self + *other
+    }
+}
+
+impl Add for PointUtf16 {
+    type Output = PointUtf16;
+
+    fn add(self, other: Self) -> Self::Output {
+        if other.row == 0 {
+            PointUtf16::new(self.row, self.column + other.column)
+        } else {
+            PointUtf16::new(self.row + other.row, other.column)
+        }
+    }
+}
+
+impl<'a> Sub<&'a Self> for PointUtf16 {
+    type Output = PointUtf16;
+
+    fn sub(self, other: &'a Self) -> Self::Output {
+        self - *other
+    }
+}
+
+impl Sub for PointUtf16 {
+    type Output = PointUtf16;
+
+    fn sub(self, other: Self) -> Self::Output {
+        debug_assert!(other <= self);
+
+        if self.row == other.row {
+            PointUtf16::new(0, self.column - other.column)
+        } else {
+            PointUtf16::new(self.row - other.row, self.column)
+        }
+    }
+}
+
+impl<'a> AddAssign<&'a Self> for PointUtf16 {
+    fn add_assign(&mut self, other: &'a Self) {
+        *self += *other;
+    }
+}
+
+impl AddAssign<Self> for PointUtf16 {
+    fn add_assign(&mut self, other: Self) {
+        if other.row == 0 {
+            self.column += other.column;
+        } else {
+            self.row += other.row;
+            self.column = other.column;
+        }
+    }
+}
+
+impl PartialOrd for PointUtf16 {
+    fn partial_cmp(&self, other: &PointUtf16) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for PointUtf16 {
+    #[cfg(target_pointer_width = "64")]
+    fn cmp(&self, other: &PointUtf16) -> Ordering {
+        let a = (self.row as usize) << 32 | self.column as usize;
+        let b = (other.row as usize) << 32 | other.column as usize;
+        a.cmp(&b)
+    }
+
+    #[cfg(target_pointer_width = "32")]
+    fn cmp(&self, other: &PointUtf16) -> Ordering {
+        match self.row.cmp(&other.row) {
+            Ordering::Equal => self.column.cmp(&other.column),
+            comparison @ _ => comparison,
+        }
+    }
+}
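
The 64-bit `Ord` impl packs `(row, column)` into a single integer so the comparison is one machine op; a quick illustrative check (not part of the PR) that the packed ordering agrees with the row-major tuple ordering used on 32-bit targets:

    // Packing row into the high 32 bits and column into the low 32 bits is
    // order-preserving with respect to (row, column) tuple comparison.
    fn packed(row: u32, column: u32) -> u64 {
        (row as u64) << 32 | column as u64
    }

    fn main() {
        let points = [(0u32, 0u32), (0, 7), (1, 0), (1, 3), (2, 1)];
        for window in points.windows(2) {
            let (a, b) = (window[0], window[1]);
            assert!(a < b); // row-major tuple ordering
            assert!(packed(a.0, a.1) < packed(b.0, b.1)); // packed ordering agrees
        }
        println!("ok");
    }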

crates/buffer/src/rope.rs 🔗

@@ -1,8 +1,10 @@
+use crate::PointUtf16;
+
 use super::Point;
 use arrayvec::ArrayString;
 use smallvec::SmallVec;
 use std::{cmp, ops::Range, str};
-use sum_tree::{Bias, SumTree};
+use sum_tree::{Bias, Dimension, SumTree};
 
 #[cfg(test)]
 const CHUNK_BASE: usize = 6;
@@ -136,7 +138,7 @@ impl Rope {
         Chunks::new(self, range, true)
     }
 
-    pub fn to_point(&self, offset: usize) -> Point {
+    pub fn offset_to_point(&self, offset: usize) -> Point {
         assert!(offset <= self.summary().bytes);
         let mut cursor = self.chunks.cursor::<(usize, Point)>();
         cursor.seek(&offset, Bias::Left, &());
@@ -144,15 +146,40 @@ impl Rope {
         cursor.start().1
             + cursor
                 .item()
-                .map_or(Point::zero(), |chunk| chunk.to_point(overshoot))
+                .map_or(Point::zero(), |chunk| chunk.offset_to_point(overshoot))
+    }
+
+    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
+        assert!(offset <= self.summary().bytes);
+        let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>();
+        cursor.seek(&offset, Bias::Left, &());
+        let overshoot = offset - cursor.start().0;
+        cursor.start().1
+            + cursor.item().map_or(PointUtf16::zero(), |chunk| {
+                chunk.offset_to_point_utf16(overshoot)
+            })
     }
 
-    pub fn to_offset(&self, point: Point) -> usize {
+    pub fn point_to_offset(&self, point: Point) -> usize {
         assert!(point <= self.summary().lines);
         let mut cursor = self.chunks.cursor::<(Point, usize)>();
         cursor.seek(&point, Bias::Left, &());
         let overshoot = point - cursor.start().0;
-        cursor.start().1 + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot))
+        cursor.start().1
+            + cursor
+                .item()
+                .map_or(0, |chunk| chunk.point_to_offset(overshoot))
+    }
+
+    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+        assert!(point <= self.summary().lines_utf16);
+        let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>();
+        cursor.seek(&point, Bias::Left, &());
+        let overshoot = point - cursor.start().0;
+        cursor.start().1
+            + cursor
+                .item()
+                .map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot))
     }
 
     pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
@@ -188,6 +215,17 @@ impl Rope {
             self.summary().lines
         }
     }
+
+    pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 {
+        let mut cursor = self.chunks.cursor::<PointUtf16>();
+        cursor.seek(&point, Bias::Right, &());
+        if let Some(chunk) = cursor.item() {
+            let overshoot = point - cursor.start();
+            *cursor.start() + chunk.clip_point_utf16(overshoot, bias)
+        } else {
+            self.summary().lines_utf16
+        }
+    }
 }
 
 impl<'a> From<&'a str> for Rope {
@@ -258,22 +296,24 @@ impl<'a> Cursor<'a> {
         slice
     }
 
-    pub fn summary(&mut self, end_offset: usize) -> TextSummary {
+    pub fn summary<D: TextDimension<'a>>(&mut self, end_offset: usize) -> D {
         debug_assert!(end_offset >= self.offset);
 
-        let mut summary = TextSummary::default();
+        let mut summary = D::default();
         if let Some(start_chunk) = self.chunks.item() {
             let start_ix = self.offset - self.chunks.start();
             let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
-            summary = TextSummary::from(&start_chunk.0[start_ix..end_ix]);
+            summary.add_assign(&D::from_summary(&TextSummary::from(
+                &start_chunk.0[start_ix..end_ix],
+            )));
         }
 
         if end_offset > self.chunks.end(&()) {
             self.chunks.next(&());
-            summary += &self.chunks.summary(&end_offset, Bias::Right, &());
+            summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &()));
             if let Some(end_chunk) = self.chunks.item() {
                 let end_ix = end_offset - self.chunks.start();
-                summary += TextSummary::from(&end_chunk.0[..end_ix]);
+                summary.add_assign(&D::from_summary(&TextSummary::from(&end_chunk.0[..end_ix])));
             }
         }
 
@@ -375,7 +415,7 @@ impl<'a> Iterator for Chunks<'a> {
 struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>);
 
 impl Chunk {
-    fn to_point(&self, target: usize) -> Point {
+    fn offset_to_point(&self, target: usize) -> Point {
         let mut offset = 0;
         let mut point = Point::new(0, 0);
         for ch in self.0.chars() {
@@ -394,7 +434,26 @@ impl Chunk {
         point
     }
 
-    fn to_offset(&self, target: Point) -> usize {
+    fn offset_to_point_utf16(&self, target: usize) -> PointUtf16 {
+        let mut offset = 0;
+        let mut point = PointUtf16::new(0, 0);
+        for ch in self.0.chars() {
+            if offset >= target {
+                break;
+            }
+
+            if ch == '\n' {
+                point.row += 1;
+                point.column = 0;
+            } else {
+                point.column += ch.len_utf16() as u32;
+            }
+            offset += ch.len_utf8();
+        }
+        point
+    }
+
+    fn point_to_offset(&self, target: Point) -> usize {
         let mut offset = 0;
         let mut point = Point::new(0, 0);
         for ch in self.0.chars() {
@@ -416,6 +475,28 @@ impl Chunk {
         offset
     }
 
+    fn point_utf16_to_offset(&self, target: PointUtf16) -> usize {
+        let mut offset = 0;
+        let mut point = PointUtf16::new(0, 0);
+        for ch in self.0.chars() {
+            if point >= target {
+                if point > target {
+                    panic!("point {:?} is inside of character {:?}", target, ch);
+                }
+                break;
+            }
+
+            if ch == '\n' {
+                point.row += 1;
+                point.column = 0;
+            } else {
+                point.column += ch.len_utf16() as u32;
+            }
+            offset += ch.len_utf8();
+        }
+        offset
+    }
+
     fn clip_point(&self, target: Point, bias: Bias) -> Point {
         for (row, line) in self.0.split('\n').enumerate() {
             if row == target.row as usize {
@@ -431,6 +512,23 @@ impl Chunk {
         }
         unreachable!()
     }
+
+    fn clip_point_utf16(&self, target: PointUtf16, bias: Bias) -> PointUtf16 {
+        for (row, line) in self.0.split('\n').enumerate() {
+            if row == target.row as usize {
+                let mut code_units = line.encode_utf16();
+                let mut column = code_units.by_ref().take(target.column as usize).count();
+                if char::decode_utf16(code_units).next().transpose().is_err() {
+                    match bias {
+                        Bias::Left => column -= 1,
+                        Bias::Right => column += 1,
+                    }
+                }
+                return PointUtf16::new(row as u32, column as u32);
+            }
+        }
+        unreachable!()
+    }
 }
 
 impl sum_tree::Item for Chunk {
@@ -445,6 +543,7 @@ impl sum_tree::Item for Chunk {
 pub struct TextSummary {
     pub bytes: usize,
     pub lines: Point,
+    pub lines_utf16: PointUtf16,
     pub first_line_chars: u32,
     pub last_line_chars: u32,
     pub longest_row: u32,
@@ -454,17 +553,19 @@ pub struct TextSummary {
 impl<'a> From<&'a str> for TextSummary {
     fn from(text: &'a str) -> Self {
         let mut lines = Point::new(0, 0);
+        let mut lines_utf16 = PointUtf16::new(0, 0);
         let mut first_line_chars = 0;
         let mut last_line_chars = 0;
         let mut longest_row = 0;
         let mut longest_row_chars = 0;
         for c in text.chars() {
             if c == '\n' {
-                lines.row += 1;
-                lines.column = 0;
+                lines += Point::new(1, 0);
+                lines_utf16 += PointUtf16::new(1, 0);
                 last_line_chars = 0;
             } else {
                 lines.column += c.len_utf8() as u32;
+                lines_utf16.column += c.len_utf16() as u32;
                 last_line_chars += 1;
             }
 
@@ -481,6 +582,7 @@ impl<'a> From<&'a str> for TextSummary {
         TextSummary {
             bytes: text.len(),
             lines,
+            lines_utf16,
             first_line_chars,
             last_line_chars,
             longest_row,
@@ -520,7 +622,8 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
         }
 
         self.bytes += other.bytes;
-        self.lines += &other.lines;
+        self.lines += other.lines;
+        self.lines_utf16 += other.lines_utf16;
     }
 }
 
@@ -530,15 +633,77 @@ impl std::ops::AddAssign<Self> for TextSummary {
     }
 }
 
+pub trait TextDimension<'a>: Dimension<'a, TextSummary> {
+    fn from_summary(summary: &TextSummary) -> Self;
+    fn add_assign(&mut self, other: &Self);
+}
+
+impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1, D2) {
+    fn from_summary(summary: &TextSummary) -> Self {
+        (D1::from_summary(summary), D2::from_summary(summary))
+    }
+
+    fn add_assign(&mut self, other: &Self) {
+        self.0.add_assign(&other.0);
+        self.1.add_assign(&other.1);
+    }
+}
+
+impl<'a> TextDimension<'a> for TextSummary {
+    fn from_summary(summary: &TextSummary) -> Self {
+        summary.clone()
+    }
+
+    fn add_assign(&mut self, other: &Self) {
+        *self += other;
+    }
+}
+
 impl<'a> sum_tree::Dimension<'a, TextSummary> for usize {
     fn add_summary(&mut self, summary: &'a TextSummary, _: &()) {
         *self += summary.bytes;
     }
 }
 
+impl<'a> TextDimension<'a> for usize {
+    fn from_summary(summary: &TextSummary) -> Self {
+        summary.bytes
+    }
+
+    fn add_assign(&mut self, other: &Self) {
+        *self += other;
+    }
+}
+
 impl<'a> sum_tree::Dimension<'a, TextSummary> for Point {
     fn add_summary(&mut self, summary: &'a TextSummary, _: &()) {
-        *self += &summary.lines;
+        *self += summary.lines;
+    }
+}
+
+impl<'a> TextDimension<'a> for Point {
+    fn from_summary(summary: &TextSummary) -> Self {
+        summary.lines
+    }
+
+    fn add_assign(&mut self, other: &Self) {
+        *self += other;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, TextSummary> for PointUtf16 {
+    fn add_summary(&mut self, summary: &'a TextSummary, _: &()) {
+        *self += summary.lines_utf16;
+    }
+}
+
+impl<'a> TextDimension<'a> for PointUtf16 {
+    fn from_summary(summary: &TextSummary) -> Self {
+        summary.lines_utf16
+    }
+
+    fn add_assign(&mut self, other: &Self) {
+        *self += other;
     }
 }
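
`Cursor::summary` above becomes generic over a `TextDimension`, so a single pass can report its result as bytes, `Point`, `PointUtf16`, a full `TextSummary`, or any pair of those. A standalone sketch of that composition idea, with illustrative names rather than the crate's types:

    // Illustrative sketch of the dimension pattern above: any type that can be
    // derived from a summary and accumulated can act as a measurement, and a
    // pair of dimensions composes into a new one.
    struct Summary {
        bytes: usize,
        lines: u32,
    }

    trait Dimension {
        fn from_summary(summary: &Summary) -> Self;
        fn add_assign(&mut self, other: &Self);
    }

    impl Dimension for usize {
        fn from_summary(summary: &Summary) -> Self { summary.bytes }
        fn add_assign(&mut self, other: &Self) { *self += other; }
    }

    impl Dimension for u32 {
        fn from_summary(summary: &Summary) -> Self { summary.lines }
        fn add_assign(&mut self, other: &Self) { *self += other; }
    }

    // Tuples compose: accumulating one dimension can simultaneously track another.
    impl<D1: Dimension, D2: Dimension> Dimension for (D1, D2) {
        fn from_summary(summary: &Summary) -> Self {
            (D1::from_summary(summary), D2::from_summary(summary))
        }
        fn add_assign(&mut self, other: &Self) {
            self.0.add_assign(&other.0);
            self.1.add_assign(&other.1);
        }
    }

    fn main() {
        let summaries = [
            Summary { bytes: 10, lines: 1 },
            Summary { bytes: 7, lines: 0 },
            Summary { bytes: 3, lines: 2 },
        ];
        let mut total: (usize, u32) = (0, 0);
        for summary in &summaries {
            let step = <(usize, u32) as Dimension>::from_summary(summary);
            total.add_assign(&step);
        }
        assert_eq!(total, (20usize, 3u32));
        println!("ok");
    }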
 
@@ -577,6 +742,41 @@ mod tests {
         assert_eq!(rope.text(), text);
     }
 
+    #[test]
+    fn test_clip() {
+        let rope = Rope::from("🧘");
+
+        assert_eq!(rope.clip_offset(1, Bias::Left), 0);
+        assert_eq!(rope.clip_offset(1, Bias::Right), 4);
+        assert_eq!(rope.clip_offset(5, Bias::Right), 4);
+
+        assert_eq!(
+            rope.clip_point(Point::new(0, 1), Bias::Left),
+            Point::new(0, 0)
+        );
+        assert_eq!(
+            rope.clip_point(Point::new(0, 1), Bias::Right),
+            Point::new(0, 4)
+        );
+        assert_eq!(
+            rope.clip_point(Point::new(0, 5), Bias::Right),
+            Point::new(0, 4)
+        );
+
+        assert_eq!(
+            rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Left),
+            PointUtf16::new(0, 0)
+        );
+        assert_eq!(
+            rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Right),
+            PointUtf16::new(0, 2)
+        );
+        assert_eq!(
+            rope.clip_point_utf16(PointUtf16::new(0, 3), Bias::Right),
+            PointUtf16::new(0, 2)
+        );
+    }
+
     #[gpui::test(iterations = 100)]
     fn test_random(mut rng: StdRng) {
         let operations = env::var("OPERATIONS")
@@ -624,14 +824,33 @@ mod tests {
             }
 
             let mut point = Point::new(0, 0);
+            let mut point_utf16 = PointUtf16::new(0, 0);
             for (ix, ch) in expected.char_indices().chain(Some((expected.len(), '\0'))) {
-                assert_eq!(actual.to_point(ix), point, "to_point({})", ix);
-                assert_eq!(actual.to_offset(point), ix, "to_offset({:?})", point);
+                assert_eq!(actual.offset_to_point(ix), point, "offset_to_point({})", ix);
+                assert_eq!(
+                    actual.offset_to_point_utf16(ix),
+                    point_utf16,
+                    "offset_to_point_utf16({})",
+                    ix
+                );
+                assert_eq!(
+                    actual.point_to_offset(point),
+                    ix,
+                    "point_to_offset({:?})",
+                    point
+                );
+                assert_eq!(
+                    actual.point_utf16_to_offset(point_utf16),
+                    ix,
+                    "point_utf16_to_offset({:?})",
+                    point_utf16
+                );
                 if ch == '\n' {
-                    point.row += 1;
-                    point.column = 0
+                    point += Point::new(1, 0);
+                    point_utf16 += PointUtf16::new(1, 0);
                 } else {
                     point.column += ch.len_utf8() as u32;
+                    point_utf16.column += ch.len_utf16() as u32;
                 }
             }
 
@@ -639,7 +858,7 @@ mod tests {
                 let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
                 let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
                 assert_eq!(
-                    actual.cursor(start_ix).summary(end_ix),
+                    actual.cursor(start_ix).summary::<TextSummary>(end_ix),
                     TextSummary::from(&expected[start_ix..end_ix])
                 );
             }
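
For reference, the core of the new `offset_to_point_utf16` conversion, shown here as a rope-free sketch over a plain `&str` (the real code does the same walk per chunk and sums chunk summaries):

    // Minimal sketch of the UTF-8 offset -> UTF-16 point conversion performed by
    // `Chunk::offset_to_point_utf16` above, operating on a plain &str.
    #[derive(Debug, PartialEq)]
    struct PointUtf16 {
        row: u32,
        column: u32,
    }

    fn offset_to_point_utf16(text: &str, target: usize) -> PointUtf16 {
        let mut offset = 0;
        let mut point = PointUtf16 { row: 0, column: 0 };
        for ch in text.chars() {
            if offset >= target {
                break;
            }
            if ch == '\n' {
                point.row += 1;
                point.column = 0;
            } else {
                // Columns are counted in UTF-16 code units, so '🧘' counts as 2.
                point.column += ch.len_utf16() as u32;
            }
            offset += ch.len_utf8();
        }
        point
    }

    fn main() {
        let text = "a🧘\nb";
        // The emoji occupies 4 bytes of UTF-8 but 2 UTF-16 code units.
        assert_eq!(offset_to_point_utf16(text, 5), PointUtf16 { row: 0, column: 3 });
        assert_eq!(offset_to_point_utf16(text, 7), PointUtf16 { row: 1, column: 1 });
        println!("ok");
    }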

crates/buffer/src/selection.rs 🔗

@@ -1,7 +1,5 @@
-use crate::{AnchorRangeMap, Buffer, Content, Point, ToOffset, ToPoint};
-use rpc::proto;
+use super::{AnchorRangeMap, Buffer, Content, Point, ToOffset, ToPoint};
 use std::{cmp::Ordering, ops::Range, sync::Arc};
-use sum_tree::Bias;
 
 pub type SelectionSetId = clock::Lamport;
 pub type SelectionsVersion = usize;
@@ -129,53 +127,3 @@ impl SelectionSet {
             })
     }
 }
-
-impl<'a> Into<proto::SelectionSet> for &'a SelectionSet {
-    fn into(self) -> proto::SelectionSet {
-        let version = self.selections.version();
-        let entries = self.selections.raw_entries();
-        proto::SelectionSet {
-            replica_id: self.id.replica_id as u32,
-            lamport_timestamp: self.id.value as u32,
-            is_active: self.active,
-            version: version.into(),
-            selections: entries
-                .iter()
-                .map(|(range, state)| proto::Selection {
-                    id: state.id as u64,
-                    start: range.start.0 as u64,
-                    end: range.end.0 as u64,
-                    reversed: state.reversed,
-                })
-                .collect(),
-        }
-    }
-}
-
-impl From<proto::SelectionSet> for SelectionSet {
-    fn from(set: proto::SelectionSet) -> Self {
-        Self {
-            id: clock::Lamport {
-                replica_id: set.replica_id as u16,
-                value: set.lamport_timestamp,
-            },
-            active: set.is_active,
-            selections: Arc::new(AnchorRangeMap::from_raw(
-                set.version.into(),
-                set.selections
-                    .into_iter()
-                    .map(|selection| {
-                        let range = (selection.start as usize, Bias::Left)
-                            ..(selection.end as usize, Bias::Right);
-                        let state = SelectionState {
-                            id: selection.id as usize,
-                            reversed: selection.reversed,
-                            goal: SelectionGoal::None,
-                        };
-                        (range, state)
-                    })
-                    .collect(),
-            )),
-        }
-    }
-}

crates/buffer/src/tests.rs 🔗

@@ -78,7 +78,7 @@ fn test_random_edits(mut rng: StdRng) {
 
     for mut old_buffer in buffer_versions {
         let edits = buffer
-            .edits_since(old_buffer.version.clone())
+            .edits_since::<usize>(&old_buffer.version)
             .collect::<Vec<_>>();
 
         log::info!(
@@ -88,12 +88,12 @@ fn test_random_edits(mut rng: StdRng) {
             edits,
         );
 
-        let mut delta = 0_isize;
         for edit in edits {
-            let old_start = (edit.old_bytes.start as isize + delta) as usize;
-            let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect();
-            old_buffer.edit(Some(old_start..old_start + edit.deleted_bytes()), new_text);
-            delta += edit.delta();
+            let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
+            old_buffer.edit(
+                Some(edit.new.start..edit.new.start + edit.old.len()),
+                new_text,
+            );
         }
         assert_eq!(old_buffer.text(), buffer.text());
     }
@@ -123,6 +123,7 @@ fn test_text_summary_for_range() {
         TextSummary {
             bytes: 2,
             lines: Point::new(1, 0),
+            lines_utf16: PointUtf16::new(1, 0),
             first_line_chars: 1,
             last_line_chars: 0,
             longest_row: 0,
@@ -134,6 +135,7 @@ fn test_text_summary_for_range() {
         TextSummary {
             bytes: 11,
             lines: Point::new(3, 0),
+            lines_utf16: PointUtf16::new(3, 0),
             first_line_chars: 1,
             last_line_chars: 0,
             longest_row: 2,
@@ -145,6 +147,7 @@ fn test_text_summary_for_range() {
         TextSummary {
             bytes: 20,
             lines: Point::new(4, 1),
+            lines_utf16: PointUtf16::new(4, 1),
             first_line_chars: 2,
             last_line_chars: 1,
             longest_row: 3,
@@ -156,6 +159,7 @@ fn test_text_summary_for_range() {
         TextSummary {
             bytes: 22,
             lines: Point::new(4, 3),
+            lines_utf16: PointUtf16::new(4, 3),
             first_line_chars: 2,
             last_line_chars: 3,
             longest_row: 3,
@@ -167,6 +171,7 @@ fn test_text_summary_for_range() {
         TextSummary {
             bytes: 15,
             lines: Point::new(2, 3),
+            lines_utf16: PointUtf16::new(2, 3),
             first_line_chars: 4,
             last_line_chars: 3,
             longest_row: 1,

crates/editor/src/display_map.rs 🔗

@@ -359,7 +359,7 @@ mod tests {
     use super::*;
     use crate::{movement, test::*};
     use gpui::{color::Color, MutableAppContext};
-    use language::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal};
+    use language::{Language, LanguageConfig, RandomCharIter, SelectionGoal};
     use rand::{prelude::StdRng, Rng};
     use std::{env, sync::Arc};
     use theme::SyntaxTheme;
@@ -701,9 +701,8 @@ mod tests {
         );
         lang.set_theme(&theme);
 
-        let buffer = cx.add_model(|cx| {
-            Buffer::from_history(0, History::new(text.into()), None, Some(lang), cx)
-        });
+        let buffer =
+            cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
         buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
 
         let tab_size = 2;
@@ -789,9 +788,8 @@ mod tests {
         );
         lang.set_theme(&theme);
 
-        let buffer = cx.add_model(|cx| {
-            Buffer::from_history(0, History::new(text.into()), None, Some(lang), cx)
-        });
+        let buffer =
+            cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
         buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
 
         let font_cache = cx.font_cache();
@@ -974,16 +972,16 @@ mod tests {
     ) -> Vec<(String, Option<&'a str>)> {
         let mut snapshot = map.update(cx, |map, cx| map.snapshot(cx));
         let mut chunks: Vec<(String, Option<&str>)> = Vec::new();
-        for (chunk, style_id) in snapshot.highlighted_chunks_for_rows(rows) {
-            let style_name = style_id.name(theme);
+        for chunk in snapshot.highlighted_chunks_for_rows(rows) {
+            let style_name = chunk.highlight_id.name(theme);
             if let Some((last_chunk, last_style_name)) = chunks.last_mut() {
                 if style_name == *last_style_name {
-                    last_chunk.push_str(chunk);
+                    last_chunk.push_str(chunk.text);
                 } else {
-                    chunks.push((chunk.to_string(), style_name));
+                    chunks.push((chunk.text.to_string(), style_name));
                 }
             } else {
-                chunks.push((chunk.to_string(), style_name));
+                chunks.push((chunk.text.to_string(), style_name));
             }
         }
         chunks

crates/editor/src/display_map/fold_map.rs 🔗

@@ -1,5 +1,8 @@
 use gpui::{AppContext, ModelHandle};
-use language::{Anchor, AnchorRangeExt, Buffer, HighlightId, Point, TextSummary, ToOffset};
+use language::{
+    Anchor, AnchorRangeExt, Buffer, HighlightId, HighlightedChunk, Point, PointUtf16, TextSummary,
+    ToOffset,
+};
 use parking_lot::Mutex;
 use std::{
     cmp::{self, Ordering},
@@ -110,9 +113,8 @@ impl<'a> FoldMapWriter<'a> {
                 let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end));
                 folds.push(fold);
                 edits.push(buffer::Edit {
-                    old_bytes: range.clone(),
-                    new_bytes: range.clone(),
-                    ..Default::default()
+                    old: range.clone(),
+                    new: range,
                 });
             }
         }
@@ -155,9 +157,8 @@ impl<'a> FoldMapWriter<'a> {
             while let Some(fold) = folds_cursor.item() {
                 let offset_range = fold.0.start.to_offset(&buffer)..fold.0.end.to_offset(&buffer);
                 edits.push(buffer::Edit {
-                    old_bytes: offset_range.clone(),
-                    new_bytes: offset_range,
-                    ..Default::default()
+                    old: offset_range.clone(),
+                    new: offset_range,
                 });
                 fold_ixs_to_delete.push(*folds_cursor.start());
                 folds_cursor.next(&buffer);
@@ -202,6 +203,7 @@ pub struct FoldMap {
 struct SyncState {
     version: clock::Global,
     parse_count: usize,
+    diagnostics_update_count: usize,
 }
 
 impl FoldMap {
@@ -223,6 +225,7 @@ impl FoldMap {
             last_sync: Mutex::new(SyncState {
                 version: buffer.version(),
                 parse_count: buffer.parse_count(),
+                diagnostics_update_count: buffer.diagnostics_update_count(),
             }),
             version: AtomicUsize::new(0),
         };
@@ -254,14 +257,17 @@ impl FoldMap {
             SyncState {
                 version: buffer.version(),
                 parse_count: buffer.parse_count(),
+                diagnostics_update_count: buffer.diagnostics_update_count(),
             },
         );
         let edits = buffer
-            .edits_since(last_sync.version)
+            .edits_since(&last_sync.version)
             .map(Into::into)
             .collect::<Vec<_>>();
         if edits.is_empty() {
-            if last_sync.parse_count != buffer.parse_count() {
+            if last_sync.parse_count != buffer.parse_count()
+                || last_sync.diagnostics_update_count != buffer.diagnostics_update_count()
+            {
                 self.version.fetch_add(1, SeqCst);
             }
             Vec::new()
@@ -281,7 +287,11 @@ impl FoldMap {
         }
     }
 
-    fn apply_edits(&self, buffer_edits: Vec<buffer::Edit>, cx: &AppContext) -> Vec<FoldEdit> {
+    fn apply_edits(
+        &self,
+        buffer_edits: Vec<buffer::Edit<usize>>,
+        cx: &AppContext,
+    ) -> Vec<FoldEdit> {
         let buffer = self.buffer.read(cx).snapshot();
         let mut buffer_edits_iter = buffer_edits.iter().cloned().peekable();
 
@@ -291,28 +301,28 @@ impl FoldMap {
         cursor.seek(&0, Bias::Right, &());
 
         while let Some(mut edit) = buffer_edits_iter.next() {
-            new_transforms.push_tree(cursor.slice(&edit.old_bytes.start, Bias::Left, &()), &());
-            edit.new_bytes.start -= edit.old_bytes.start - cursor.start();
-            edit.old_bytes.start = *cursor.start();
+            new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &());
+            edit.new.start -= edit.old.start - cursor.start();
+            edit.old.start = *cursor.start();
 
-            cursor.seek(&edit.old_bytes.end, Bias::Right, &());
+            cursor.seek(&edit.old.end, Bias::Right, &());
             cursor.next(&());
 
-            let mut delta = edit.delta();
+            let mut delta = edit.new.len() as isize - edit.old.len() as isize;
             loop {
-                edit.old_bytes.end = *cursor.start();
+                edit.old.end = *cursor.start();
 
                 if let Some(next_edit) = buffer_edits_iter.peek() {
-                    if next_edit.old_bytes.start > edit.old_bytes.end {
+                    if next_edit.old.start > edit.old.end {
                         break;
                     }
 
                     let next_edit = buffer_edits_iter.next().unwrap();
-                    delta += next_edit.delta();
+                    delta += next_edit.new.len() as isize - next_edit.old.len() as isize;
 
-                    if next_edit.old_bytes.end >= edit.old_bytes.end {
-                        edit.old_bytes.end = next_edit.old_bytes.end;
-                        cursor.seek(&edit.old_bytes.end, Bias::Right, &());
+                    if next_edit.old.end >= edit.old.end {
+                        edit.old.end = next_edit.old.end;
+                        cursor.seek(&edit.old.end, Bias::Right, &());
                         cursor.next(&());
                     }
                 } else {
@@ -320,10 +330,9 @@ impl FoldMap {
                 }
             }
 
-            edit.new_bytes.end =
-                ((edit.new_bytes.start + edit.deleted_bytes()) as isize + delta) as usize;
+            edit.new.end = ((edit.new.start + edit.old.len()) as isize + delta) as usize;
 
-            let anchor = buffer.anchor_before(edit.new_bytes.start);
+            let anchor = buffer.anchor_before(edit.new.start);
             let mut folds_cursor = self.folds.cursor::<Fold>();
             folds_cursor.seek(&Fold(anchor..Anchor::max()), Bias::Left, &buffer);
 
@@ -339,10 +348,7 @@ impl FoldMap {
             })
             .peekable();
 
-            while folds
-                .peek()
-                .map_or(false, |fold| fold.start < edit.new_bytes.end)
-            {
+            while folds.peek().map_or(false, |fold| fold.start < edit.new.end) {
                 let mut fold = folds.next().unwrap();
                 let sum = new_transforms.summary();
 
@@ -375,13 +381,15 @@ impl FoldMap {
                 if fold.end > fold.start {
                     let output_text = "…";
                     let chars = output_text.chars().count() as u32;
-                    let lines = super::Point::new(0, output_text.len() as u32);
+                    let lines = Point::new(0, output_text.len() as u32);
+                    let lines_utf16 = PointUtf16::new(0, output_text.encode_utf16().count() as u32);
                     new_transforms.push(
                         Transform {
                             summary: TransformSummary {
                                 output: TextSummary {
                                     bytes: output_text.len(),
                                     lines,
+                                    lines_utf16,
                                     first_line_chars: chars,
                                     last_line_chars: chars,
                                     longest_row: 0,
@@ -397,9 +405,8 @@ impl FoldMap {
             }
 
             let sum = new_transforms.summary();
-            if sum.input.bytes < edit.new_bytes.end {
-                let text_summary =
-                    buffer.text_summary_for_range(sum.input.bytes..edit.new_bytes.end);
+            if sum.input.bytes < edit.new.end {
+                let text_summary = buffer.text_summary_for_range(sum.input.bytes..edit.new.end);
                 new_transforms.push(
                     Transform {
                         summary: TransformSummary {
@@ -436,35 +443,35 @@ impl FoldMap {
             let mut new_transforms = new_transforms.cursor::<(usize, FoldOffset)>();
 
             for mut edit in buffer_edits {
-                old_transforms.seek(&edit.old_bytes.start, Bias::Left, &());
+                old_transforms.seek(&edit.old.start, Bias::Left, &());
                 if old_transforms.item().map_or(false, |t| t.is_fold()) {
-                    edit.old_bytes.start = old_transforms.start().0;
+                    edit.old.start = old_transforms.start().0;
                 }
                 let old_start =
-                    old_transforms.start().1 .0 + (edit.old_bytes.start - old_transforms.start().0);
+                    old_transforms.start().1 .0 + (edit.old.start - old_transforms.start().0);
 
-                old_transforms.seek_forward(&edit.old_bytes.end, Bias::Right, &());
+                old_transforms.seek_forward(&edit.old.end, Bias::Right, &());
                 if old_transforms.item().map_or(false, |t| t.is_fold()) {
                     old_transforms.next(&());
-                    edit.old_bytes.end = old_transforms.start().0;
+                    edit.old.end = old_transforms.start().0;
                 }
                 let old_end =
-                    old_transforms.start().1 .0 + (edit.old_bytes.end - old_transforms.start().0);
+                    old_transforms.start().1 .0 + (edit.old.end - old_transforms.start().0);
 
-                new_transforms.seek(&edit.new_bytes.start, Bias::Left, &());
+                new_transforms.seek(&edit.new.start, Bias::Left, &());
                 if new_transforms.item().map_or(false, |t| t.is_fold()) {
-                    edit.new_bytes.start = new_transforms.start().0;
+                    edit.new.start = new_transforms.start().0;
                 }
                 let new_start =
-                    new_transforms.start().1 .0 + (edit.new_bytes.start - new_transforms.start().0);
+                    new_transforms.start().1 .0 + (edit.new.start - new_transforms.start().0);
 
-                new_transforms.seek_forward(&edit.new_bytes.end, Bias::Right, &());
+                new_transforms.seek_forward(&edit.new.end, Bias::Right, &());
                 if new_transforms.item().map_or(false, |t| t.is_fold()) {
                     new_transforms.next(&());
-                    edit.new_bytes.end = new_transforms.start().0;
+                    edit.new.end = new_transforms.start().0;
                 }
                 let new_end =
-                    new_transforms.start().1 .0 + (edit.new_bytes.end - new_transforms.start().0);
+                    new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0);
 
                 fold_edits.push(FoldEdit {
                     old_bytes: FoldOffset(old_start)..FoldOffset(old_end),
@@ -720,7 +727,7 @@ fn intersecting_folds<'a, T>(
     folds: &'a SumTree<Fold>,
     range: Range<T>,
     inclusive: bool,
-) -> FilterCursor<'a, impl 'a + Fn(&FoldSummary) -> bool, Fold, usize>
+) -> FilterCursor<'a, impl 'a + FnMut(&FoldSummary) -> bool, Fold, usize>
 where
     T: ToOffset,
 {
@@ -741,22 +748,22 @@ where
     )
 }
 
-fn consolidate_buffer_edits(edits: &mut Vec<buffer::Edit>) {
+fn consolidate_buffer_edits(edits: &mut Vec<buffer::Edit<usize>>) {
     edits.sort_unstable_by(|a, b| {
-        a.old_bytes
+        a.old
             .start
-            .cmp(&b.old_bytes.start)
-            .then_with(|| b.old_bytes.end.cmp(&a.old_bytes.end))
+            .cmp(&b.old.start)
+            .then_with(|| b.old.end.cmp(&a.old.end))
     });
 
     let mut i = 1;
     while i < edits.len() {
         let edit = edits[i].clone();
         let prev_edit = &mut edits[i - 1];
-        if prev_edit.old_bytes.end >= edit.old_bytes.start {
-            prev_edit.old_bytes.end = prev_edit.old_bytes.end.max(edit.old_bytes.end);
-            prev_edit.new_bytes.start = prev_edit.new_bytes.start.min(edit.new_bytes.start);
-            prev_edit.new_bytes.end = prev_edit.new_bytes.end.max(edit.new_bytes.end);
+        if prev_edit.old.end >= edit.old.start {
+            prev_edit.old.end = prev_edit.old.end.max(edit.old.end);
+            prev_edit.new.start = prev_edit.new.start.min(edit.new.start);
+            prev_edit.new.end = prev_edit.new.end.max(edit.new.end);
             edits.remove(i);
             continue;
         }
@@ -995,12 +1002,12 @@ impl<'a> Iterator for Chunks<'a> {
 pub struct HighlightedChunks<'a> {
     transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>,
     buffer_chunks: language::HighlightedChunks<'a>,
-    buffer_chunk: Option<(usize, &'a str, HighlightId)>,
+    buffer_chunk: Option<(usize, HighlightedChunk<'a>)>,
     buffer_offset: usize,
 }
 
 impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = (&'a str, HighlightId);
+    type Item = HighlightedChunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
         let transform = if let Some(item) = self.transform_cursor.item() {
@@ -1022,34 +1029,35 @@ impl<'a> Iterator for HighlightedChunks<'a> {
                 self.transform_cursor.next(&());
             }
 
-            return Some((output_text, HighlightId::default()));
+            return Some(HighlightedChunk {
+                text: output_text,
+                highlight_id: HighlightId::default(),
+                diagnostic: None,
+            });
         }
 
         // Retrieve a chunk from the current location in the buffer.
         if self.buffer_chunk.is_none() {
             let chunk_offset = self.buffer_chunks.offset();
-            self.buffer_chunk = self
-                .buffer_chunks
-                .next()
-                .map(|(chunk, capture_ix)| (chunk_offset, chunk, capture_ix));
+            self.buffer_chunk = self.buffer_chunks.next().map(|chunk| (chunk_offset, chunk));
         }
 
         // Otherwise, take a chunk from the buffer's text.
-        if let Some((chunk_offset, mut chunk, capture_ix)) = self.buffer_chunk {
+        if let Some((chunk_offset, mut chunk)) = self.buffer_chunk {
             let offset_in_chunk = self.buffer_offset - chunk_offset;
-            chunk = &chunk[offset_in_chunk..];
+            chunk.text = &chunk.text[offset_in_chunk..];
 
             // Truncate the chunk so that it ends at the next fold.
             let region_end = self.transform_cursor.end(&()).1 - self.buffer_offset;
-            if chunk.len() >= region_end {
-                chunk = &chunk[0..region_end];
+            if chunk.text.len() >= region_end {
+                chunk.text = &chunk.text[0..region_end];
                 self.transform_cursor.next(&());
             } else {
                 self.buffer_chunk.take();
             }
 
-            self.buffer_offset += chunk.len();
-            return Some((chunk, capture_ix));
+            self.buffer_offset += chunk.text.len();
+            return Some(chunk);
         }
 
         None
@@ -1335,7 +1343,9 @@ mod tests {
                         let start_version = buffer.version.clone();
                         let edit_count = rng.gen_range(1..=5);
                         buffer.randomly_edit(&mut rng, edit_count);
-                        buffer.edits_since(start_version).collect::<Vec<_>>()
+                        buffer
+                            .edits_since::<Point>(&start_version)
+                            .collect::<Vec<_>>()
                     });
                     log::info!("editing {:?}", edits);
                 }
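
Throughout this file the old `old_bytes`/`new_bytes`/`delta()` shape is replaced by a generic edit carrying `old` and `new` ranges, with the delta recomputed from their lengths. An illustrative stand-in (not the crate's definition) showing how the delta falls out of the two ranges:

    use std::ops::Range;

    // Illustrative stand-in for the generic edit representation: an edit records
    // the replaced range in the old text and the corresponding range in the new
    // text; the length delta is derived rather than stored.
    #[derive(Clone, Debug)]
    struct Edit<T> {
        old: Range<T>,
        new: Range<T>,
    }

    impl Edit<usize> {
        fn delta(&self) -> isize {
            self.new.len() as isize - self.old.len() as isize
        }
    }

    fn main() {
        // Replacing 3 bytes with 5 bytes at offset 10 grows the text by 2 bytes.
        let edit = Edit { old: 10..13, new: 10..15 };
        assert_eq!(edit.delta(), 2);

        // Splicing replacement text over `old` changes the length by exactly `delta`.
        let old_text = "0123456789abcdefghij".to_string();
        let mut new_text = old_text.clone();
        new_text.replace_range(edit.old.clone(), "ABCDE");
        assert_eq!(new_text.len() as isize, old_text.len() as isize + edit.delta());
        println!("ok");
    }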

crates/editor/src/display_map/tab_map.rs 🔗

@@ -1,5 +1,5 @@
 use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot};
-use language::{rope, HighlightId};
+use language::{rope, HighlightedChunk};
 use parking_lot::Mutex;
 use std::{mem, ops::Range};
 use sum_tree::Bias;
@@ -173,9 +173,11 @@ impl Snapshot {
                 .highlighted_chunks(input_start..input_end),
             column: expanded_char_column,
             tab_size: self.tab_size,
-            chunk: &SPACES[0..to_next_stop],
+            chunk: HighlightedChunk {
+                text: &SPACES[0..to_next_stop],
+                ..Default::default()
+            },
             skip_leading_tab: to_next_stop > 0,
-            style_id: Default::default(),
         }
     }
 
@@ -415,23 +417,21 @@ impl<'a> Iterator for Chunks<'a> {
 
 pub struct HighlightedChunks<'a> {
     fold_chunks: fold_map::HighlightedChunks<'a>,
-    chunk: &'a str,
-    style_id: HighlightId,
+    chunk: HighlightedChunk<'a>,
     column: usize,
     tab_size: usize,
     skip_leading_tab: bool,
 }
 
 impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = (&'a str, HighlightId);
+    type Item = HighlightedChunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
-        if self.chunk.is_empty() {
-            if let Some((chunk, style_id)) = self.fold_chunks.next() {
+        if self.chunk.text.is_empty() {
+            if let Some(chunk) = self.fold_chunks.next() {
                 self.chunk = chunk;
-                self.style_id = style_id;
                 if self.skip_leading_tab {
-                    self.chunk = &self.chunk[1..];
+                    self.chunk.text = &self.chunk.text[1..];
                     self.skip_leading_tab = false;
                 }
             } else {
@@ -439,18 +439,24 @@ impl<'a> Iterator for HighlightedChunks<'a> {
             }
         }
 
-        for (ix, c) in self.chunk.char_indices() {
+        for (ix, c) in self.chunk.text.char_indices() {
             match c {
                 '\t' => {
                     if ix > 0 {
-                        let (prefix, suffix) = self.chunk.split_at(ix);
-                        self.chunk = suffix;
-                        return Some((prefix, self.style_id));
+                        let (prefix, suffix) = self.chunk.text.split_at(ix);
+                        self.chunk.text = suffix;
+                        return Some(HighlightedChunk {
+                            text: prefix,
+                            ..self.chunk
+                        });
                     } else {
-                        self.chunk = &self.chunk[1..];
+                        self.chunk.text = &self.chunk.text[1..];
                         let len = self.tab_size - self.column % self.tab_size;
                         self.column += len;
-                        return Some((&SPACES[0..len], self.style_id));
+                        return Some(HighlightedChunk {
+                            text: &SPACES[0..len],
+                            ..self.chunk
+                        });
                     }
                 }
                 '\n' => self.column = 0,
@@ -458,7 +464,7 @@ impl<'a> Iterator for HighlightedChunks<'a> {
             }
         }
 
-        Some((mem::take(&mut self.chunk), mem::take(&mut self.style_id)))
+        Some(mem::take(&mut self.chunk))
     }
 }
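
The chunk-based iterator above still expands each tab to the next tab stop; only the payload changes from `(&str, HighlightId)` to a `HighlightedChunk`. A standalone sketch of the expansion rule it applies:

    // Standalone sketch of the tab-expansion rule used by the iterator above: a
    // tab advances the column to the next multiple of `tab_size`, and the emitted
    // chunk is that many spaces.
    const SPACES: &str = "                ";

    fn expand_tabs(text: &str, tab_size: usize) -> String {
        let mut column = 0;
        let mut output = String::new();
        for c in text.chars() {
            match c {
                '\t' => {
                    let len = tab_size - column % tab_size;
                    output.push_str(&SPACES[..len]);
                    column += len;
                }
                '\n' => {
                    output.push(c);
                    column = 0;
                }
                _ => {
                    output.push(c);
                    column += 1;
                }
            }
        }
        output
    }

    fn main() {
        assert_eq!(expand_tabs("a\tbc\td", 4), "a   bc  d");
        println!("ok");
    }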
 

crates/editor/src/display_map/wrap_map.rs 🔗

@@ -3,7 +3,7 @@ use super::{
     tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint, TextSummary},
 };
 use gpui::{fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, Task};
-use language::{HighlightId, Point};
+use language::{HighlightedChunk, Point};
 use lazy_static::lazy_static;
 use smol::future::yield_now;
 use std::{collections::VecDeque, ops::Range, time::Duration};
@@ -52,8 +52,7 @@ pub struct Chunks<'a> {
 
 pub struct HighlightedChunks<'a> {
     input_chunks: tab_map::HighlightedChunks<'a>,
-    input_chunk: &'a str,
-    style_id: HighlightId,
+    input_chunk: HighlightedChunk<'a>,
     output_position: WrapPoint,
     max_output_row: u32,
     transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>,
@@ -490,8 +489,7 @@ impl Snapshot {
             .min(self.tab_snapshot.max_point());
         HighlightedChunks {
             input_chunks: self.tab_snapshot.highlighted_chunks(input_start..input_end),
-            input_chunk: "",
-            style_id: HighlightId::default(),
+            input_chunk: Default::default(),
             output_position: output_start,
             max_output_row: rows.end,
             transforms,
@@ -674,7 +672,7 @@ impl<'a> Iterator for Chunks<'a> {
 }
 
 impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = (&'a str, HighlightId);
+    type Item = HighlightedChunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
         if self.output_position.row() >= self.max_output_row {
@@ -699,18 +697,19 @@ impl<'a> Iterator for HighlightedChunks<'a> {
 
             self.output_position.0 += summary;
             self.transforms.next(&());
-            return Some((&display_text[start_ix..end_ix], self.style_id));
+            return Some(HighlightedChunk {
+                text: &display_text[start_ix..end_ix],
+                ..self.input_chunk
+            });
         }
 
-        if self.input_chunk.is_empty() {
-            let (chunk, style_id) = self.input_chunks.next().unwrap();
-            self.input_chunk = chunk;
-            self.style_id = style_id;
+        if self.input_chunk.text.is_empty() {
+            self.input_chunk = self.input_chunks.next().unwrap();
         }
 
         let mut input_len = 0;
         let transform_end = self.transforms.end(&()).0;
-        for c in self.input_chunk.chars() {
+        for c in self.input_chunk.text.chars() {
             let char_len = c.len_utf8();
             input_len += char_len;
             if c == '\n' {
@@ -726,9 +725,12 @@ impl<'a> Iterator for HighlightedChunks<'a> {
             }
         }
 
-        let (prefix, suffix) = self.input_chunk.split_at(input_len);
-        self.input_chunk = suffix;
-        Some((prefix, self.style_id))
+        let (prefix, suffix) = self.input_chunk.text.split_at(input_len);
+        self.input_chunk.text = suffix;
+        Some(HighlightedChunk {
+            text: prefix,
+            ..self.input_chunk
+        })
     }
 }
 
@@ -1090,7 +1092,7 @@ mod tests {
 
                 let actual_text = self
                     .highlighted_chunks_for_rows(start_row..end_row)
-                    .map(|c| c.0)
+                    .map(|c| c.text)
                     .collect::<String>();
                 assert_eq!(
                     expected_text,

crates/editor/src/element.rs 🔗

@@ -17,7 +17,7 @@ use gpui::{
     MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle,
 };
 use json::json;
-use language::HighlightId;
+use language::{DiagnosticSeverity, HighlightedChunk};
 use smallvec::SmallVec;
 use std::{
     cmp::{self, Ordering},
@@ -394,7 +394,7 @@ impl EditorElement {
                     RunStyle {
                         font_id: style.text.font_id,
                         color: Color::black(),
-                        underline: false,
+                        underline: None,
                     },
                 )],
             )
@@ -435,7 +435,7 @@ impl EditorElement {
                         RunStyle {
                             font_id: style.text.font_id,
                             color,
-                            underline: false,
+                            underline: None,
                         },
                     )],
                 )));
@@ -476,7 +476,7 @@ impl EditorElement {
                             RunStyle {
                                 font_id: placeholder_style.font_id,
                                 color: placeholder_style.color,
-                                underline: false,
+                                underline: None,
                             },
                         )],
                     )
@@ -495,8 +495,12 @@ impl EditorElement {
         let mut line_exceeded_max_len = false;
         let chunks = snapshot.highlighted_chunks_for_rows(rows.clone());
 
-        'outer: for (chunk, style_ix) in chunks.chain(Some(("\n", HighlightId::default()))) {
-            for (ix, mut line_chunk) in chunk.split('\n').enumerate() {
+        let newline_chunk = HighlightedChunk {
+            text: "\n",
+            ..Default::default()
+        };
+        'outer: for chunk in chunks.chain([newline_chunk]) {
+            for (ix, mut line_chunk) in chunk.text.split('\n').enumerate() {
                 if ix > 0 {
                     layouts.push(cx.text_layout_cache.layout_str(
                         &line,
@@ -513,7 +517,8 @@ impl EditorElement {
                 }
 
                 if !line_chunk.is_empty() && !line_exceeded_max_len {
-                    let highlight_style = style_ix
+                    let highlight_style = chunk
+                        .highlight_id
                         .style(&style.syntax)
                         .unwrap_or(style.text.clone().into());
                     // Avoid a lookup if the font properties match the previous ones.
@@ -537,13 +542,25 @@ impl EditorElement {
                         line_exceeded_max_len = true;
                     }
 
+                    let underline = if let Some(severity) = chunk.diagnostic {
+                        match severity {
+                            DiagnosticSeverity::ERROR => Some(style.error_underline),
+                            DiagnosticSeverity::WARNING => Some(style.warning_underline),
+                            DiagnosticSeverity::INFORMATION => Some(style.information_underline),
+                            DiagnosticSeverity::HINT => Some(style.hint_underline),
+                            _ => highlight_style.underline,
+                        }
+                    } else {
+                        highlight_style.underline
+                    };
+
                     line.push_str(line_chunk);
                     styles.push((
                         line_chunk.len(),
                         RunStyle {
                             font_id,
                             color: highlight_style.color,
-                            underline: highlight_style.underline,
+                            underline,
                         },
                     ));
                     prev_font_id = font_id;
@@ -859,7 +876,7 @@ impl LayoutState {
                 RunStyle {
                     font_id: self.style.text.font_id,
                     color: Color::black(),
-                    underline: false,
+                    underline: None,
                 },
             )],
         )

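The underline decision in the layout loop above is a severity-to-color mapping. A hedged sketch with stand-in colors; the element itself reads `error_underline`, `warning_underline`, `information_underline`, and `hint_underline` from the editor style rather than hard-coding values:

```rust
use gpui::color::Color;
use language::DiagnosticSeverity;

// Illustrative only: choose an underline color from the diagnostic severity,
// falling back to whatever the syntax highlight specifies.
fn diagnostic_underline(
    severity: Option<DiagnosticSeverity>,
    highlight_underline: Option<Color>,
) -> Option<Color> {
    match severity {
        Some(DiagnosticSeverity::ERROR) => Some(Color::new(255, 0, 0, 255)),
        Some(DiagnosticSeverity::WARNING) => Some(Color::new(255, 191, 0, 255)),
        _ => highlight_underline,
    }
}
```
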
crates/editor/src/lib.rs 🔗

@@ -1527,10 +1527,12 @@ impl Editor {
 
     pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext<Self>) {
         self.buffer.update(cx, |buffer, cx| buffer.undo(cx));
+        self.request_autoscroll(cx);
     }
 
     pub fn redo(&mut self, _: &Redo, cx: &mut ViewContext<Self>) {
         self.buffer.update(cx, |buffer, cx| buffer.redo(cx));
+        self.request_autoscroll(cx);
     }
 
     pub fn move_left(&mut self, _: &MoveLeft, cx: &mut ViewContext<Self>) {
@@ -2344,10 +2346,8 @@ impl Editor {
         }
 
         if autoscroll {
-            self.autoscroll_requested = true;
-            cx.notify();
+            self.request_autoscroll(cx);
         }
-
         self.pause_cursor_blinking(cx);
 
         self.buffer.update(cx, |buffer, cx| {
@@ -2357,6 +2357,11 @@ impl Editor {
         });
     }
 
+    fn request_autoscroll(&mut self, cx: &mut ViewContext<Self>) {
+        self.autoscroll_requested = true;
+        cx.notify();
+    }
+
     fn start_transaction(&self, cx: &mut ViewContext<Self>) {
         self.buffer.update(cx, |buffer, _| {
             buffer
@@ -2682,7 +2687,7 @@ impl EditorSettings {
                         font_size: 14.,
                         color: gpui::color::Color::from_u32(0xff0000ff),
                         font_properties,
-                        underline: false,
+                        underline: None,
                     },
                     placeholder_text: None,
                     background: Default::default(),
@@ -2693,6 +2698,10 @@ impl EditorSettings {
                     selection: Default::default(),
                     guest_selections: Default::default(),
                     syntax: Default::default(),
+                    error_underline: Default::default(),
+                    warning_underline: Default::default(),
+                    information_underline: Default::default(),
+                    hint_underline: Default::default(),
                 }
             },
         }
@@ -2822,7 +2831,7 @@ impl SelectionExt for Selection<Point> {
 mod tests {
     use super::*;
     use crate::test::sample_text;
-    use buffer::{History, Point};
+    use buffer::Point;
     use unindent::Unindent;
 
     #[gpui::test]
@@ -4325,10 +4334,10 @@ mod tests {
     #[gpui::test]
     async fn test_select_larger_smaller_syntax_node(mut cx: gpui::TestAppContext) {
         let settings = cx.read(EditorSettings::test);
-        let language = Arc::new(Language::new(
+        let language = Some(Arc::new(Language::new(
             LanguageConfig::default(),
             tree_sitter_rust::language(),
-        ));
+        )));
 
         let text = r#"
             use mod1::mod2::{mod3, mod4};
@@ -4339,10 +4348,7 @@ mod tests {
         "#
         .unindent();
 
-        let buffer = cx.add_model(|cx| {
-            let history = History::new(text.into());
-            Buffer::from_history(0, history, None, Some(language), cx)
-        });
+        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
         let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
         view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing())
             .await;
@@ -4469,7 +4475,7 @@ mod tests {
     #[gpui::test]
     async fn test_autoclose_pairs(mut cx: gpui::TestAppContext) {
         let settings = cx.read(EditorSettings::test);
-        let language = Arc::new(Language::new(
+        let language = Some(Arc::new(Language::new(
             LanguageConfig {
                 brackets: vec![
                     BracketPair {
@@ -4488,7 +4494,7 @@ mod tests {
                 ..Default::default()
             },
             tree_sitter_rust::language(),
-        ));
+        )));
 
         let text = r#"
             a
@@ -4498,10 +4504,7 @@ mod tests {
         "#
         .unindent();
 
-        let buffer = cx.add_model(|cx| {
-            let history = History::new(text.into());
-            Buffer::from_history(0, history, None, Some(language), cx)
-        });
+        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
         let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
         view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing())
             .await;
@@ -4584,7 +4587,7 @@ mod tests {
     #[gpui::test]
     async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) {
         let settings = cx.read(EditorSettings::test);
-        let language = Arc::new(Language::new(
+        let language = Some(Arc::new(Language::new(
             LanguageConfig {
                 brackets: vec![
                     BracketPair {
@@ -4603,7 +4606,7 @@ mod tests {
                 ..Default::default()
             },
             tree_sitter_rust::language(),
-        ));
+        )));
 
         let text = concat!(
             "{   }\n",     // Suppress rustfmt
@@ -4613,10 +4616,7 @@ mod tests {
             "{{} }\n",     //
         );
 
-        let buffer = cx.add_model(|cx| {
-            let history = History::new(text.into());
-            Buffer::from_history(0, history, None, Some(language), cx)
-        });
+        let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
         let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
         view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing())
             .await;

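The test setups above switch from the removed `Buffer::from_history` to the new builder-style construction. A hedged sketch of the pattern as a hypothetical test helper (it assumes `tree-sitter-rust` as a dev-dependency, as these tests already do):

```rust
use std::sync::Arc;
use gpui::{ModelHandle, TestAppContext};
use language::{Buffer, Language, LanguageConfig};

// Hypothetical test helper: build a plain buffer, then attach a language
// (and optionally a language server) via `with_language`.
fn rust_test_buffer(cx: &mut TestAppContext, text: &str) -> ModelHandle<Buffer> {
    let language = Some(Arc::new(Language::new(
        LanguageConfig::default(),
        tree_sitter_rust::language(),
    )));
    let text = text.to_string();
    cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx))
}
```
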
crates/gpui/Cargo.toml 🔗

@@ -15,6 +15,7 @@ backtrace = "0.3"
 ctor = "0.1"
 env_logger = { version = "0.8", optional = true }
 etagere = "0.2"
+futures = "0.3"
 image = "0.23"
 lazy_static = "1.4.0"
 log = "0.4"

crates/gpui/examples/text.rs 🔗

@@ -62,7 +62,7 @@ impl gpui::Element for TextElement {
                 .select_font(family, &Default::default())
                 .unwrap(),
             color: Color::default(),
-            underline: false,
+            underline: None,
         };
         let bold = RunStyle {
             font_id: cx
@@ -76,7 +76,7 @@ impl gpui::Element for TextElement {
                 )
                 .unwrap(),
             color: Color::default(),
-            underline: false,
+            underline: None,
         };
 
         let text = "Hello world!";

crates/gpui/src/app.rs 🔗

@@ -23,6 +23,7 @@ use std::{
     mem,
     ops::{Deref, DerefMut},
     path::{Path, PathBuf},
+    pin::Pin,
     rc::{self, Rc},
     sync::{
         atomic::{AtomicUsize, Ordering::SeqCst},
@@ -35,6 +36,12 @@ pub trait Entity: 'static {
     type Event;
 
     fn release(&mut self, _: &mut MutableAppContext) {}
+    fn app_will_quit(
+        &mut self,
+        _: &mut MutableAppContext,
+    ) -> Option<Pin<Box<dyn 'static + Future<Output = ()>>>> {
+        None
+    }
 }
 
 pub trait View: Entity + Sized {
@@ -198,8 +205,6 @@ pub struct App(Rc<RefCell<MutableAppContext>>);
 #[derive(Clone)]
 pub struct AsyncAppContext(Rc<RefCell<MutableAppContext>>);
 
-pub struct BackgroundAppContext(*const RefCell<MutableAppContext>);
-
 #[derive(Clone)]
 pub struct TestAppContext {
     cx: Rc<RefCell<MutableAppContext>>,
@@ -220,20 +225,29 @@ impl App {
             asset_source,
         ))));
 
-        let cx = app.0.clone();
-        foreground_platform.on_menu_command(Box::new(move |action| {
-            let mut cx = cx.borrow_mut();
-            if let Some(key_window_id) = cx.cx.platform.key_window_id() {
-                if let Some((presenter, _)) = cx.presenters_and_platform_windows.get(&key_window_id)
-                {
-                    let presenter = presenter.clone();
-                    let path = presenter.borrow().dispatch_path(cx.as_ref());
-                    cx.dispatch_action_any(key_window_id, &path, action);
+        foreground_platform.on_quit(Box::new({
+            let cx = app.0.clone();
+            move || {
+                cx.borrow_mut().quit();
+            }
+        }));
+        foreground_platform.on_menu_command(Box::new({
+            let cx = app.0.clone();
+            move |action| {
+                let mut cx = cx.borrow_mut();
+                if let Some(key_window_id) = cx.cx.platform.key_window_id() {
+                    if let Some((presenter, _)) =
+                        cx.presenters_and_platform_windows.get(&key_window_id)
+                    {
+                        let presenter = presenter.clone();
+                        let path = presenter.borrow().dispatch_path(cx.as_ref());
+                        cx.dispatch_action_any(key_window_id, &path, action);
+                    } else {
+                        cx.dispatch_global_action_any(action);
+                    }
                 } else {
                     cx.dispatch_global_action_any(action);
                 }
-            } else {
-                cx.dispatch_global_action_any(action);
             }
         }));
 
@@ -265,6 +279,18 @@ impl App {
         self
     }
 
+    pub fn on_quit<F>(self, mut callback: F) -> Self
+    where
+        F: 'static + FnMut(&mut MutableAppContext),
+    {
+        let cx = self.0.clone();
+        self.0
+            .borrow_mut()
+            .foreground_platform
+            .on_quit(Box::new(move || callback(&mut *cx.borrow_mut())));
+        self
+    }
+
     pub fn on_event<F>(self, mut callback: F) -> Self
     where
         F: 'static + FnMut(Event, &mut MutableAppContext) -> bool,
@@ -739,6 +765,39 @@ impl MutableAppContext {
         App(self.weak_self.as_ref().unwrap().upgrade().unwrap())
     }
 
+    pub fn quit(&mut self) {
+        let mut futures = Vec::new();
+        for model_id in self.cx.models.keys().copied().collect::<Vec<_>>() {
+            let mut model = self.cx.models.remove(&model_id).unwrap();
+            futures.extend(model.app_will_quit(self));
+            self.cx.models.insert(model_id, model);
+        }
+
+        for view_id in self.cx.views.keys().copied().collect::<Vec<_>>() {
+            let mut view = self.cx.views.remove(&view_id).unwrap();
+            futures.extend(view.app_will_quit(self));
+            self.cx.views.insert(view_id, view);
+        }
+
+        self.remove_all_windows();
+
+        let futures = futures::future::join_all(futures);
+        if self
+            .background
+            .block_with_timeout(Duration::from_millis(100), futures)
+            .is_err()
+        {
+            log::error!("timed out waiting on app_will_quit");
+        }
+    }
+
+    fn remove_all_windows(&mut self) {
+        for (window_id, _) in self.cx.windows.drain() {
+            self.presenters_and_platform_windows.remove(&window_id);
+        }
+        self.remove_dropped_entities();
+    }
+
     pub fn platform(&self) -> Arc<dyn platform::Platform> {
         self.cx.platform.clone()
     }
@@ -1879,6 +1938,10 @@ pub trait AnyModel {
     fn as_any(&self) -> &dyn Any;
     fn as_any_mut(&mut self) -> &mut dyn Any;
     fn release(&mut self, cx: &mut MutableAppContext);
+    fn app_will_quit(
+        &mut self,
+        cx: &mut MutableAppContext,
+    ) -> Option<Pin<Box<dyn 'static + Future<Output = ()>>>>;
 }
 
 impl<T> AnyModel for T
@@ -1896,12 +1959,23 @@ where
     fn release(&mut self, cx: &mut MutableAppContext) {
         self.release(cx);
     }
+
+    fn app_will_quit(
+        &mut self,
+        cx: &mut MutableAppContext,
+    ) -> Option<Pin<Box<dyn 'static + Future<Output = ()>>>> {
+        self.app_will_quit(cx)
+    }
 }
 
 pub trait AnyView {
     fn as_any(&self) -> &dyn Any;
     fn as_any_mut(&mut self) -> &mut dyn Any;
     fn release(&mut self, cx: &mut MutableAppContext);
+    fn app_will_quit(
+        &mut self,
+        cx: &mut MutableAppContext,
+    ) -> Option<Pin<Box<dyn 'static + Future<Output = ()>>>>;
     fn ui_name(&self) -> &'static str;
     fn render<'a>(
         &mut self,
@@ -1932,6 +2006,13 @@ where
         self.release(cx);
     }
 
+    fn app_will_quit(
+        &mut self,
+        cx: &mut MutableAppContext,
+    ) -> Option<Pin<Box<dyn 'static + Future<Output = ()>>>> {
+        self.app_will_quit(cx)
+    }
+
     fn ui_name(&self) -> &'static str {
         T::ui_name()
     }

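The new `app_will_quit` hook lets models and views run async shutdown work; `MutableAppContext::quit` collects the returned futures and blocks on them for up to 100ms before exiting. A hedged sketch of an entity opting in (the `Settings` model and its cleanup work are hypothetical):

```rust
use std::{future::Future, pin::Pin};
use gpui::{Entity, MutableAppContext};

struct Settings; // hypothetical model that wants to flush state on quit

impl Entity for Settings {
    type Event = ();

    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<Pin<Box<dyn 'static + Future<Output = ()>>>> {
        // quit() joins all such futures and waits up to 100ms for them.
        Some(Box::pin(async move {
            // flush caches, notify a server, etc.
        }))
    }
}
```
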
crates/gpui/src/elements/label.rs 🔗

@@ -207,7 +207,7 @@ mod tests {
             "Menlo",
             12.,
             Default::default(),
-            false,
+            None,
             Color::black(),
             cx.font_cache(),
         )
@@ -216,7 +216,7 @@ mod tests {
             "Menlo",
             12.,
             *FontProperties::new().weight(Weight::BOLD),
-            false,
+            None,
             Color::new(255, 0, 0, 255),
             cx.font_cache(),
         )

crates/gpui/src/executor.rs 🔗

@@ -38,7 +38,9 @@ pub enum Foreground {
 }
 
 pub enum Background {
-    Deterministic(Arc<Deterministic>),
+    Deterministic {
+        executor: Arc<Deterministic>,
+    },
     Production {
         executor: Arc<smol::Executor<'static>>,
         _stop: channel::Sender<()>,
@@ -50,6 +52,7 @@ type AnyFuture = Pin<Box<dyn 'static + Send + Future<Output = Box<dyn Any + Send
 type AnyTask = async_task::Task<Box<dyn Any + Send + 'static>>;
 type AnyLocalTask = async_task::Task<Box<dyn Any + 'static>>;
 
+#[must_use]
 pub enum Task<T> {
     Local {
         any_task: AnyLocalTask,
@@ -515,7 +518,7 @@ impl Background {
         let future = any_future(future);
         let any_task = match self {
             Self::Production { executor, .. } => executor.spawn(future),
-            Self::Deterministic(executor) => executor.spawn(future),
+            Self::Deterministic { executor, .. } => executor.spawn(future),
         };
         Task::send(any_task)
     }
@@ -533,7 +536,7 @@ impl Background {
         if !timeout.is_zero() {
             let output = match self {
                 Self::Production { .. } => smol::block_on(util::timeout(timeout, &mut future)).ok(),
-                Self::Deterministic(executor) => executor.block_on(&mut future),
+                Self::Deterministic { executor, .. } => executor.block_on(&mut future),
             };
             if let Some(output) = output {
                 return Ok(*output.downcast().unwrap());
@@ -586,7 +589,7 @@ pub fn deterministic(seed: u64) -> (Rc<Foreground>, Arc<Background>) {
     let executor = Arc::new(Deterministic::new(seed));
     (
         Rc::new(Foreground::Deterministic(executor.clone())),
-        Arc::new(Background::Deterministic(executor)),
+        Arc::new(Background::Deterministic { executor }),
     )
 }
 

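With `Task` now marked `#[must_use]`, spawned tasks must be awaited, stored, or explicitly detached; the `Deterministic` variant also becomes a struct variant so it can grow fields without another refactor. A minimal sketch of the calling convention, assuming a `Background` executor handle:

```rust
use std::sync::Arc;
use gpui::executor::Background;

// Because Task is #[must_use], fire-and-forget work has to be detached
// explicitly; silently dropping the returned task would cancel it.
fn spawn_background_work(background: Arc<Background>) {
    background
        .spawn(async move {
            // long-running background work goes here
        })
        .detach();
}
```
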
crates/gpui/src/fonts.rs 🔗

@@ -27,14 +27,14 @@ pub struct TextStyle {
     pub font_id: FontId,
     pub font_size: f32,
     pub font_properties: Properties,
-    pub underline: bool,
+    pub underline: Option<Color>,
 }
 
 #[derive(Clone, Debug, Default)]
 pub struct HighlightStyle {
     pub color: Color,
     pub font_properties: Properties,
-    pub underline: bool,
+    pub underline: Option<Color>,
 }
 
 #[allow(non_camel_case_types)]
@@ -64,7 +64,7 @@ struct TextStyleJson {
     #[serde(default)]
     italic: bool,
     #[serde(default)]
-    underline: bool,
+    underline: UnderlineStyleJson,
 }
 
 #[derive(Deserialize)]
@@ -74,7 +74,14 @@ struct HighlightStyleJson {
     #[serde(default)]
     italic: bool,
     #[serde(default)]
-    underline: bool,
+    underline: UnderlineStyleJson,
+}
+
+#[derive(Deserialize)]
+#[serde(untagged)]
+enum UnderlineStyleJson {
+    Underlined(bool),
+    UnderlinedWithColor(Color),
 }
 
 impl TextStyle {
@@ -82,7 +89,7 @@ impl TextStyle {
         font_family_name: impl Into<Arc<str>>,
         font_size: f32,
         font_properties: Properties,
-        underline: bool,
+        underline: Option<Color>,
         color: Color,
         font_cache: &FontCache,
     ) -> anyhow::Result<Self> {
@@ -116,7 +123,7 @@ impl TextStyle {
                     json.family,
                     json.size,
                     font_properties,
-                    json.underline,
+                    underline_from_json(json.underline, json.color),
                     json.color,
                     font_cache,
                 )
@@ -167,6 +174,12 @@ impl From<TextStyle> for HighlightStyle {
     }
 }
 
+impl Default for UnderlineStyleJson {
+    fn default() -> Self {
+        Self::Underlined(false)
+    }
+}
+
 impl Default for TextStyle {
     fn default() -> Self {
         FONT_CACHE.with(|font_cache| {
@@ -199,7 +212,7 @@ impl HighlightStyle {
         Self {
             color: json.color,
             font_properties,
-            underline: json.underline,
+            underline: underline_from_json(json.underline, json.color),
         }
     }
 }
@@ -209,7 +222,7 @@ impl From<Color> for HighlightStyle {
         Self {
             color,
             font_properties: Default::default(),
-            underline: false,
+            underline: None,
         }
     }
 }
@@ -248,12 +261,20 @@ impl<'de> Deserialize<'de> for HighlightStyle {
             Ok(Self {
                 color: serde_json::from_value(json).map_err(de::Error::custom)?,
                 font_properties: Properties::new(),
-                underline: false,
+                underline: None,
             })
         }
     }
 }
 
+fn underline_from_json(json: UnderlineStyleJson, text_color: Color) -> Option<Color> {
+    match json {
+        UnderlineStyleJson::Underlined(false) => None,
+        UnderlineStyleJson::Underlined(true) => Some(text_color),
+        UnderlineStyleJson::UnderlinedWithColor(color) => Some(color),
+    }
+}
+
 fn properties_from_json(weight: Option<WeightJson>, italic: bool) -> Properties {
     let weight = match weight.unwrap_or(WeightJson::normal) {
         WeightJson::thin => Weight::THIN,

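Because `UnderlineStyleJson` is untagged, the same `underline` key in a theme can hold either a boolean (underline in the run's own color) or an explicit color, which `underline_from_json` then normalizes to `Option<Color>`. A self-contained sketch of that serde pattern with stand-in types (the real variant holds a gpui `Color`, not a string):

```rust
use serde::Deserialize;

// Stand-ins for UnderlineStyleJson / TextStyleJson: one field, two accepted shapes.
#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Underline {
    Enabled(bool),
    Colored(String), // the real enum variant deserializes a Color
}

#[derive(Deserialize, Debug)]
struct Style {
    #[serde(default = "no_underline")]
    underline: Underline,
}

fn no_underline() -> Underline {
    Underline::Enabled(false)
}

fn main() {
    let plain: Style = serde_json::from_str(r#"{}"#).unwrap();
    let boolean: Style = serde_json::from_str(r#"{ "underline": true }"#).unwrap();
    let colored: Style = serde_json::from_str(r##"{ "underline": "#ff0000" }"##).unwrap();
    assert!(matches!(plain.underline, Underline::Enabled(false)));
    assert!(matches!(boolean.underline, Underline::Enabled(true)));
    assert!(matches!(colored.underline, Underline::Colored(_)));
}
```
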
crates/gpui/src/platform.rs 🔗

@@ -53,11 +53,14 @@ pub trait Platform: Send + Sync {
     fn set_cursor_style(&self, style: CursorStyle);
 
     fn local_timezone(&self) -> UtcOffset;
+
+    fn path_for_resource(&self, name: Option<&str>, extension: Option<&str>) -> Result<PathBuf>;
 }
 
 pub(crate) trait ForegroundPlatform {
     fn on_become_active(&self, callback: Box<dyn FnMut()>);
     fn on_resign_active(&self, callback: Box<dyn FnMut()>);
+    fn on_quit(&self, callback: Box<dyn FnMut()>);
     fn on_event(&self, callback: Box<dyn FnMut(Event) -> bool>);
     fn on_open_files(&self, callback: Box<dyn FnMut(Vec<PathBuf>)>);
     fn run(&self, on_finish_launching: Box<dyn FnOnce() -> ()>);

crates/gpui/src/platform/mac/fonts.rs 🔗

@@ -417,21 +417,21 @@ mod tests {
         let menlo_regular = RunStyle {
             font_id: fonts.select_font(&menlo, &Properties::new()).unwrap(),
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
         let menlo_italic = RunStyle {
             font_id: fonts
                 .select_font(&menlo, &Properties::new().style(Style::Italic))
                 .unwrap(),
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
         let menlo_bold = RunStyle {
             font_id: fonts
                 .select_font(&menlo, &Properties::new().weight(Weight::BOLD))
                 .unwrap(),
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
         assert_ne!(menlo_regular, menlo_italic);
         assert_ne!(menlo_regular, menlo_bold);
@@ -458,13 +458,13 @@ mod tests {
         let zapfino_regular = RunStyle {
             font_id: fonts.select_font(&zapfino, &Properties::new())?,
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
         let menlo = fonts.load_family("Menlo")?;
         let menlo_regular = RunStyle {
             font_id: fonts.select_font(&menlo, &Properties::new())?,
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
 
         let text = "This is, m𐍈re 𐍈r less, Zapfino!𐍈";
@@ -543,7 +543,7 @@ mod tests {
         let style = RunStyle {
             font_id: fonts.select_font(&font_ids, &Default::default()).unwrap(),
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
 
         let line = "\u{feff}";

crates/gpui/src/platform/mac/platform.rs 🔗

@@ -14,7 +14,9 @@ use cocoa::{
         NSPasteboardTypeString, NSSavePanel, NSWindow,
     },
     base::{id, nil, selector, YES},
-    foundation::{NSArray, NSAutoreleasePool, NSData, NSInteger, NSString, NSURL},
+    foundation::{
+        NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSString, NSUInteger, NSURL,
+    },
 };
 use core_foundation::{
     base::{CFType, CFTypeRef, OSStatus, TCFType as _},
@@ -45,6 +47,9 @@ use std::{
 };
 use time::UtcOffset;
 
+#[allow(non_upper_case_globals)]
+const NSUTF8StringEncoding: NSUInteger = 4;
+
 const MAC_PLATFORM_IVAR: &'static str = "platform";
 static mut APP_CLASS: *const Class = ptr::null();
 static mut APP_DELEGATE_CLASS: *const Class = ptr::null();
@@ -76,6 +81,10 @@ unsafe fn build_classes() {
             sel!(applicationDidResignActive:),
             did_resign_active as extern "C" fn(&mut Object, Sel, id),
         );
+        decl.add_method(
+            sel!(applicationWillTerminate:),
+            will_terminate as extern "C" fn(&mut Object, Sel, id),
+        );
         decl.add_method(
             sel!(handleGPUIMenuItem:),
             handle_menu_item as extern "C" fn(&mut Object, Sel, id),
@@ -95,6 +104,7 @@ pub struct MacForegroundPlatform(RefCell<MacForegroundPlatformState>);
 pub struct MacForegroundPlatformState {
     become_active: Option<Box<dyn FnMut()>>,
     resign_active: Option<Box<dyn FnMut()>>,
+    quit: Option<Box<dyn FnMut()>>,
     event: Option<Box<dyn FnMut(crate::Event) -> bool>>,
     menu_command: Option<Box<dyn FnMut(&dyn AnyAction)>>,
     open_files: Option<Box<dyn FnMut(Vec<PathBuf>)>>,
@@ -191,6 +201,10 @@ impl platform::ForegroundPlatform for MacForegroundPlatform {
         self.0.borrow_mut().resign_active = Some(callback);
     }
 
+    fn on_quit(&self, callback: Box<dyn FnMut()>) {
+        self.0.borrow_mut().quit = Some(callback);
+    }
+
     fn on_event(&self, callback: Box<dyn FnMut(crate::Event) -> bool>) {
         self.0.borrow_mut().event = Some(callback);
     }
@@ -588,6 +602,27 @@ impl platform::Platform for MacPlatform {
             UtcOffset::from_whole_seconds(seconds_from_gmt.try_into().unwrap()).unwrap()
         }
     }
+
+    fn path_for_resource(&self, name: Option<&str>, extension: Option<&str>) -> Result<PathBuf> {
+        unsafe {
+            let bundle: id = NSBundle::mainBundle();
+            if bundle.is_null() {
+                Err(anyhow!("app is not running inside a bundle"))
+            } else {
+                let name = name.map_or(nil, |name| ns_string(name));
+                let extension = extension.map_or(nil, |extension| ns_string(extension));
+                let path: id = msg_send![bundle, pathForResource: name ofType: extension];
+                if path.is_null() {
+                    Err(anyhow!("resource could not be found"))
+                } else {
+                    let len = msg_send![path, lengthOfBytesUsingEncoding: NSUTF8StringEncoding];
+                    let bytes = path.UTF8String() as *const u8;
+                    let path = str::from_utf8(slice::from_raw_parts(bytes, len)).unwrap();
+                    Ok(PathBuf::from(path))
+                }
+            }
+        }
+    }
 }
 
 unsafe fn get_foreground_platform(object: &mut Object) -> &MacForegroundPlatform {
@@ -638,6 +673,13 @@ extern "C" fn did_resign_active(this: &mut Object, _: Sel, _: id) {
     }
 }
 
+extern "C" fn will_terminate(this: &mut Object, _: Sel, _: id) {
+    let platform = unsafe { get_foreground_platform(this) };
+    if let Some(callback) = platform.0.borrow_mut().quit.as_mut() {
+        callback();
+    }
+}
+
 extern "C" fn open_files(this: &mut Object, _: Sel, _: id, paths: id) {
     let paths = unsafe {
         (0..paths.count())

crates/gpui/src/platform/test.rs 🔗

@@ -1,6 +1,6 @@
 use super::CursorStyle;
 use crate::{AnyAction, ClipboardItem};
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use parking_lot::Mutex;
 use pathfinder_geometry::vector::Vector2F;
 use std::{
@@ -58,6 +58,8 @@ impl super::ForegroundPlatform for ForegroundPlatform {
 
     fn on_resign_active(&self, _: Box<dyn FnMut()>) {}
 
+    fn on_quit(&self, _: Box<dyn FnMut()>) {}
+
     fn on_event(&self, _: Box<dyn FnMut(crate::Event) -> bool>) {}
 
     fn on_open_files(&self, _: Box<dyn FnMut(Vec<std::path::PathBuf>)>) {}
@@ -148,6 +150,10 @@ impl super::Platform for Platform {
     fn local_timezone(&self) -> UtcOffset {
         UtcOffset::UTC
     }
+
+    fn path_for_resource(&self, _name: Option<&str>, _extension: Option<&str>) -> Result<PathBuf> {
+        Err(anyhow!("app not running inside a bundle"))
+    }
 }
 
 impl Window {

crates/gpui/src/text_layout.rs 🔗

@@ -28,7 +28,7 @@ pub struct TextLayoutCache {
 pub struct RunStyle {
     pub color: Color,
     pub font_id: FontId,
-    pub underline: bool,
+    pub underline: Option<Color>,
 }
 
 impl TextLayoutCache {
@@ -167,7 +167,7 @@ impl<'a> Hash for CacheKeyRef<'a> {
 #[derive(Default, Debug)]
 pub struct Line {
     layout: Arc<LineLayout>,
-    style_runs: SmallVec<[(u32, Color, bool); 32]>,
+    style_runs: SmallVec<[(u32, Color, Option<Color>); 32]>,
 }
 
 #[derive(Default, Debug)]
@@ -249,7 +249,7 @@ impl Line {
         let mut style_runs = self.style_runs.iter();
         let mut run_end = 0;
         let mut color = Color::black();
-        let mut underline_start = None;
+        let mut underline = None;
 
         for run in &self.layout.runs {
             let max_glyph_width = cx
@@ -268,24 +268,24 @@ impl Line {
                 }
 
                 if glyph.index >= run_end {
-                    if let Some((run_len, run_color, run_underlined)) = style_runs.next() {
-                        if let Some(underline_origin) = underline_start {
-                            if !*run_underlined || *run_color != color {
+                    if let Some((run_len, run_color, run_underline_color)) = style_runs.next() {
+                        if let Some((underline_origin, underline_color)) = underline {
+                            if *run_underline_color != Some(underline_color) {
                                 cx.scene.push_underline(scene::Quad {
                                     bounds: RectF::from_points(
                                         underline_origin,
                                         glyph_origin + vec2f(0., 1.),
                                     ),
-                                    background: Some(color),
+                                    background: Some(underline_color),
                                     border: Default::default(),
                                     corner_radius: 0.,
                                 });
-                                underline_start = None;
+                                underline = None;
                             }
                         }
 
-                        if *run_underlined {
-                            underline_start.get_or_insert(glyph_origin);
+                        if let Some(run_underline_color) = run_underline_color {
+                            underline.get_or_insert((glyph_origin, *run_underline_color));
                         }
 
                         run_end += *run_len as usize;
@@ -293,13 +293,13 @@ impl Line {
                     } else {
                         run_end = self.layout.len;
                         color = Color::black();
-                        if let Some(underline_origin) = underline_start.take() {
+                        if let Some((underline_origin, underline_color)) = underline.take() {
                             cx.scene.push_underline(scene::Quad {
                                 bounds: RectF::from_points(
                                     underline_origin,
                                     glyph_origin + vec2f(0., 1.),
                                 ),
-                                background: Some(color),
+                                background: Some(underline_color),
                                 border: Default::default(),
                                 corner_radius: 0.,
                             });
@@ -317,12 +317,12 @@ impl Line {
             }
         }
 
-        if let Some(underline_start) = underline_start.take() {
+        if let Some((underline_start, underline_color)) = underline.take() {
             let line_end = origin + baseline_offset + vec2f(self.layout.width, 0.);
 
             cx.scene.push_underline(scene::Quad {
                 bounds: RectF::from_points(underline_start, line_end + vec2f(0., 1.)),
-                background: Some(color),
+                background: Some(underline_color),
                 border: Default::default(),
                 corner_radius: 0.,
             });
@@ -597,7 +597,7 @@ impl LineWrapper {
                     RunStyle {
                         font_id: self.font_id,
                         color: Default::default(),
-                        underline: false,
+                        underline: None,
                     },
                 )],
             )
@@ -681,7 +681,7 @@ mod tests {
         let normal = RunStyle {
             font_id,
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
         let bold = RunStyle {
             font_id: font_cache
@@ -694,7 +694,7 @@ mod tests {
                 )
                 .unwrap(),
             color: Default::default(),
-            underline: false,
+            underline: None,
         };
 
         let text = "aa bbb cccc ddddd eeee";

crates/language/Cargo.toml 🔗

@@ -4,12 +4,18 @@ version = "0.1.0"
 edition = "2018"
 
 [features]
-test-support = ["rand", "buffer/test-support"]
+test-support = [
+    "rand",
+    "buffer/test-support",
+    "lsp/test-support",
+    "tree-sitter-rust",
+]
 
 [dependencies]
 buffer = { path = "../buffer" }
 clock = { path = "../clock" }
 gpui = { path = "../gpui" }
+lsp = { path = "../lsp" }
 rpc = { path = "../rpc" }
 theme = { path = "../theme" }
 util = { path = "../util" }
@@ -18,15 +24,18 @@ futures = "0.3"
 lazy_static = "1.4"
 log = "0.4"
 parking_lot = "0.11.1"
+postage = { version = "0.4.1", features = ["futures-traits"] }
 rand = { version = "0.8.3", optional = true }
 serde = { version = "1", features = ["derive"] }
 similar = "1.3"
 smol = "1.2"
 tree-sitter = "0.19.5"
+tree-sitter-rust = { version = "0.19.0", optional = true }
 
 [dev-dependencies]
 buffer = { path = "../buffer", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
 rand = "0.8.3"
 tree-sitter-rust = "0.19.0"
 unindent = "0.1.7"

crates/language/build.rs 🔗

@@ -0,0 +1,6 @@
+fn main() {
+    if let Ok(bundled) = std::env::var("ZED_BUNDLE") {
+        println!("cargo:rustc-env=ZED_BUNDLE={}", bundled);
+    }
+}
+

crates/language/src/language.rs 🔗

@@ -1,8 +1,10 @@
 use crate::HighlightMap;
 use anyhow::Result;
+use gpui::{executor::Background, AppContext};
+use lsp::LanguageServer;
 use parking_lot::Mutex;
 use serde::Deserialize;
-use std::{path::Path, str, sync::Arc};
+use std::{collections::HashSet, path::Path, str, sync::Arc};
 use theme::SyntaxTheme;
 use tree_sitter::{Language as Grammar, Query};
 pub use tree_sitter::{Parser, Tree};
@@ -12,6 +14,16 @@ pub struct LanguageConfig {
     pub name: String,
     pub path_suffixes: Vec<String>,
     pub brackets: Vec<BracketPair>,
+    pub language_server: Option<LanguageServerConfig>,
+}
+
+#[derive(Default, Deserialize)]
+pub struct LanguageServerConfig {
+    pub binary: String,
+    pub disk_based_diagnostic_sources: HashSet<String>,
+    #[cfg(any(test, feature = "test-support"))]
+    #[serde(skip)]
+    pub fake_server: Option<(Arc<LanguageServer>, Arc<std::sync::atomic::AtomicBool>)>,
 }
 
 #[derive(Clone, Debug, Deserialize)]
@@ -51,6 +63,12 @@ impl LanguageRegistry {
         }
     }
 
+    pub fn get_language(&self, name: &str) -> Option<&Arc<Language>> {
+        self.languages
+            .iter()
+            .find(|language| language.name() == name)
+    }
+
     pub fn select_language(&self, path: impl AsRef<Path>) -> Option<&Arc<Language>> {
         let path = path.as_ref();
         let filename = path.file_name().and_then(|name| name.to_str());
@@ -97,6 +115,38 @@ impl Language {
         self.config.name.as_str()
     }
 
+    pub fn start_server(
+        &self,
+        root_path: &Path,
+        cx: &AppContext,
+    ) -> Result<Option<Arc<lsp::LanguageServer>>> {
+        if let Some(config) = &self.config.language_server {
+            #[cfg(any(test, feature = "test-support"))]
+            if let Some((server, started)) = &config.fake_server {
+                started.store(true, std::sync::atomic::Ordering::SeqCst);
+                return Ok(Some(server.clone()));
+            }
+
+            const ZED_BUNDLE: Option<&'static str> = option_env!("ZED_BUNDLE");
+            let binary_path = if ZED_BUNDLE.map_or(Ok(false), |b| b.parse())? {
+                cx.platform()
+                    .path_for_resource(Some(&config.binary), None)?
+            } else {
+                Path::new(&config.binary).to_path_buf()
+            };
+            lsp::LanguageServer::new(&binary_path, root_path, cx.background().clone()).map(Some)
+        } else {
+            Ok(None)
+        }
+    }
+
+    pub fn disk_based_diagnostic_sources(&self) -> Option<&HashSet<String>> {
+        self.config
+            .language_server
+            .as_ref()
+            .map(|config| &config.disk_based_diagnostic_sources)
+    }
+
     pub fn brackets(&self) -> &[BracketPair] {
         &self.config.brackets
     }
@@ -111,6 +161,23 @@ impl Language {
     }
 }
 
+#[cfg(any(test, feature = "test-support"))]
+impl LanguageServerConfig {
+    pub async fn fake(executor: Arc<Background>) -> (Self, lsp::FakeLanguageServer) {
+        let (server, fake) = lsp::LanguageServer::fake(executor).await;
+        fake.started
+            .store(false, std::sync::atomic::Ordering::SeqCst);
+        let started = fake.started.clone();
+        (
+            Self {
+                fake_server: Some((server, started)),
+                ..Default::default()
+            },
+            fake,
+        )
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

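A language opts into LSP support by carrying a `LanguageServerConfig`; `start_server` then resolves the binary from the app bundle when built with `ZED_BUNDLE=true`, or from `PATH` otherwise. A hedged sketch of wiring this up in code; the binary name and diagnostic source shown are illustrative, and the shipped values live in `crates/zed/languages/rust/config.toml`:

```rust
use std::sync::Arc;
use language::{Language, LanguageConfig, LanguageServerConfig};

// Illustrative only: a Rust language definition that would spawn `rust-analyzer`
// and treat "rustc" diagnostics as disk-based (only valid for saved text).
fn rust_language() -> Arc<Language> {
    Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: Some(LanguageServerConfig {
                binary: "rust-analyzer".to_string(),
                disk_based_diagnostic_sources: vec!["rustc".to_string()]
                    .into_iter()
                    .collect(),
                ..Default::default()
            }),
            ..Default::default()
        },
        tree_sitter_rust::language(),
    ))
}
```
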
crates/language/src/lib.rs 🔗

@@ -1,20 +1,22 @@
 mod highlight_map;
 mod language;
+pub mod proto;
 #[cfg(test)]
 mod tests;
 
 pub use self::{
     highlight_map::{HighlightId, HighlightMap},
-    language::{BracketPair, Language, LanguageConfig, LanguageRegistry},
+    language::{BracketPair, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig},
 };
 use anyhow::{anyhow, Result};
-pub use buffer::{Buffer as TextBuffer, *};
+pub use buffer::{Buffer as TextBuffer, Operation as _, *};
 use clock::ReplicaId;
 use futures::FutureExt as _;
 use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
 use lazy_static::lazy_static;
+use lsp::LanguageServer;
 use parking_lot::Mutex;
-use rpc::proto;
+use postage::{prelude::Stream, sink::Sink, watch};
 use similar::{ChangeTag, TextDiff};
 use smol::future::yield_now;
 use std::{
@@ -24,15 +26,21 @@ use std::{
     collections::{BTreeMap, HashMap, HashSet},
     ffi::OsString,
     future::Future,
-    iter::Iterator,
+    iter::{Iterator, Peekable},
     ops::{Deref, DerefMut, Range},
     path::{Path, PathBuf},
     str,
     sync::Arc,
     time::{Duration, Instant, SystemTime, UNIX_EPOCH},
+    vec,
 };
 use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
-use util::TryFutureExt as _;
+use util::{post_inc, TryFutureExt as _};
+
+#[cfg(any(test, feature = "test-support"))]
+pub use tree_sitter_rust;
+
+pub use lsp::DiagnosticSeverity;
 
 thread_local! {
     static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
@@ -57,6 +65,9 @@ pub struct Buffer {
     syntax_tree: Mutex<Option<SyntaxTree>>,
     parsing_in_background: bool,
     parse_count: usize,
+    diagnostics: AnchorRangeMultimap<Diagnostic>,
+    diagnostics_update_count: usize,
+    language_server: Option<LanguageServerState>,
     #[cfg(test)]
     operations: Vec<Operation>,
 }
@@ -64,11 +75,39 @@ pub struct Buffer {
 pub struct Snapshot {
     text: buffer::Snapshot,
     tree: Option<Tree>,
+    diagnostics: AnchorRangeMultimap<Diagnostic>,
     is_parsing: bool,
     language: Option<Arc<Language>>,
     query_cursor: QueryCursorHandle,
 }
 
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Diagnostic {
+    pub severity: DiagnosticSeverity,
+    pub message: String,
+}
+
+struct LanguageServerState {
+    server: Arc<LanguageServer>,
+    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
+    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
+    next_version: usize,
+    _maintain_server: Task<Option<()>>,
+}
+
+#[derive(Clone)]
+struct LanguageServerSnapshot {
+    buffer_snapshot: buffer::Snapshot,
+    version: usize,
+    path: Arc<Path>,
+}
+
+#[derive(Clone)]
+pub enum Operation {
+    Buffer(buffer::Operation),
+    UpdateDiagnostics(AnchorRangeMultimap<Diagnostic>),
+}
+
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub enum Event {
     Edited,
@@ -87,13 +126,19 @@ pub trait File {
 
     fn mtime(&self) -> SystemTime;
 
+    /// Returns the path of this file relative to the worktree's root directory.
     fn path(&self) -> &Arc<Path>;
 
-    fn full_path(&self, cx: &AppContext) -> PathBuf;
+    /// Returns the absolute path of this file.
+    fn abs_path(&self) -> Option<PathBuf>;
+
+    /// Returns the path of this file relative to the worktree's parent directory (this means it
+    /// includes the name of the worktree's root folder).
+    fn full_path(&self) -> PathBuf;
 
     /// Returns the last component of this handle's absolute path. If this handle refers to the root
     /// of its worktree, then this method will return the name of the worktree itself.
-    fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option<OsString>;
+    fn file_name(&self) -> Option<OsString>;
 
     fn is_deleted(&self) -> bool;
 
@@ -150,15 +195,34 @@ struct Highlights<'a> {
 pub struct HighlightedChunks<'a> {
     range: Range<usize>,
     chunks: Chunks<'a>,
+    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
+    error_depth: usize,
+    warning_depth: usize,
+    information_depth: usize,
+    hint_depth: usize,
     highlights: Option<Highlights<'a>>,
 }
 
+#[derive(Clone, Copy, Debug, Default)]
+pub struct HighlightedChunk<'a> {
+    pub text: &'a str,
+    pub highlight_id: HighlightId,
+    pub diagnostic: Option<DiagnosticSeverity>,
+}
+
 struct Diff {
     base_version: clock::Global,
     new_text: Arc<str>,
     changes: Vec<(ChangeTag, usize)>,
 }
 
+#[derive(Clone, Copy)]
+struct DiagnosticEndpoint {
+    offset: usize,
+    is_start: bool,
+    severity: DiagnosticSeverity,
+}
+
 impl Buffer {
     pub fn new<T: Into<Arc<str>>>(
         replica_id: ReplicaId,
@@ -172,23 +236,22 @@ impl Buffer {
                 History::new(base_text.into()),
             ),
             None,
-            None,
-            cx,
         )
     }
 
-    pub fn from_history(
+    pub fn from_file<T: Into<Arc<str>>>(
         replica_id: ReplicaId,
-        history: History,
-        file: Option<Box<dyn File>>,
-        language: Option<Arc<Language>>,
+        base_text: T,
+        file: Box<dyn File>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
         Self::build(
-            TextBuffer::new(replica_id, cx.model_id() as u64, history),
-            file,
-            language,
-            cx,
+            TextBuffer::new(
+                replica_id,
+                cx.model_id() as u64,
+                History::new(base_text.into()),
+            ),
+            Some(file),
         )
     }
 
@@ -196,23 +259,54 @@ impl Buffer {
         replica_id: ReplicaId,
         message: proto::Buffer,
         file: Option<Box<dyn File>>,
-        language: Option<Arc<Language>>,
         cx: &mut ModelContext<Self>,
     ) -> Result<Self> {
-        Ok(Self::build(
-            TextBuffer::from_proto(replica_id, message)?,
-            file,
-            language,
-            cx,
-        ))
+        let mut buffer =
+            buffer::Buffer::new(replica_id, message.id, History::new(message.content.into()));
+        let ops = message
+            .history
+            .into_iter()
+            .map(|op| buffer::Operation::Edit(proto::deserialize_edit_operation(op)));
+        buffer.apply_ops(ops)?;
+        for set in message.selections {
+            let set = proto::deserialize_selection_set(set);
+            buffer.add_raw_selection_set(set.id, set);
+        }
+        let mut this = Self::build(buffer, file);
+        if let Some(diagnostics) = message.diagnostics {
+            this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx);
+        }
+        Ok(this)
     }
 
-    fn build(
-        buffer: TextBuffer,
-        file: Option<Box<dyn File>>,
+    pub fn to_proto(&self) -> proto::Buffer {
+        proto::Buffer {
+            id: self.remote_id(),
+            content: self.text.base_text().to_string(),
+            history: self
+                .text
+                .history()
+                .map(proto::serialize_edit_operation)
+                .collect(),
+            selections: self
+                .selection_sets()
+                .map(|(_, set)| proto::serialize_selection_set(set))
+                .collect(),
+            diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)),
+        }
+    }
+
+    pub fn with_language(
+        mut self,
         language: Option<Arc<Language>>,
+        language_server: Option<Arc<LanguageServer>>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
+        self.set_language(language, language_server, cx);
+        self
+    }
+
+    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
         let saved_mtime;
         if let Some(file) = file.as_ref() {
             saved_mtime = file.mtime();
@@ -220,7 +314,7 @@ impl Buffer {
             saved_mtime = UNIX_EPOCH;
         }
 
-        let mut result = Self {
+        Self {
             text: buffer,
             saved_mtime,
             saved_version: clock::Global::new(),
@@ -231,19 +325,20 @@ impl Buffer {
             sync_parse_timeout: Duration::from_millis(1),
             autoindent_requests: Default::default(),
             pending_autoindent: Default::default(),
-            language,
-
+            language: None,
+            diagnostics: Default::default(),
+            diagnostics_update_count: 0,
+            language_server: None,
             #[cfg(test)]
             operations: Default::default(),
-        };
-        result.reparse(cx);
-        result
+        }
     }
 
     pub fn snapshot(&self) -> Snapshot {
         Snapshot {
             text: self.text.snapshot(),
             tree: self.syntax_tree(),
+            diagnostics: self.diagnostics.clone(),
             is_parsing: self.parsing_in_background,
             language: self.language.clone(),
             query_cursor: QueryCursorHandle::new(),
@@ -263,7 +358,7 @@ impl Buffer {
             .as_ref()
             .ok_or_else(|| anyhow!("buffer has no file"))?;
         let text = self.as_rope().clone();
-        let version = self.version.clone();
+        let version = self.version();
         let save = file.save(self.remote_id(), text, version, cx.as_mut());
         Ok(cx.spawn(|this, mut cx| async move {
             let (version, mtime) = save.await?;
@@ -274,9 +369,96 @@ impl Buffer {
         }))
     }
 
-    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
+    pub fn set_language(
+        &mut self,
+        language: Option<Arc<Language>>,
+        language_server: Option<Arc<lsp::LanguageServer>>,
+        cx: &mut ModelContext<Self>,
+    ) {
         self.language = language;
+        self.language_server = if let Some(server) = language_server {
+            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
+            Some(LanguageServerState {
+                latest_snapshot: latest_snapshot_tx,
+                pending_snapshots: Default::default(),
+                next_version: 0,
+                server: server.clone(),
+                _maintain_server: cx.background().spawn(
+                    async move {
+                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
+                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
+                            if let Some(snapshot) = snapshot {
+                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
+                                if let Some(prev_snapshot) = prev_snapshot {
+                                    let changes = lsp::DidChangeTextDocumentParams {
+                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
+                                            uri,
+                                            snapshot.version as i32,
+                                        ),
+                                        content_changes: snapshot
+                                            .buffer_snapshot
+                                            .edits_since::<(PointUtf16, usize)>(
+                                                prev_snapshot.buffer_snapshot.version(),
+                                            )
+                                            .map(|edit| {
+                                                let edit_start = edit.new.start.0;
+                                                let edit_end = edit_start
+                                                    + (edit.old.end.0 - edit.old.start.0);
+                                                let new_text = snapshot
+                                                    .buffer_snapshot
+                                                    .text_for_range(
+                                                        edit.new.start.1..edit.new.end.1,
+                                                    )
+                                                    .collect();
+                                                lsp::TextDocumentContentChangeEvent {
+                                                    range: Some(lsp::Range::new(
+                                                        lsp::Position::new(
+                                                            edit_start.row,
+                                                            edit_start.column,
+                                                        ),
+                                                        lsp::Position::new(
+                                                            edit_end.row,
+                                                            edit_end.column,
+                                                        ),
+                                                    )),
+                                                    range_length: None,
+                                                    text: new_text,
+                                                }
+                                            })
+                                            .collect(),
+                                    };
+                                    server
+                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
+                                        .await?;
+                                } else {
+                                    server
+                                        .notify::<lsp::notification::DidOpenTextDocument>(
+                                            lsp::DidOpenTextDocumentParams {
+                                                text_document: lsp::TextDocumentItem::new(
+                                                    uri,
+                                                    Default::default(),
+                                                    snapshot.version as i32,
+                                                    snapshot.buffer_snapshot.text().into(),
+                                                ),
+                                            },
+                                        )
+                                        .await?;
+                                }
+
+                                prev_snapshot = Some(snapshot);
+                            }
+                        }
+                        Ok(())
+                    }
+                    .log_err(),
+                ),
+            })
+        } else {
+            None
+        };
+
         self.reparse(cx);
+        self.update_language_server();
     }
 
     pub fn did_save(
@@ -291,6 +473,25 @@ impl Buffer {
         if let Some(new_file) = new_file {
             self.file = Some(new_file);
         }
+        if let Some(state) = &self.language_server {
+            cx.background()
+                .spawn(
+                    state
+                        .server
+                        .notify::<lsp::notification::DidSaveTextDocument>(
+                            lsp::DidSaveTextDocumentParams {
+                                text_document: lsp::TextDocumentIdentifier {
+                                    uri: lsp::Url::from_file_path(
+                                        self.file.as_ref().unwrap().abs_path().unwrap(),
+                                    )
+                                    .unwrap(),
+                                },
+                                text: None,
+                            },
+                        ),
+                )
+                .detach()
+        }
         cx.emit(Event::Saved);
     }
 
@@ -332,7 +533,7 @@ impl Buffer {
                                     .await;
                                 this.update(&mut cx, |this, cx| {
                                     if this.apply_diff(diff, cx) {
-                                        this.saved_version = this.version.clone();
+                                        this.saved_version = this.version();
                                         this.saved_mtime = new_mtime;
                                         cx.emit(Event::Reloaded);
                                     }
@@ -453,22 +654,17 @@ impl Buffer {
     }
 
     fn interpolate_tree(&self, tree: &mut SyntaxTree) {
-        let mut delta = 0_isize;
-        for edit in self.edits_since(tree.version.clone()) {
-            let start_offset = (edit.old_bytes.start as isize + delta) as usize;
-            let start_point = self.as_rope().to_point(start_offset);
+        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
+            let (bytes, lines) = edit.flatten();
             tree.tree.edit(&InputEdit {
-                start_byte: start_offset,
-                old_end_byte: start_offset + edit.deleted_bytes(),
-                new_end_byte: start_offset + edit.inserted_bytes(),
-                start_position: start_point.to_ts_point(),
-                old_end_position: (start_point + edit.deleted_lines()).to_ts_point(),
-                new_end_position: self
-                    .as_rope()
-                    .to_point(start_offset + edit.inserted_bytes())
+                start_byte: bytes.new.start,
+                old_end_byte: bytes.new.start + bytes.old.len(),
+                new_end_byte: bytes.new.end,
+                start_position: lines.new.start.to_ts_point(),
+                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                     .to_ts_point(),
+                new_end_position: lines.new.end.to_ts_point(),
             });
-            delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize;
         }
         tree.version = self.version();
     }
@@ -486,6 +682,118 @@ impl Buffer {
         cx.notify();
     }
 
+    pub fn update_diagnostics(
+        &mut self,
+        version: Option<i32>,
+        mut diagnostics: Vec<lsp::Diagnostic>,
+        cx: &mut ModelContext<Self>,
+    ) -> Result<Operation> {
+        let version = version.map(|version| version as usize);
+        let content = if let Some(version) = version {
+            let language_server = self.language_server.as_mut().unwrap();
+            let snapshot = language_server
+                .pending_snapshots
+                .get(&version)
+                .ok_or_else(|| anyhow!("missing snapshot"))?;
+            snapshot.buffer_snapshot.content()
+        } else {
+            self.content()
+        };
+
+        let empty_set = HashSet::new();
+        let disk_based_sources = self
+            .language
+            .as_ref()
+            .and_then(|language| language.disk_based_diagnostic_sources())
+            .unwrap_or(&empty_set);
+
+        diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
+        self.diagnostics = {
+            let mut edits_since_save = content
+                .edits_since::<PointUtf16>(&self.saved_version)
+                .peekable();
+            let mut last_edit_old_end = PointUtf16::zero();
+            let mut last_edit_new_end = PointUtf16::zero();
+
+            content.anchor_range_multimap(
+                Bias::Left,
+                Bias::Right,
+                diagnostics.into_iter().filter_map(|diagnostic| {
+                    let mut start = PointUtf16::new(
+                        diagnostic.range.start.line,
+                        diagnostic.range.start.character,
+                    );
+                    let mut end =
+                        PointUtf16::new(diagnostic.range.end.line, diagnostic.range.end.character);
+                    if diagnostic
+                        .source
+                        .as_ref()
+                        .map_or(false, |source| disk_based_sources.contains(source))
+                    {
+                        while let Some(edit) = edits_since_save.peek() {
+                            if edit.old.end <= start {
+                                last_edit_old_end = edit.old.end;
+                                last_edit_new_end = edit.new.end;
+                                edits_since_save.next();
+                            } else if edit.old.start <= end && edit.old.end >= start {
+                                return None;
+                            } else {
+                                break;
+                            }
+                        }
+
+                        start = last_edit_new_end + (start - last_edit_old_end);
+                        end = last_edit_new_end + (end - last_edit_old_end);
+                    }
+
+                    let mut range = content.clip_point_utf16(start, Bias::Left)
+                        ..content.clip_point_utf16(end, Bias::Right);
+                    if range.start == range.end {
+                        range.end.column += 1;
+                        range.end = content.clip_point_utf16(range.end, Bias::Right);
+                    }
+                    Some((
+                        range,
+                        Diagnostic {
+                            severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
+                            message: diagnostic.message,
+                        },
+                    ))
+                }),
+            )
+        };
+
+        if let Some(version) = version {
+            let language_server = self.language_server.as_mut().unwrap();
+            let versions_to_delete = language_server
+                .pending_snapshots
+                .range(..version)
+                .map(|(v, _)| *v)
+                .collect::<Vec<_>>();
+            for version in versions_to_delete {
+                language_server.pending_snapshots.remove(&version);
+            }
+        }
+
+        self.diagnostics_update_count += 1;
+        cx.notify();
+        Ok(Operation::UpdateDiagnostics(self.diagnostics.clone()))
+    }
+
+    pub fn diagnostics_in_range<'a, T: 'a + ToOffset>(
+        &'a self,
+        range: Range<T>,
+    ) -> impl Iterator<Item = (Range<Point>, &Diagnostic)> + 'a {
+        let content = self.content();
+        self.diagnostics
+            .intersecting_ranges(range, content, true)
+            .map(move |(_, range, diagnostic)| (range, diagnostic))
+    }
+
+    pub fn diagnostics_update_count(&self) -> usize {
+        self.diagnostics_update_count
+    }
+
     fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
         if let Some(indent_columns) = self.compute_autoindents() {
             let indent_columns = cx.background().spawn(indent_columns);
@@ -810,17 +1118,39 @@ impl Buffer {
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) {
-            cx.notify();
             let was_dirty = start_version != self.saved_version;
-            let edited = self.edits_since(start_version).next().is_some();
-            if edited {
-                self.did_edit(was_dirty, cx);
-                self.reparse(cx);
-            }
+            self.did_edit(&start_version, was_dirty, cx);
         }
         Ok(())
     }
 
+    fn update_language_server(&mut self) {
+        let language_server = if let Some(language_server) = self.language_server.as_mut() {
+            language_server
+        } else {
+            return;
+        };
+        let abs_path = self
+            .file
+            .as_ref()
+            .map_or(Path::new("/").to_path_buf(), |file| {
+                file.abs_path().unwrap()
+            });
+
+        let version = post_inc(&mut language_server.next_version);
+        let snapshot = LanguageServerSnapshot {
+            buffer_snapshot: self.text.snapshot(),
+            version,
+            path: Arc::from(abs_path),
+        };
+        language_server
+            .pending_snapshots
+            .insert(version, snapshot.clone());
+        let _ = language_server
+            .latest_snapshot
+            .blocking_send(Some(snapshot));
+    }
+
     pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
     where
         I: IntoIterator<Item = Range<S>>,
@@ -925,14 +1255,27 @@ impl Buffer {
         }
 
         self.end_transaction(None, cx).unwrap();
-        self.send_operation(Operation::Edit(edit), cx);
+        self.send_operation(Operation::Buffer(buffer::Operation::Edit(edit)), cx);
     }
 
-    fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext<Self>) {
+    fn did_edit(
+        &mut self,
+        old_version: &clock::Global,
+        was_dirty: bool,
+        cx: &mut ModelContext<Self>,
+    ) {
+        if self.edits_since::<usize>(old_version).next().is_none() {
+            return;
+        }
+
+        self.reparse(cx);
+        self.update_language_server();
+
         cx.emit(Event::Edited);
         if !was_dirty {
             cx.emit(Event::Dirtied);
         }
+        cx.notify();
     }
 
     pub fn add_selection_set<T: ToOffset>(
@@ -941,10 +1284,10 @@ impl Buffer {
         cx: &mut ModelContext<Self>,
     ) -> SelectionSetId {
         let operation = self.text.add_selection_set(selections);
-        if let Operation::UpdateSelections { set_id, .. } = &operation {
+        if let buffer::Operation::UpdateSelections { set_id, .. } = &operation {
             let set_id = *set_id;
             cx.notify();
-            self.send_operation(operation, cx);
+            self.send_operation(Operation::Buffer(operation), cx);
             set_id
         } else {
             unreachable!()
@@ -959,7 +1302,7 @@ impl Buffer {
     ) -> Result<()> {
         let operation = self.text.update_selection_set(set_id, selections)?;
         cx.notify();
-        self.send_operation(operation, cx);
+        self.send_operation(Operation::Buffer(operation), cx);
         Ok(())
     }
 
@@ -969,7 +1312,7 @@ impl Buffer {
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         let operation = self.text.set_active_selection_set(set_id)?;
-        self.send_operation(operation, cx);
+        self.send_operation(Operation::Buffer(operation), cx);
         Ok(())
     }
 
@@ -980,7 +1323,7 @@ impl Buffer {
     ) -> Result<()> {
         let operation = self.text.remove_selection_set(set_id)?;
         cx.notify();
-        self.send_operation(operation, cx);
+        self.send_operation(Operation::Buffer(operation), cx);
         Ok(())
     }
 
@@ -990,21 +1333,36 @@ impl Buffer {
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         self.pending_autoindent.take();
-
         let was_dirty = self.is_dirty();
         let old_version = self.version.clone();
-
-        self.text.apply_ops(ops)?;
-
+        let buffer_ops = ops
+            .into_iter()
+            .filter_map(|op| match op {
+                Operation::Buffer(op) => Some(op),
+                Operation::UpdateDiagnostics(diagnostics) => {
+                    self.apply_diagnostic_update(diagnostics, cx);
+                    None
+                }
+            })
+            .collect::<Vec<_>>();
+        self.text.apply_ops(buffer_ops)?;
+        self.did_edit(&old_version, was_dirty, cx);
+        // Notify independently of whether the buffer was edited as the operations could include a
+        // selection update.
         cx.notify();
-        if self.edits_since(old_version).next().is_some() {
-            self.did_edit(was_dirty, cx);
-            self.reparse(cx);
-        }
-
         Ok(())
     }
 
+    fn apply_diagnostic_update(
+        &mut self,
+        diagnostics: AnchorRangeMultimap<Diagnostic>,
+        cx: &mut ModelContext<Self>,
+    ) {
+        self.diagnostics = diagnostics;
+        self.diagnostics_update_count += 1;
+        cx.notify();
+    }
+
     #[cfg(not(test))]
     pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
         if let Some(file) = &self.file {
@@ -1027,14 +1385,10 @@ impl Buffer {
         let old_version = self.version.clone();
 
         for operation in self.text.undo() {
-            self.send_operation(operation, cx);
+            self.send_operation(Operation::Buffer(operation), cx);
         }
 
-        cx.notify();
-        if self.edits_since(old_version).next().is_some() {
-            self.did_edit(was_dirty, cx);
-            self.reparse(cx);
-        }
+        self.did_edit(&old_version, was_dirty, cx);
     }
 
     pub fn redo(&mut self, cx: &mut ModelContext<Self>) {
@@ -1042,14 +1396,10 @@ impl Buffer {
         let old_version = self.version.clone();
 
         for operation in self.text.redo() {
-            self.send_operation(operation, cx);
+            self.send_operation(Operation::Buffer(operation), cx);
         }
 
-        cx.notify();
-        if self.edits_since(old_version).next().is_some() {
-            self.did_edit(was_dirty, cx);
-            self.reparse(cx);
-        }
+        self.did_edit(&old_version, was_dirty, cx);
     }
 }
 
@@ -1080,6 +1430,7 @@ impl Entity for Buffer {
     }
 }
 
+// TODO: Do we need to clone a buffer?
 impl Clone for Buffer {
     fn clone(&self) -> Self {
         Self {
@@ -1094,7 +1445,9 @@ impl Clone for Buffer {
             parse_count: self.parse_count,
             autoindent_requests: Default::default(),
             pending_autoindent: Default::default(),
-
+            diagnostics: self.diagnostics.clone(),
+            diagnostics_update_count: self.diagnostics_update_count,
+            language_server: None,
             #[cfg(test)]
             operations: self.operations.clone(),
         }
@@ -1247,30 +1600,54 @@ impl Snapshot {
         range: Range<T>,
     ) -> HighlightedChunks {
         let range = range.start.to_offset(&*self)..range.end.to_offset(&*self);
-        let chunks = self.text.as_rope().chunks_in_range(range.clone());
-        if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) {
-            let captures = self.query_cursor.set_byte_range(range.clone()).captures(
-                &language.highlights_query,
-                tree.root_node(),
-                TextProvider(self.text.as_rope()),
-            );
 
-            HighlightedChunks {
-                range,
-                chunks,
-                highlights: Some(Highlights {
+        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
+        for (_, range, diagnostic) in
+            self.diagnostics
+                .intersecting_ranges(range.clone(), self.content(), true)
+        {
+            diagnostic_endpoints.push(DiagnosticEndpoint {
+                offset: range.start,
+                is_start: true,
+                severity: diagnostic.severity,
+            });
+            diagnostic_endpoints.push(DiagnosticEndpoint {
+                offset: range.end,
+                is_start: false,
+                severity: diagnostic.severity,
+            });
+        }
+        diagnostic_endpoints.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
+        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
+
+        let chunks = self.text.as_rope().chunks_in_range(range.clone());
+        let highlights =
+            if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) {
+                let captures = self.query_cursor.set_byte_range(range.clone()).captures(
+                    &language.highlights_query,
+                    tree.root_node(),
+                    TextProvider(self.text.as_rope()),
+                );
+
+                Some(Highlights {
                     captures,
                     next_capture: None,
                     stack: Default::default(),
                     highlight_map: language.highlight_map(),
-                }),
-            }
-        } else {
-            HighlightedChunks {
-                range,
-                chunks,
-                highlights: None,
-            }
+                })
+            } else {
+                None
+            };
+
+        HighlightedChunks {
+            range,
+            chunks,
+            diagnostic_endpoints,
+            error_depth: 0,
+            warning_depth: 0,
+            information_depth: 0,
+            hint_depth: 0,
+            highlights,
         }
     }
 }
@@ -1280,6 +1657,7 @@ impl Clone for Snapshot {
         Self {
             text: self.text.clone(),
             tree: self.tree.clone(),
+            diagnostics: self.diagnostics.clone(),
             is_parsing: self.is_parsing,
             language: self.language.clone(),
             query_cursor: QueryCursorHandle::new(),
@@ -1341,13 +1719,43 @@ impl<'a> HighlightedChunks<'a> {
     pub fn offset(&self) -> usize {
         self.range.start
     }
+
+    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
+        let depth = match endpoint.severity {
+            DiagnosticSeverity::ERROR => &mut self.error_depth,
+            DiagnosticSeverity::WARNING => &mut self.warning_depth,
+            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
+            DiagnosticSeverity::HINT => &mut self.hint_depth,
+            _ => return,
+        };
+        if endpoint.is_start {
+            *depth += 1;
+        } else {
+            *depth -= 1;
+        }
+    }
+
+    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
+        if self.error_depth > 0 {
+            Some(DiagnosticSeverity::ERROR)
+        } else if self.warning_depth > 0 {
+            Some(DiagnosticSeverity::WARNING)
+        } else if self.information_depth > 0 {
+            Some(DiagnosticSeverity::INFORMATION)
+        } else if self.hint_depth > 0 {
+            Some(DiagnosticSeverity::HINT)
+        } else {
+            None
+        }
+    }
 }
 
 impl<'a> Iterator for HighlightedChunks<'a> {
-    type Item = (&'a str, HighlightId);
+    type Item = HighlightedChunk<'a>;
 
     fn next(&mut self) -> Option<Self::Item> {
         let mut next_capture_start = usize::MAX;
+        let mut next_diagnostic_endpoint = usize::MAX;
 
         if let Some(highlights) = self.highlights.as_mut() {
             while let Some((parent_capture_end, _)) = highlights.stack.last() {
@@ -1368,22 +1776,36 @@ impl<'a> Iterator for HighlightedChunks<'a> {
                     next_capture_start = capture.node.start_byte();
                     break;
                 } else {
-                    let style_id = highlights.highlight_map.get(capture.index);
-                    highlights.stack.push((capture.node.end_byte(), style_id));
+                    let highlight_id = highlights.highlight_map.get(capture.index);
+                    highlights
+                        .stack
+                        .push((capture.node.end_byte(), highlight_id));
                     highlights.next_capture = highlights.captures.next();
                 }
             }
         }
 
+        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
+            if endpoint.offset <= self.range.start {
+                self.update_diagnostic_depths(endpoint);
+                self.diagnostic_endpoints.next();
+            } else {
+                next_diagnostic_endpoint = endpoint.offset;
+                break;
+            }
+        }
+
         if let Some(chunk) = self.chunks.peek() {
             let chunk_start = self.range.start;
-            let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start);
-            let mut style_id = HighlightId::default();
-            if let Some((parent_capture_end, parent_style_id)) =
+            let mut chunk_end = (self.chunks.offset() + chunk.len())
+                .min(next_capture_start)
+                .min(next_diagnostic_endpoint);
+            let mut highlight_id = HighlightId::default();
+            if let Some((parent_capture_end, parent_highlight_id)) =
                 self.highlights.as_ref().and_then(|h| h.stack.last())
             {
                 chunk_end = chunk_end.min(*parent_capture_end);
-                style_id = *parent_style_id;
+                highlight_id = *parent_highlight_id;
             }
 
             let slice =
@@ -1393,7 +1815,11 @@ impl<'a> Iterator for HighlightedChunks<'a> {
                 self.chunks.next().unwrap();
             }
 
-            Some((slice, style_id))
+            Some(HighlightedChunk {
+                text: slice,
+                highlight_id,
+                diagnostic: self.current_diagnostic_severity(),
+            })
         } else {
             None
         }
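
The new `HighlightedChunks` iterator above keeps one depth counter per severity, bumps it at each diagnostic start/end endpoint, and reports the most severe diagnostic still open for every chunk it yields. A minimal standalone sketch of that bookkeeping, with simplified illustrative types (not the crate's API):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Severity {
        Hint,
        Information,
        Warning,
        Error,
    }

    #[derive(Clone, Copy)]
    struct Endpoint {
        offset: usize,
        is_start: bool,
        severity: Severity,
    }

    /// Given balanced start/end endpoints, report the most severe diagnostic that is
    /// open at each query offset. Offsets must be ascending, like chunk boundaries.
    fn severity_at(mut endpoints: Vec<Endpoint>, offsets: &[usize]) -> Vec<Option<Severity>> {
        // Starts sort before ends at the same offset, mirroring (offset, !is_start) above.
        endpoints.sort_unstable_by_key(|e| (e.offset, !e.is_start));
        let mut depths = [0usize; 4];
        let mut endpoints = endpoints.into_iter().peekable();
        offsets
            .iter()
            .map(|&offset| {
                // Consume every endpoint at or before this offset, adjusting its depth.
                while let Some(e) = endpoints.peek().copied() {
                    if e.offset <= offset {
                        let ix = e.severity as usize;
                        if e.is_start {
                            depths[ix] += 1;
                        } else {
                            depths[ix] -= 1;
                        }
                        endpoints.next();
                    } else {
                        break;
                    }
                }
                // The most severe diagnostic that is currently open wins.
                [Severity::Error, Severity::Warning, Severity::Information, Severity::Hint]
                    .iter()
                    .copied()
                    .find(|s| depths[*s as usize] > 0)
            })
            .collect()
    }

Because the endpoints and the chunk boundaries are both visited in ascending order, overlapping diagnostics need no interval tree at render time; a handful of counters is enough.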

crates/language/src/proto.rs 🔗

@@ -0,0 +1,315 @@
+use std::sync::Arc;
+
+use crate::Diagnostic;
+
+use super::Operation;
+use anyhow::{anyhow, Result};
+use buffer::*;
+use clock::ReplicaId;
+use lsp::DiagnosticSeverity;
+use rpc::proto;
+
+pub use proto::Buffer;
+
+pub fn serialize_operation(operation: &Operation) -> proto::Operation {
+    proto::Operation {
+        variant: Some(match operation {
+            Operation::Buffer(buffer::Operation::Edit(edit)) => {
+                proto::operation::Variant::Edit(serialize_edit_operation(edit))
+            }
+            Operation::Buffer(buffer::Operation::Undo {
+                undo,
+                lamport_timestamp,
+            }) => proto::operation::Variant::Undo(proto::operation::Undo {
+                replica_id: undo.id.replica_id as u32,
+                local_timestamp: undo.id.value,
+                lamport_timestamp: lamport_timestamp.value,
+                ranges: undo
+                    .ranges
+                    .iter()
+                    .map(|r| proto::Range {
+                        start: r.start.0 as u64,
+                        end: r.end.0 as u64,
+                    })
+                    .collect(),
+                counts: undo
+                    .counts
+                    .iter()
+                    .map(|(edit_id, count)| proto::operation::UndoCount {
+                        replica_id: edit_id.replica_id as u32,
+                        local_timestamp: edit_id.value,
+                        count: *count,
+                    })
+                    .collect(),
+                version: From::from(&undo.version),
+            }),
+            Operation::Buffer(buffer::Operation::UpdateSelections {
+                set_id,
+                selections,
+                lamport_timestamp,
+            }) => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
+                replica_id: set_id.replica_id as u32,
+                local_timestamp: set_id.value,
+                lamport_timestamp: lamport_timestamp.value,
+                version: selections.version().into(),
+                selections: selections
+                    .full_offset_ranges()
+                    .map(|(range, state)| proto::Selection {
+                        id: state.id as u64,
+                        start: range.start.0 as u64,
+                        end: range.end.0 as u64,
+                        reversed: state.reversed,
+                    })
+                    .collect(),
+            }),
+            Operation::Buffer(buffer::Operation::RemoveSelections {
+                set_id,
+                lamport_timestamp,
+            }) => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections {
+                replica_id: set_id.replica_id as u32,
+                local_timestamp: set_id.value,
+                lamport_timestamp: lamport_timestamp.value,
+            }),
+            Operation::Buffer(buffer::Operation::SetActiveSelections {
+                set_id,
+                lamport_timestamp,
+            }) => proto::operation::Variant::SetActiveSelections(
+                proto::operation::SetActiveSelections {
+                    replica_id: lamport_timestamp.replica_id as u32,
+                    local_timestamp: set_id.map(|set_id| set_id.value),
+                    lamport_timestamp: lamport_timestamp.value,
+                },
+            ),
+            Operation::UpdateDiagnostics(diagnostic_set) => {
+                proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set))
+            }
+        }),
+    }
+}
+
+pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
+    let ranges = operation
+        .ranges
+        .iter()
+        .map(|range| proto::Range {
+            start: range.start.0 as u64,
+            end: range.end.0 as u64,
+        })
+        .collect();
+    proto::operation::Edit {
+        replica_id: operation.timestamp.replica_id as u32,
+        local_timestamp: operation.timestamp.local,
+        lamport_timestamp: operation.timestamp.lamport,
+        version: From::from(&operation.version),
+        ranges,
+        new_text: operation.new_text.clone(),
+    }
+}
+
+pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
+    let version = set.selections.version();
+    let entries = set.selections.full_offset_ranges();
+    proto::SelectionSet {
+        replica_id: set.id.replica_id as u32,
+        lamport_timestamp: set.id.value as u32,
+        is_active: set.active,
+        version: version.into(),
+        selections: entries
+            .map(|(range, state)| proto::Selection {
+                id: state.id as u64,
+                start: range.start.0 as u64,
+                end: range.end.0 as u64,
+                reversed: state.reversed,
+            })
+            .collect(),
+    }
+}
+
+pub fn serialize_diagnostics(map: &AnchorRangeMultimap<Diagnostic>) -> proto::DiagnosticSet {
+    proto::DiagnosticSet {
+        version: map.version().into(),
+        diagnostics: map
+            .full_offset_ranges()
+            .map(|(range, diagnostic)| proto::Diagnostic {
+                start: range.start.0 as u64,
+                end: range.end.0 as u64,
+                message: diagnostic.message.clone(),
+                severity: match diagnostic.severity {
+                    DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
+                    DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
+                    DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
+                    DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
+                    _ => proto::diagnostic::Severity::None,
+                } as i32,
+            })
+            .collect(),
+    }
+}
+
+pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
+    Ok(
+        match message
+            .variant
+            .ok_or_else(|| anyhow!("missing operation variant"))?
+        {
+            proto::operation::Variant::Edit(edit) => {
+                Operation::Buffer(buffer::Operation::Edit(deserialize_edit_operation(edit)))
+            }
+            proto::operation::Variant::Undo(undo) => Operation::Buffer(buffer::Operation::Undo {
+                lamport_timestamp: clock::Lamport {
+                    replica_id: undo.replica_id as ReplicaId,
+                    value: undo.lamport_timestamp,
+                },
+                undo: UndoOperation {
+                    id: clock::Local {
+                        replica_id: undo.replica_id as ReplicaId,
+                        value: undo.local_timestamp,
+                    },
+                    counts: undo
+                        .counts
+                        .into_iter()
+                        .map(|c| {
+                            (
+                                clock::Local {
+                                    replica_id: c.replica_id as ReplicaId,
+                                    value: c.local_timestamp,
+                                },
+                                c.count,
+                            )
+                        })
+                        .collect(),
+                    ranges: undo
+                        .ranges
+                        .into_iter()
+                        .map(|r| FullOffset(r.start as usize)..FullOffset(r.end as usize))
+                        .collect(),
+                    version: undo.version.into(),
+                },
+            }),
+            proto::operation::Variant::UpdateSelections(message) => {
+                let version = message.version.into();
+                let entries = message
+                    .selections
+                    .iter()
+                    .map(|selection| {
+                        let range = (FullOffset(selection.start as usize), Bias::Left)
+                            ..(FullOffset(selection.end as usize), Bias::Right);
+                        let state = SelectionState {
+                            id: selection.id as usize,
+                            reversed: selection.reversed,
+                            goal: SelectionGoal::None,
+                        };
+                        (range, state)
+                    })
+                    .collect();
+                let selections = AnchorRangeMap::from_full_offset_ranges(version, entries);
+
+                Operation::Buffer(buffer::Operation::UpdateSelections {
+                    set_id: clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value: message.local_timestamp,
+                    },
+                    lamport_timestamp: clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value: message.lamport_timestamp,
+                    },
+                    selections: Arc::from(selections),
+                })
+            }
+            proto::operation::Variant::RemoveSelections(message) => {
+                Operation::Buffer(buffer::Operation::RemoveSelections {
+                    set_id: clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value: message.local_timestamp,
+                    },
+                    lamport_timestamp: clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value: message.lamport_timestamp,
+                    },
+                })
+            }
+            proto::operation::Variant::SetActiveSelections(message) => {
+                Operation::Buffer(buffer::Operation::SetActiveSelections {
+                    set_id: message.local_timestamp.map(|value| clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value,
+                    }),
+                    lamport_timestamp: clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value: message.lamport_timestamp,
+                    },
+                })
+            }
+            proto::operation::Variant::UpdateDiagnostics(message) => {
+                Operation::UpdateDiagnostics(deserialize_diagnostics(message))
+            }
+        },
+    )
+}
+
+pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation {
+    let ranges = edit
+        .ranges
+        .into_iter()
+        .map(|range| FullOffset(range.start as usize)..FullOffset(range.end as usize))
+        .collect();
+    EditOperation {
+        timestamp: InsertionTimestamp {
+            replica_id: edit.replica_id as ReplicaId,
+            local: edit.local_timestamp,
+            lamport: edit.lamport_timestamp,
+        },
+        version: edit.version.into(),
+        ranges,
+        new_text: edit.new_text,
+    }
+}
+
+pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet {
+    SelectionSet {
+        id: clock::Lamport {
+            replica_id: set.replica_id as u16,
+            value: set.lamport_timestamp,
+        },
+        active: set.is_active,
+        selections: Arc::new(AnchorRangeMap::from_full_offset_ranges(
+            set.version.into(),
+            set.selections
+                .into_iter()
+                .map(|selection| {
+                    let range = (FullOffset(selection.start as usize), Bias::Left)
+                        ..(FullOffset(selection.end as usize), Bias::Right);
+                    let state = SelectionState {
+                        id: selection.id as usize,
+                        reversed: selection.reversed,
+                        goal: SelectionGoal::None,
+                    };
+                    (range, state)
+                })
+                .collect(),
+        )),
+    }
+}
+
+pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap<Diagnostic> {
+    AnchorRangeMultimap::from_full_offset_ranges(
+        message.version.into(),
+        Bias::Left,
+        Bias::Right,
+        message.diagnostics.into_iter().filter_map(|diagnostic| {
+            Some((
+                FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize),
+                Diagnostic {
+                    severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
+                        proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
+                        proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
+                        proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION,
+                        proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT,
+                        proto::diagnostic::Severity::None => return None,
+                    },
+                    message: diagnostic.message,
+                },
+            ))
+        }),
+    )
+}
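
The serializers and deserializers above are intended to be inverses of each other. A hedged sketch of that round trip, using only the functions defined in this file (the helper name is illustrative):

    // Encode an in-memory Operation to the wire format and decode it back.
    // Note: deserialize_diagnostics drops any entry whose severity decodes to
    // proto::diagnostic::Severity::None, so a diagnostic set can shrink on the way back.
    fn round_trip(op: &Operation) -> anyhow::Result<Operation> {
        deserialize_operation(serialize_operation(op))
    }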

crates/language/src/tests.rs 🔗

@@ -1,6 +1,6 @@
 use super::*;
 use gpui::{ModelHandle, MutableAppContext};
-use std::rc::Rc;
+use std::{iter::FromIterator, rc::Rc};
 use unindent::Unindent as _;
 
 #[gpui::test]
@@ -78,9 +78,9 @@ async fn test_apply_diff(mut cx: gpui::TestAppContext) {
 
 #[gpui::test]
 async fn test_reparse(mut cx: gpui::TestAppContext) {
+    let text = "fn a() {}";
     let buffer = cx.add_model(|cx| {
-        let text = "fn a() {}".into();
-        Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx)
+        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
     });
 
     // Wait for the initial text to parse
@@ -222,9 +222,8 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
                 }
             }
         "
-        .unindent()
-        .into();
-        Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx)
+        .unindent();
+        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
     });
     let buffer = buffer.read(cx);
     assert_eq!(
@@ -253,8 +252,9 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
 #[gpui::test]
 fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
     cx.add_model(|cx| {
-        let text = "fn a() {}".into();
-        let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx);
+        let text = "fn a() {}";
+        let mut buffer =
+            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
 
         buffer.edit_with_autoindent([8..8], "\n\n", cx);
         assert_eq!(buffer.text(), "fn a() {\n    \n}");
@@ -272,8 +272,10 @@ fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
 #[gpui::test]
 fn test_autoindent_moves_selections(cx: &mut MutableAppContext) {
     cx.add_model(|cx| {
-        let text = History::new("fn a() {}".into());
-        let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx);
+        let text = "fn a() {}";
+
+        let mut buffer =
+            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
 
         let selection_set_id = buffer.add_selection_set::<usize>(&[], cx);
         buffer.start_transaction(Some(selection_set_id)).unwrap();
@@ -329,9 +331,10 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
             d;
             }
         "
-        .unindent()
-        .into();
-        let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx);
+        .unindent();
+
+        let mut buffer =
+            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
 
         // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
         // their indentation is not adjusted.
@@ -375,14 +378,13 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
 #[gpui::test]
 fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
     cx.add_model(|cx| {
-        let text = History::new(
-            "
-                fn a() {}
-            "
-            .unindent()
-            .into(),
-        );
-        let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx);
+        let text = "
+            fn a() {}
+        "
+        .unindent();
+
+        let mut buffer =
+            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
 
         buffer.edit_with_autoindent([5..5], "\nb", cx);
         assert_eq!(
@@ -410,6 +412,247 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte
     });
 }
 
+#[gpui::test]
+async fn test_diagnostics(mut cx: gpui::TestAppContext) {
+    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
+    let mut rust_lang = rust_lang();
+    rust_lang.config.language_server = Some(LanguageServerConfig {
+        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
+        ..Default::default()
+    });
+
+    let text = "
+        fn a() { A }
+        fn b() { BB }
+        fn c() { CCC }
+    "
+    .unindent();
+
+    let buffer = cx.add_model(|cx| {
+        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang)), Some(language_server), cx)
+    });
+
+    let open_notification = fake
+        .receive_notification::<lsp::notification::DidOpenTextDocument>()
+        .await;
+
+    // Edit the buffer, moving the content down
+    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
+    let change_notification_1 = fake
+        .receive_notification::<lsp::notification::DidChangeTextDocument>()
+        .await;
+    assert!(change_notification_1.text_document.version > open_notification.text_document.version);
+
+    buffer.update(&mut cx, |buffer, cx| {
+        // Receive diagnostics for an earlier version of the buffer.
+        buffer
+            .update_diagnostics(
+                Some(open_notification.text_document.version),
+                vec![
+                    lsp::Diagnostic {
+                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
+                        severity: Some(lsp::DiagnosticSeverity::ERROR),
+                        message: "undefined variable 'A'".to_string(),
+                        ..Default::default()
+                    },
+                    lsp::Diagnostic {
+                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
+                        severity: Some(lsp::DiagnosticSeverity::ERROR),
+                        message: "undefined variable 'BB'".to_string(),
+                        ..Default::default()
+                    },
+                    lsp::Diagnostic {
+                        range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
+                        severity: Some(lsp::DiagnosticSeverity::ERROR),
+                        message: "undefined variable 'CCC'".to_string(),
+                        ..Default::default()
+                    },
+                ],
+                cx,
+            )
+            .unwrap();
+
+        // The diagnostics have moved down since they were created.
+        assert_eq!(
+            buffer
+                .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0))
+                .collect::<Vec<_>>(),
+            &[
+                (
+                    Point::new(3, 9)..Point::new(3, 11),
+                    &Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'BB'".to_string()
+                    },
+                ),
+                (
+                    Point::new(4, 9)..Point::new(4, 12),
+                    &Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'CCC'".to_string()
+                    }
+                )
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, 0..buffer.len()),
+            [
+                ("\n\nfn a() { ".to_string(), None),
+                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn b() { ".to_string(), None),
+                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn c() { ".to_string(), None),
+                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\n".to_string(), None),
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
+            [
+                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn c() { ".to_string(), None),
+                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
+            ]
+        );
+
+        // Ensure overlapping diagnostics are highlighted correctly.
+        buffer
+            .update_diagnostics(
+                Some(open_notification.text_document.version),
+                vec![
+                    lsp::Diagnostic {
+                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
+                        severity: Some(lsp::DiagnosticSeverity::ERROR),
+                        message: "undefined variable 'A'".to_string(),
+                        ..Default::default()
+                    },
+                    lsp::Diagnostic {
+                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
+                        severity: Some(lsp::DiagnosticSeverity::WARNING),
+                        message: "unreachable statement".to_string(),
+                        ..Default::default()
+                    },
+                ],
+                cx,
+            )
+            .unwrap();
+        assert_eq!(
+            buffer
+                .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0))
+                .collect::<Vec<_>>(),
+            &[
+                (
+                    Point::new(2, 9)..Point::new(2, 12),
+                    &Diagnostic {
+                        severity: DiagnosticSeverity::WARNING,
+                        message: "unreachable statement".to_string()
+                    }
+                ),
+                (
+                    Point::new(2, 9)..Point::new(2, 10),
+                    &Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'A'".to_string()
+                    },
+                )
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
+            [
+                ("fn a() { ".to_string(), None),
+                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+                ("\n".to_string(), None),
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
+            [
+                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+                ("\n".to_string(), None),
+            ]
+        );
+    });
+
+    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
+    // changes since the last save.
+    buffer.update(&mut cx, |buffer, cx| {
+        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
+        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
+    });
+    let change_notification_2 = fake
+        .receive_notification::<lsp::notification::DidChangeTextDocument>()
+        .await;
+    assert!(
+        change_notification_2.text_document.version > change_notification_1.text_document.version
+    );
+
+    buffer.update(&mut cx, |buffer, cx| {
+        buffer
+            .update_diagnostics(
+                Some(change_notification_2.text_document.version),
+                vec![
+                    lsp::Diagnostic {
+                        range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
+                        severity: Some(lsp::DiagnosticSeverity::ERROR),
+                        message: "undefined variable 'BB'".to_string(),
+                        source: Some("disk".to_string()),
+                        ..Default::default()
+                    },
+                    lsp::Diagnostic {
+                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
+                        severity: Some(lsp::DiagnosticSeverity::ERROR),
+                        message: "undefined variable 'A'".to_string(),
+                        source: Some("disk".to_string()),
+                        ..Default::default()
+                    },
+                ],
+                cx,
+            )
+            .unwrap();
+        assert_eq!(
+            buffer
+                .diagnostics_in_range(0..buffer.len())
+                .collect::<Vec<_>>(),
+            &[
+                (
+                    Point::new(2, 21)..Point::new(2, 22),
+                    &Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'A'".to_string()
+                    }
+                ),
+                (
+                    Point::new(3, 9)..Point::new(3, 11),
+                    &Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'BB'".to_string()
+                    },
+                )
+            ]
+        );
+    });
+
+    fn chunks_with_diagnostics<T: ToOffset>(
+        buffer: &Buffer,
+        range: Range<T>,
+    ) -> Vec<(String, Option<DiagnosticSeverity>)> {
+        let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
+        for chunk in buffer.snapshot().highlighted_text_for_range(range) {
+            if chunks
+                .last()
+                .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
+            {
+                chunks.last_mut().unwrap().0.push_str(chunk.text);
+            } else {
+                chunks.push((chunk.text.to_string(), chunk.diagnostic));
+            }
+        }
+        chunks
+    }
+}
+
 #[test]
 fn test_contiguous_ranges() {
     assert_eq!(
@@ -437,28 +680,27 @@ impl Buffer {
     }
 }
 
-fn rust_lang() -> Arc<Language> {
-    Arc::new(
-        Language::new(
-            LanguageConfig {
-                name: "Rust".to_string(),
-                path_suffixes: vec!["rs".to_string()],
-                ..Default::default()
-            },
-            tree_sitter_rust::language(),
-        )
-        .with_indents_query(
-            r#"
+fn rust_lang() -> Language {
+    Language::new(
+        LanguageConfig {
+            name: "Rust".to_string(),
+            path_suffixes: vec!["rs".to_string()],
+            language_server: None,
+            ..Default::default()
+        },
+        tree_sitter_rust::language(),
+    )
+    .with_indents_query(
+        r#"
                 (call_expression) @indent
                 (field_expression) @indent
                 (_ "(" ")" @end) @indent
                 (_ "{" "}" @end) @indent
             "#,
-        )
-        .unwrap()
-        .with_brackets_query(r#" ("{" @open "}" @close) "#)
-        .unwrap(),
     )
+    .unwrap()
+    .with_brackets_query(r#" ("{" @open "}" @close) "#)
+    .unwrap()
 }
 
 fn empty(point: Point) -> Range<Point> {
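
The last part of `test_diagnostics` exercises the disk-based path in `update_diagnostics`: positions reported against the saved version are walked forward through `edits_since_save` before being anchored. A simplified sketch of that translation, using plain `usize` offsets instead of the crate's `PointUtf16` coordinates (illustrative types only):

    struct Edit {
        old: std::ops::Range<usize>,
        new: std::ops::Range<usize>,
    }

    /// Shift a position expressed against the saved version into the current version,
    /// given the edits made since the save in ascending order of old position.
    fn translate(point: usize, edits_since_save: &[Edit]) -> Option<usize> {
        let mut last_old_end = 0;
        let mut last_new_end = 0;
        for edit in edits_since_save {
            if edit.old.end <= point {
                // Edit lies entirely before the position: remember how far it pushed things.
                last_old_end = edit.old.end;
                last_new_end = edit.new.end;
            } else if edit.old.start <= point {
                // The position itself was edited; drop the diagnostic rather than guess.
                return None;
            } else {
                break;
            }
        }
        Some(last_new_end + (point - last_old_end))
    }

For example, with a single edit that replaced old bytes 0..0 with new bytes 0..2 (an insertion at the start of the file), `translate(9, &edits)` yields 11, which mirrors how the "undefined variable" diagnostics move down after the leading newlines are inserted in the test.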

crates/lsp/Cargo.toml 🔗

@@ -0,0 +1,28 @@
+[package]
+name = "lsp"
+version = "0.1.0"
+edition = "2018"
+
+[features]
+test-support = ["async-pipe"]
+
+[dependencies]
+gpui = { path = "../gpui" }
+util = { path = "../util" }
+anyhow = "1.0"
+async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47", optional = true }
+futures = "0.3"
+log = "0.4"
+lsp-types = "0.91"
+parking_lot = "0.11"
+postage = { version = "0.4.1", features = ["futures-traits"] }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = { version = "1.0", features = ["raw_value"] }
+smol = "1.2"
+
+[dev-dependencies]
+gpui = { path = "../gpui", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47" }
+simplelog = "0.9"
+unindent = "0.1.7"
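
The optional `async-pipe` dependency is only pulled in by the `test-support` feature, which is what presumably lets tests like `test_diagnostics` above construct a fake, in-process server over an in-memory pipe instead of spawning a real binary. A hedged usage sketch mirroring the calls already shown in that test (nothing here is new API):

    // Inside a #[gpui::test] async test, where `cx` is the gpui::TestAppContext
    // the harness provides.
    let (server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;

    // `server` is the handle a Buffer gets constructed with; `fake` lets the test
    // observe the notifications the client side sends, for example:
    let open = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;
    println!("buffer opened at version {}", open.text_document.version);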

crates/lsp/src/lib.rs 🔗

@@ -0,0 +1,710 @@
+use anyhow::{anyhow, Context, Result};
+use futures::{io::BufWriter, AsyncRead, AsyncWrite};
+use gpui::{executor, Task};
+use parking_lot::{Mutex, RwLock};
+use postage::{barrier, oneshot, prelude::Stream, sink::Sink};
+use serde::{Deserialize, Serialize};
+use serde_json::{json, value::RawValue, Value};
+use smol::{
+    channel,
+    io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader},
+    process::Command,
+};
+use std::{
+    collections::HashMap,
+    future::Future,
+    io::Write,
+    str::FromStr,
+    sync::{
+        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
+        Arc,
+    },
+};
+use std::{path::Path, process::Stdio};
+use util::TryFutureExt;
+
+pub use lsp_types::*;
+
+const JSON_RPC_VERSION: &'static str = "2.0";
+const CONTENT_LEN_HEADER: &'static str = "Content-Length: ";
+
+type NotificationHandler = Box<dyn Send + Sync + Fn(&str)>;
+type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
+
+pub struct LanguageServer {
+    next_id: AtomicUsize,
+    outbound_tx: RwLock<Option<channel::Sender<Vec<u8>>>>,
+    notification_handlers: Arc<RwLock<HashMap<&'static str, NotificationHandler>>>,
+    response_handlers: Arc<Mutex<HashMap<usize, ResponseHandler>>>,
+    executor: Arc<executor::Background>,
+    io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
+    initialized: barrier::Receiver,
+    output_done_rx: Mutex<Option<barrier::Receiver>>,
+}
+
+pub struct Subscription {
+    method: &'static str,
+    notification_handlers: Arc<RwLock<HashMap<&'static str, NotificationHandler>>>,
+}
+
+#[derive(Serialize, Deserialize)]
+struct Request<'a, T> {
+    jsonrpc: &'a str,
+    id: usize,
+    method: &'a str,
+    params: T,
+}
+
+#[derive(Serialize, Deserialize)]
+struct AnyResponse<'a> {
+    id: usize,
+    #[serde(default)]
+    error: Option<Error>,
+    #[serde(borrow)]
+    result: Option<&'a RawValue>,
+}
+
+#[derive(Serialize, Deserialize)]
+struct Notification<'a, T> {
+    #[serde(borrow)]
+    jsonrpc: &'a str,
+    #[serde(borrow)]
+    method: &'a str,
+    params: T,
+}
+
+#[derive(Deserialize)]
+struct AnyNotification<'a> {
+    #[serde(borrow)]
+    method: &'a str,
+    #[serde(borrow)]
+    params: &'a RawValue,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+struct Error {
+    message: String,
+}
+
+impl LanguageServer {
+    pub fn new(
+        binary_path: &Path,
+        root_path: &Path,
+        background: Arc<executor::Background>,
+    ) -> Result<Arc<Self>> {
+        let mut server = Command::new(binary_path)
+            .stdin(Stdio::piped())
+            .stdout(Stdio::piped())
+            .stderr(Stdio::inherit())
+            .spawn()?;
+        let stdin = server.stdin.take().unwrap();
+        let stdout = server.stdout.take().unwrap();
+        Self::new_internal(stdin, stdout, root_path, background)
+    }
+
+    fn new_internal<Stdin, Stdout>(
+        stdin: Stdin,
+        stdout: Stdout,
+        root_path: &Path,
+        executor: Arc<executor::Background>,
+    ) -> Result<Arc<Self>>
+    where
+        Stdin: AsyncWrite + Unpin + Send + 'static,
+        Stdout: AsyncRead + Unpin + Send + 'static,
+    {
+        let mut stdin = BufWriter::new(stdin);
+        let mut stdout = BufReader::new(stdout);
+        let (outbound_tx, outbound_rx) = channel::unbounded::<Vec<u8>>();
+        let notification_handlers = Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::new()));
+        let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::new()));
+        let input_task = executor.spawn(
+            {
+                let notification_handlers = notification_handlers.clone();
+                let response_handlers = response_handlers.clone();
+                async move {
+                    let mut buffer = Vec::new();
+                    loop {
+                        buffer.clear();
+                        stdout.read_until(b'\n', &mut buffer).await?;
+                        stdout.read_until(b'\n', &mut buffer).await?;
+                        let message_len: usize = std::str::from_utf8(&buffer)?
+                            .strip_prefix(CONTENT_LEN_HEADER)
+                            .ok_or_else(|| anyhow!("invalid header"))?
+                            .trim_end()
+                            .parse()?;
+
+                        buffer.resize(message_len, 0);
+                        stdout.read_exact(&mut buffer).await?;
+
+                        if let Ok(AnyNotification { method, params }) =
+                            serde_json::from_slice(&buffer)
+                        {
+                            if let Some(handler) = notification_handlers.read().get(method) {
+                                handler(params.get());
+                            } else {
+                                log::info!(
+                                    "unhandled notification {}:\n{}",
+                                    method,
+                                    serde_json::to_string_pretty(
+                                        &Value::from_str(params.get()).unwrap()
+                                    )
+                                    .unwrap()
+                                );
+                            }
+                        } else if let Ok(AnyResponse { id, error, result }) =
+                            serde_json::from_slice(&buffer)
+                        {
+                            if let Some(handler) = response_handlers.lock().remove(&id) {
+                                if let Some(error) = error {
+                                    handler(Err(error));
+                                } else if let Some(result) = result {
+                                    handler(Ok(result.get()));
+                                } else {
+                                    handler(Ok("null"));
+                                }
+                            }
+                        } else {
+                            return Err(anyhow!(
+                                "failed to deserialize message:\n{}",
+                                std::str::from_utf8(&buffer)?
+                            ));
+                        }
+                    }
+                }
+            }
+            .log_err(),
+        );
+        let (output_done_tx, output_done_rx) = barrier::channel();
+        let output_task = executor.spawn(
+            async move {
+                let mut content_len_buffer = Vec::new();
+                while let Ok(message) = outbound_rx.recv().await {
+                    content_len_buffer.clear();
+                    write!(content_len_buffer, "{}", message.len()).unwrap();
+                    stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?;
+                    stdin.write_all(&content_len_buffer).await?;
+                    stdin.write_all("\r\n\r\n".as_bytes()).await?;
+                    stdin.write_all(&message).await?;
+                    stdin.flush().await?;
+                }
+                drop(output_done_tx);
+                Ok(())
+            }
+            .log_err(),
+        );
+
+        let (initialized_tx, initialized_rx) = barrier::channel();
+        let this = Arc::new(Self {
+            notification_handlers,
+            response_handlers,
+            next_id: Default::default(),
+            outbound_tx: RwLock::new(Some(outbound_tx)),
+            executor: executor.clone(),
+            io_tasks: Mutex::new(Some((input_task, output_task))),
+            initialized: initialized_rx,
+            output_done_rx: Mutex::new(Some(output_done_rx)),
+        });
+
+        let root_uri =
+            lsp_types::Url::from_file_path(root_path).map_err(|_| anyhow!("invalid root path"))?;
+        executor
+            .spawn({
+                let this = this.clone();
+                async move {
+                    this.init(root_uri).log_err().await;
+                    drop(initialized_tx);
+                }
+            })
+            .detach();
+
+        Ok(this)
+    }
+
+    async fn init(self: Arc<Self>, root_uri: lsp_types::Url) -> Result<()> {
+        #[allow(deprecated)]
+        let params = lsp_types::InitializeParams {
+            process_id: Default::default(),
+            root_path: Default::default(),
+            root_uri: Some(root_uri),
+            initialization_options: Default::default(),
+            capabilities: lsp_types::ClientCapabilities {
+                experimental: Some(json!({
+                    "serverStatusNotification": true,
+                })),
+                ..Default::default()
+            },
+            trace: Default::default(),
+            workspace_folders: Default::default(),
+            client_info: Default::default(),
+            locale: Default::default(),
+        };
+
+        let this = self.clone();
+        let request = Self::request_internal::<lsp_types::request::Initialize>(
+            &this.next_id,
+            &this.response_handlers,
+            this.outbound_tx.read().as_ref(),
+            params,
+        );
+        request.await?;
+        Self::notify_internal::<lsp_types::notification::Initialized>(
+            this.outbound_tx.read().as_ref(),
+            lsp_types::InitializedParams {},
+        )?;
+        Ok(())
+    }
+
+    pub fn shutdown(&self) -> Option<impl 'static + Send + Future<Output = Result<()>>> {
+        if let Some(tasks) = self.io_tasks.lock().take() {
+            let response_handlers = self.response_handlers.clone();
+            let outbound_tx = self.outbound_tx.write().take();
+            let next_id = AtomicUsize::new(self.next_id.load(SeqCst));
+            let mut output_done = self.output_done_rx.lock().take().unwrap();
+            Some(async move {
+                Self::request_internal::<lsp_types::request::Shutdown>(
+                    &next_id,
+                    &response_handlers,
+                    outbound_tx.as_ref(),
+                    (),
+                )
+                .await?;
+                Self::notify_internal::<lsp_types::notification::Exit>(outbound_tx.as_ref(), ())?;
+                drop(outbound_tx);
+                output_done.recv().await;
+                drop(tasks);
+                Ok(())
+            })
+        } else {
+            None
+        }
+    }
+
+    pub fn on_notification<T, F>(&self, f: F) -> Subscription
+    where
+        T: lsp_types::notification::Notification,
+        F: 'static + Send + Sync + Fn(T::Params),
+    {
+        let prev_handler = self.notification_handlers.write().insert(
+            T::METHOD,
+            Box::new(
+                move |notification| match serde_json::from_str(notification) {
+                    Ok(notification) => f(notification),
+                    Err(err) => log::error!("error parsing notification {}: {}", T::METHOD, err),
+                },
+            ),
+        );
+
+        assert!(
+            prev_handler.is_none(),
+            "registered multiple handlers for the same notification"
+        );
+
+        Subscription {
+            method: T::METHOD,
+            notification_handlers: self.notification_handlers.clone(),
+        }
+    }
+
+    pub fn request<T: lsp_types::request::Request>(
+        self: Arc<Self>,
+        params: T::Params,
+    ) -> impl Future<Output = Result<T::Result>>
+    where
+        T::Result: 'static + Send,
+    {
+        let this = self.clone();
+        async move {
+            this.initialized.clone().recv().await;
+            Self::request_internal::<T>(
+                &this.next_id,
+                &this.response_handlers,
+                this.outbound_tx.read().as_ref(),
+                params,
+            )
+            .await
+        }
+    }
+
+    fn request_internal<T: lsp_types::request::Request>(
+        next_id: &AtomicUsize,
+        response_handlers: &Mutex<HashMap<usize, ResponseHandler>>,
+        outbound_tx: Option<&channel::Sender<Vec<u8>>>,
+        params: T::Params,
+    ) -> impl 'static + Future<Output = Result<T::Result>>
+    where
+        T::Result: 'static + Send,
+    {
+        let id = next_id.fetch_add(1, SeqCst);
+        let message = serde_json::to_vec(&Request {
+            jsonrpc: JSON_RPC_VERSION,
+            id,
+            method: T::METHOD,
+            params,
+        })
+        .unwrap();
+        let mut response_handlers = response_handlers.lock();
+        let (mut tx, mut rx) = oneshot::channel();
+        response_handlers.insert(
+            id,
+            Box::new(move |result| {
+                let response = match result {
+                    Ok(response) => {
+                        serde_json::from_str(response).context("failed to deserialize response")
+                    }
+                    Err(error) => Err(anyhow!("{}", error.message)),
+                };
+                let _ = tx.try_send(response);
+            }),
+        );
+
+        let send = outbound_tx
+            .as_ref()
+            .ok_or_else(|| {
+                anyhow!("tried to send a request to a language server that has been shut down")
+            })
+            .and_then(|outbound_tx| {
+                outbound_tx.try_send(message)?;
+                Ok(())
+            });
+        async move {
+            send?;
+            rx.recv().await.unwrap()
+        }
+    }
+
+    pub fn notify<T: lsp_types::notification::Notification>(
+        self: &Arc<Self>,
+        params: T::Params,
+    ) -> impl Future<Output = Result<()>> {
+        let this = self.clone();
+        async move {
+            this.initialized.clone().recv().await;
+            Self::notify_internal::<T>(this.outbound_tx.read().as_ref(), params)?;
+            Ok(())
+        }
+    }
+
+    fn notify_internal<T: lsp_types::notification::Notification>(
+        outbound_tx: Option<&channel::Sender<Vec<u8>>>,
+        params: T::Params,
+    ) -> Result<()> {
+        let message = serde_json::to_vec(&Notification {
+            jsonrpc: JSON_RPC_VERSION,
+            method: T::METHOD,
+            params,
+        })
+        .unwrap();
+        let outbound_tx = outbound_tx
+            .as_ref()
+            .ok_or_else(|| anyhow!("tried to notify a language server that has been shut down"))?;
+        outbound_tx.try_send(message)?;
+        Ok(())
+    }
+}
+
+impl Drop for LanguageServer {
+    fn drop(&mut self) {
+        if let Some(shutdown) = self.shutdown() {
+            self.executor.spawn(shutdown).detach();
+        }
+    }
+}
+
+impl Subscription {
+    pub fn detach(mut self) {
+        self.method = "";
+    }
+}
+
+impl Drop for Subscription {
+    fn drop(&mut self) {
+        self.notification_handlers.write().remove(self.method);
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+pub struct FakeLanguageServer {
+    buffer: Vec<u8>,
+    stdin: smol::io::BufReader<async_pipe::PipeReader>,
+    stdout: smol::io::BufWriter<async_pipe::PipeWriter>,
+    pub started: Arc<AtomicBool>,
+}
+
+#[cfg(any(test, feature = "test-support"))]
+pub struct RequestId<T> {
+    id: usize,
+    _type: std::marker::PhantomData<T>,
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl LanguageServer {
+    pub async fn fake(executor: Arc<executor::Background>) -> (Arc<Self>, FakeLanguageServer) {
+        let stdin = async_pipe::pipe();
+        let stdout = async_pipe::pipe();
+        let mut fake = FakeLanguageServer {
+            stdin: smol::io::BufReader::new(stdin.1),
+            stdout: smol::io::BufWriter::new(stdout.0),
+            buffer: Vec::new(),
+            started: Arc::new(AtomicBool::new(true)),
+        };
+
+        let server = Self::new_internal(stdin.0, stdout.1, Path::new("/"), executor).unwrap();
+
+        let (init_id, _) = fake.receive_request::<request::Initialize>().await;
+        fake.respond(init_id, InitializeResult::default()).await;
+        fake.receive_notification::<notification::Initialized>()
+            .await;
+
+        (server, fake)
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl FakeLanguageServer {
+    pub async fn notify<T: notification::Notification>(&mut self, params: T::Params) {
+        if !self.started.load(std::sync::atomic::Ordering::SeqCst) {
+            panic!("can't simulate an LSP notification before the server has been started");
+        }
+        let message = serde_json::to_vec(&Notification {
+            jsonrpc: JSON_RPC_VERSION,
+            method: T::METHOD,
+            params,
+        })
+        .unwrap();
+        self.send(message).await;
+    }
+
+    pub async fn respond<'a, T: request::Request>(
+        &mut self,
+        request_id: RequestId<T>,
+        result: T::Result,
+    ) {
+        let result = serde_json::to_string(&result).unwrap();
+        let message = serde_json::to_vec(&AnyResponse {
+            id: request_id.id,
+            error: None,
+            result: Some(&RawValue::from_string(result).unwrap()),
+        })
+        .unwrap();
+        self.send(message).await;
+    }
+
+    pub async fn receive_request<T: request::Request>(&mut self) -> (RequestId<T>, T::Params) {
+        self.receive().await;
+        let request = serde_json::from_slice::<Request<T::Params>>(&self.buffer).unwrap();
+        assert_eq!(request.method, T::METHOD);
+        assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
+        (
+            RequestId {
+                id: request.id,
+                _type: std::marker::PhantomData,
+            },
+            request.params,
+        )
+    }
+
+    pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
+        self.receive().await;
+        let notification = serde_json::from_slice::<Notification<T::Params>>(&self.buffer).unwrap();
+        assert_eq!(notification.method, T::METHOD);
+        notification.params
+    }
+
+    async fn send(&mut self, message: Vec<u8>) {
+        self.stdout
+            .write_all(CONTENT_LEN_HEADER.as_bytes())
+            .await
+            .unwrap();
+        self.stdout
+            .write_all(message.len().to_string().as_bytes())
+            .await
+            .unwrap();
+        self.stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap();
+        self.stdout.write_all(&message).await.unwrap();
+        self.stdout.flush().await.unwrap();
+    }
+
+    async fn receive(&mut self) {
+        self.buffer.clear();
+        self.stdin
+            .read_until(b'\n', &mut self.buffer)
+            .await
+            .unwrap();
+        self.stdin
+            .read_until(b'\n', &mut self.buffer)
+            .await
+            .unwrap();
+        let message_len: usize = std::str::from_utf8(&self.buffer)
+            .unwrap()
+            .strip_prefix(CONTENT_LEN_HEADER)
+            .unwrap()
+            .trim_end()
+            .parse()
+            .unwrap();
+        self.buffer.resize(message_len, 0);
+        self.stdin.read_exact(&mut self.buffer).await.unwrap();
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use gpui::TestAppContext;
+    use simplelog::SimpleLogger;
+    use unindent::Unindent;
+    use util::test::temp_tree;
+
+    #[gpui::test]
+    async fn test_basic(cx: TestAppContext) {
+        let lib_source = r#"
+            fn fun() {
+                let hello = "world";
+            }
+        "#
+        .unindent();
+        let root_dir = temp_tree(json!({
+            "Cargo.toml": r#"
+                [package]
+                name = "temp"
+                version = "0.1.0"
+                edition = "2018"
+            "#.unindent(),
+            "src": {
+                "lib.rs": &lib_source
+            }
+        }));
+        let lib_file_uri =
+            lsp_types::Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap();
+
+        let server = cx.read(|cx| {
+            LanguageServer::new(
+                Path::new("rust-analyzer"),
+                root_dir.path(),
+                cx.background().clone(),
+            )
+            .unwrap()
+        });
+        server.next_idle_notification().await;
+
+        server
+            .notify::<lsp_types::notification::DidOpenTextDocument>(
+                lsp_types::DidOpenTextDocumentParams {
+                    text_document: lsp_types::TextDocumentItem::new(
+                        lib_file_uri.clone(),
+                        "rust".to_string(),
+                        0,
+                        lib_source,
+                    ),
+                },
+            )
+            .await
+            .unwrap();
+
+        let hover = server
+            .request::<lsp_types::request::HoverRequest>(lsp_types::HoverParams {
+                text_document_position_params: lsp_types::TextDocumentPositionParams {
+                    text_document: lsp_types::TextDocumentIdentifier::new(lib_file_uri),
+                    position: lsp_types::Position::new(1, 21),
+                },
+                work_done_progress_params: Default::default(),
+            })
+            .await
+            .unwrap()
+            .unwrap();
+        assert_eq!(
+            hover.contents,
+            lsp_types::HoverContents::Markup(lsp_types::MarkupContent {
+                kind: lsp_types::MarkupKind::Markdown,
+                value: "&str".to_string()
+            })
+        );
+    }
+
+    #[gpui::test]
+    async fn test_fake(cx: TestAppContext) {
+        SimpleLogger::init(log::LevelFilter::Info, Default::default()).unwrap();
+
+        let (server, mut fake) = LanguageServer::fake(cx.background()).await;
+
+        let (message_tx, message_rx) = channel::unbounded();
+        let (diagnostics_tx, diagnostics_rx) = channel::unbounded();
+        server
+            .on_notification::<notification::ShowMessage, _>(move |params| {
+                message_tx.try_send(params).unwrap()
+            })
+            .detach();
+        server
+            .on_notification::<notification::PublishDiagnostics, _>(move |params| {
+                diagnostics_tx.try_send(params).unwrap()
+            })
+            .detach();
+
+        server
+            .notify::<notification::DidOpenTextDocument>(DidOpenTextDocumentParams {
+                text_document: TextDocumentItem::new(
+                    Url::from_str("file://a/b").unwrap(),
+                    "rust".to_string(),
+                    0,
+                    "".to_string(),
+                ),
+            })
+            .await
+            .unwrap();
+        assert_eq!(
+            fake.receive_notification::<notification::DidOpenTextDocument>()
+                .await
+                .text_document
+                .uri
+                .as_str(),
+            "file://a/b"
+        );
+
+        fake.notify::<notification::ShowMessage>(ShowMessageParams {
+            typ: MessageType::ERROR,
+            message: "ok".to_string(),
+        })
+        .await;
+        fake.notify::<notification::PublishDiagnostics>(PublishDiagnosticsParams {
+            uri: Url::from_str("file://b/c").unwrap(),
+            version: Some(5),
+            diagnostics: vec![],
+        })
+        .await;
+        assert_eq!(message_rx.recv().await.unwrap().message, "ok");
+        assert_eq!(
+            diagnostics_rx.recv().await.unwrap().uri.as_str(),
+            "file://b/c"
+        );
+
+        drop(server);
+        let (shutdown_request, _) = fake.receive_request::<lsp_types::request::Shutdown>().await;
+        fake.respond(shutdown_request, ()).await;
+        fake.receive_notification::<lsp_types::notification::Exit>()
+            .await;
+    }
+
+    impl LanguageServer {
+        async fn next_idle_notification(self: &Arc<Self>) {
+            let (tx, rx) = channel::unbounded();
+            let _subscription =
+                self.on_notification::<ServerStatusNotification, _>(move |params| {
+                    if params.quiescent {
+                        tx.try_send(()).unwrap();
+                    }
+                });
+            let _ = rx.recv().await;
+        }
+    }
+
+    pub enum ServerStatusNotification {}
+
+    impl lsp_types::notification::Notification for ServerStatusNotification {
+        type Params = ServerStatusParams;
+        const METHOD: &'static str = "experimental/serverStatus";
+    }
+
+    #[derive(Deserialize, Serialize, PartialEq, Eq, Clone)]
+    pub struct ServerStatusParams {
+        pub quiescent: bool,
+    }
+}

crates/project/Cargo.toml 🔗

@@ -8,16 +8,16 @@ test-support = ["language/test-support", "buffer/test-support"]
 
 [dependencies]
 buffer = { path = "../buffer" }
+client = { path = "../client" }
 clock = { path = "../clock" }
 fsevent = { path = "../fsevent" }
 fuzzy = { path = "../fuzzy" }
 gpui = { path = "../gpui" }
 language = { path = "../language" }
-client = { path = "../client" }
+lsp = { path = "../lsp" }
+rpc = { path = "../rpc" }
 sum_tree = { path = "../sum_tree" }
 util = { path = "../util" }
-rpc = { path = "../rpc" }
-
 anyhow = "1.0.38"
 async-trait = "0.1"
 futures = "0.3"
@@ -36,8 +36,10 @@ toml = "0.5"
 client = { path = "../client", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
 rpc = { path = "../rpc", features = ["test-support"] }
-
 rand = "0.8.3"
+simplelog = "0.9"
 tempdir = { version = "0.3.7" }
+unindent = "0.1.7"

crates/project/src/worktree.rs 🔗

@@ -3,7 +3,7 @@ use super::{
     ignore::IgnoreStack,
 };
 use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context, Result};
 use client::{proto, Client, PeerId, TypedEnvelope};
 use clock::ReplicaId;
 use futures::{Stream, StreamExt};
@@ -12,13 +12,15 @@ use gpui::{
     executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
     Task, UpgradeModelHandle, WeakModelHandle,
 };
-use language::{Buffer, History, LanguageRegistry, Operation, Rope};
+use language::{Buffer, Language, LanguageRegistry, Operation, Rope};
 use lazy_static::lazy_static;
+use lsp::LanguageServer;
 use parking_lot::Mutex;
 use postage::{
     prelude::{Sink as _, Stream as _},
     watch,
 };
+
 use serde::Deserialize;
 use smol::channel::{self, Sender};
 use std::{
@@ -39,7 +41,7 @@ use std::{
 };
 use sum_tree::Bias;
 use sum_tree::{Edit, SeekTarget, SumTree};
-use util::TryFutureExt;
+use util::{ResultExt, TryFutureExt};
 
 lazy_static! {
     static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
@@ -89,6 +91,29 @@ impl Entity for Worktree {
             }
         }
     }
+
+    fn app_will_quit(
+        &mut self,
+        _: &mut MutableAppContext,
+    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
+        use futures::FutureExt;
+
+        if let Self::Local(worktree) = self {
+            let shutdown_futures = worktree
+                .language_servers
+                .drain()
+                .filter_map(|(_, server)| server.shutdown())
+                .collect::<Vec<_>>();
+            Some(
+                async move {
+                    futures::future::join_all(shutdown_futures).await;
+                }
+                .boxed(),
+            )
+        } else {
+            None
+        }
+    }
 }
 
 impl Worktree {
@@ -421,8 +446,8 @@ impl Worktree {
         let ops = payload
             .operations
             .into_iter()
-            .map(|op| op.try_into())
-            .collect::<anyhow::Result<Vec<_>>>()?;
+            .map(|op| language::proto::deserialize_operation(op))
+            .collect::<Result<Vec<_>, _>>()?;
 
         match self {
             Worktree::Local(worktree) => {
@@ -587,6 +612,8 @@ impl Worktree {
             }
         };
 
+        let local = self.as_local().is_some();
+        let worktree_path = self.abs_path.clone();
         let worktree_handle = cx.handle();
         let mut buffers_to_delete = Vec::new();
         for (buffer_id, buffer) in open_buffers {
@@ -598,6 +625,8 @@ impl Worktree {
                             .and_then(|entry_id| self.entry_for_id(entry_id))
                         {
                             File {
+                                is_local: local,
+                                worktree_path: worktree_path.clone(),
                                 entry_id: Some(entry.id),
                                 mtime: entry.mtime,
                                 path: entry.path.clone(),
@@ -605,6 +634,8 @@ impl Worktree {
                             }
                         } else if let Some(entry) = self.entry_for_path(old_file.path().as_ref()) {
                             File {
+                                is_local: local,
+                                worktree_path: worktree_path.clone(),
                                 entry_id: Some(entry.id),
                                 mtime: entry.mtime,
                                 path: entry.path.clone(),
@@ -612,6 +643,8 @@ impl Worktree {
                             }
                         } else {
                             File {
+                                is_local: local,
+                                worktree_path: worktree_path.clone(),
                                 entry_id: None,
                                 path: old_file.path().clone(),
                                 mtime: old_file.mtime(),
@@ -640,6 +673,79 @@ impl Worktree {
             }
         }
     }
+
+    fn update_diagnostics(
+        &mut self,
+        params: lsp::PublishDiagnosticsParams,
+        cx: &mut ModelContext<Worktree>,
+    ) -> Result<()> {
+        let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?;
+        let file_path = params
+            .uri
+            .to_file_path()
+            .map_err(|_| anyhow!("URI is not a file"))?
+            .strip_prefix(&this.abs_path)
+            .context("path is not within worktree")?
+            .to_owned();
+
+        for buffer in this.open_buffers.values() {
+            if let Some(buffer) = buffer.upgrade(cx) {
+                if buffer
+                    .read(cx)
+                    .file()
+                    .map_or(false, |file| file.path().as_ref() == file_path)
+                {
+                    let (remote_id, operation) = buffer.update(cx, |buffer, cx| {
+                        (
+                            buffer.remote_id(),
+                            buffer.update_diagnostics(params.version, params.diagnostics, cx),
+                        )
+                    });
+                    self.send_buffer_update(remote_id, operation?, cx);
+                    return Ok(());
+                }
+            }
+        }
+
+        this.diagnostics.insert(file_path, params.diagnostics);
+        Ok(())
+    }
+
+    fn send_buffer_update(
+        &mut self,
+        buffer_id: u64,
+        operation: Operation,
+        cx: &mut ModelContext<Self>,
+    ) {
+        if let Some((rpc, remote_id)) = match self {
+            Worktree::Local(worktree) => worktree
+                .remote_id
+                .borrow()
+                .map(|id| (worktree.rpc.clone(), id)),
+            Worktree::Remote(worktree) => Some((worktree.client.clone(), worktree.remote_id)),
+        } {
+            cx.spawn(|worktree, mut cx| async move {
+                if let Err(error) = rpc
+                    .request(proto::UpdateBuffer {
+                        worktree_id: remote_id,
+                        buffer_id,
+                        operations: vec![language::proto::serialize_operation(&operation)],
+                    })
+                    .await
+                {
+                    worktree.update(&mut cx, |worktree, _| {
+                        log::error!("error sending buffer operation: {}", error);
+                        match worktree {
+                            Worktree::Local(t) => &mut t.queued_operations,
+                            Worktree::Remote(t) => &mut t.queued_operations,
+                        }
+                        .push((buffer_id, operation));
+                    });
+                }
+            })
+            .detach();
+        }
+    }
 }
 
 impl Deref for Worktree {
@@ -665,11 +771,13 @@ pub struct LocalWorktree {
     share: Option<ShareState>,
     open_buffers: HashMap<usize, WeakModelHandle<Buffer>>,
     shared_buffers: HashMap<PeerId, HashMap<u64, ModelHandle<Buffer>>>,
+    diagnostics: HashMap<PathBuf, Vec<lsp::Diagnostic>>,
     peers: HashMap<PeerId, ReplicaId>,
-    languages: Arc<LanguageRegistry>,
     queued_operations: Vec<(u64, Operation)>,
+    languages: Arc<LanguageRegistry>,
     rpc: Arc<Client>,
     fs: Arc<dyn Fs>,
+    language_servers: HashMap<String, Arc<LanguageServer>>,
 }
 
 #[derive(Default, Deserialize)]
@@ -777,11 +885,13 @@ impl LocalWorktree {
                 poll_task: None,
                 open_buffers: Default::default(),
                 shared_buffers: Default::default(),
+                diagnostics: Default::default(),
                 queued_operations: Default::default(),
                 peers: Default::default(),
                 languages,
                 rpc,
                 fs,
+                language_servers: Default::default(),
             };
 
             cx.spawn_weak(|this, mut cx| async move {
@@ -817,6 +927,51 @@ impl LocalWorktree {
         Ok((tree, scan_states_tx))
     }
 
+    pub fn languages(&self) -> &LanguageRegistry {
+        &self.languages
+    }
+
+    pub fn ensure_language_server(
+        &mut self,
+        language: &Language,
+        cx: &mut ModelContext<Worktree>,
+    ) -> Option<Arc<LanguageServer>> {
+        if let Some(server) = self.language_servers.get(language.name()) {
+            return Some(server.clone());
+        }
+
+        if let Some(language_server) = language
+            .start_server(self.abs_path(), cx)
+            .log_err()
+            .flatten()
+        {
+            let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
+            language_server
+                .on_notification::<lsp::notification::PublishDiagnostics, _>(move |params| {
+                    smol::block_on(diagnostics_tx.send(params)).ok();
+                })
+                .detach();
+            cx.spawn_weak(|this, mut cx| async move {
+                while let Ok(diagnostics) = diagnostics_rx.recv().await {
+                    if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
+                        handle.update(&mut cx, |this, cx| {
+                            this.update_diagnostics(diagnostics, cx).log_err();
+                        });
+                    } else {
+                        break;
+                    }
+                }
+            })
+            .detach();
+
+            self.language_servers
+                .insert(language.name().to_string(), language_server.clone());
+            Some(language_server.clone())
+        } else {
+            None
+        }
+    }
+
     pub fn open_buffer(
         &mut self,
         path: &Path,
@@ -847,26 +1002,32 @@ impl LocalWorktree {
                 let (file, contents) = this
                     .update(&mut cx, |this, cx| this.as_local().unwrap().load(&path, cx))
                     .await?;
-                let language = this.read_with(&cx, |this, cx| {
+                let language = this.read_with(&cx, |this, _| {
                     use language::File;
-
-                    this.languages()
-                        .select_language(file.full_path(cx))
-                        .cloned()
+                    this.languages().select_language(file.full_path()).cloned()
                 });
-                let buffer = cx.add_model(|cx| {
-                    Buffer::from_history(
-                        0,
-                        History::new(contents.into()),
-                        Some(Box::new(file)),
-                        language,
-                        cx,
+                let (diagnostics, language_server) = this.update(&mut cx, |this, cx| {
+                    let this = this.as_local_mut().unwrap();
+                    (
+                        this.diagnostics.remove(path.as_ref()),
+                        language
+                            .as_ref()
+                            .and_then(|language| this.ensure_language_server(language, cx)),
                     )
                 });
+                let buffer = cx.add_model(|cx| {
+                    let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx);
+                    buffer.set_language(language, language_server, cx);
+                    if let Some(diagnostics) = diagnostics {
+                        buffer.update_diagnostics(None, diagnostics, cx).unwrap();
+                    }
+                    buffer
+                });
                 this.update(&mut cx, |this, _| {
                     let this = this
                         .as_local_mut()
                         .ok_or_else(|| anyhow!("must be a local worktree"))?;
+
                     this.open_buffers.insert(buffer.id(), buffer.downgrade());
                     Ok(buffer)
                 })
@@ -1009,6 +1170,7 @@ impl LocalWorktree {
     fn load(&self, path: &Path, cx: &mut ModelContext<Worktree>) -> Task<Result<(File, String)>> {
         let handle = cx.handle();
         let path = Arc::from(path);
+        let worktree_path = self.abs_path.clone();
         let abs_path = self.absolutize(&path);
         let background_snapshot = self.background_snapshot.clone();
         let fs = self.fs.clone();
@@ -1017,7 +1179,17 @@ impl LocalWorktree {
             // Eagerly populate the snapshot with an updated entry for the loaded file
             let entry = refresh_entry(fs.as_ref(), &background_snapshot, path, &abs_path).await?;
             this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
-            Ok((File::new(entry.id, handle, entry.path, entry.mtime), text))
+            Ok((
+                File {
+                    entry_id: Some(entry.id),
+                    worktree: handle,
+                    worktree_path,
+                    path: entry.path,
+                    mtime: entry.mtime,
+                    is_local: true,
+                },
+                text,
+            ))
         })
     }
 
@@ -1032,11 +1204,16 @@ impl LocalWorktree {
         cx.spawn(|this, mut cx| async move {
             let entry = save.await?;
             this.update(&mut cx, |this, cx| {
-                this.as_local_mut()
-                    .unwrap()
-                    .open_buffers
-                    .insert(buffer.id(), buffer.downgrade());
-                Ok(File::new(entry.id, cx.handle(), entry.path, entry.mtime))
+                let this = this.as_local_mut().unwrap();
+                this.open_buffers.insert(buffer.id(), buffer.downgrade());
+                Ok(File {
+                    entry_id: Some(entry.id),
+                    worktree: cx.handle(),
+                    worktree_path: this.abs_path.clone(),
+                    path: entry.path,
+                    mtime: entry.mtime,
+                    is_local: true,
+                })
             })
         })
     }
@@ -1225,6 +1402,7 @@ impl RemoteWorktree {
         let rpc = self.client.clone();
         let replica_id = self.replica_id;
         let remote_worktree_id = self.remote_id;
+        let root_path = self.snapshot.abs_path.clone();
         let path = path.to_string_lossy().to_string();
         cx.spawn_weak(|this, mut cx| async move {
             if let Some(existing_buffer) = existing_buffer {
@@ -1245,25 +1423,24 @@ impl RemoteWorktree {
                 let this = this
                     .upgrade(&cx)
                     .ok_or_else(|| anyhow!("worktree was closed"))?;
-                let file = File::new(entry.id, this.clone(), entry.path, entry.mtime);
-                let language = this.read_with(&cx, |this, cx| {
+                let file = File {
+                    entry_id: Some(entry.id),
+                    worktree: this.clone(),
+                    worktree_path: root_path,
+                    path: entry.path,
+                    mtime: entry.mtime,
+                    is_local: false,
+                };
+                let language = this.read_with(&cx, |this, _| {
                     use language::File;
-
-                    this.languages()
-                        .select_language(file.full_path(cx))
-                        .cloned()
+                    this.languages().select_language(file.full_path()).cloned()
                 });
                 let remote_buffer = response.buffer.ok_or_else(|| anyhow!("empty buffer"))?;
                 let buffer_id = remote_buffer.id as usize;
                 let buffer = cx.add_model(|cx| {
-                    Buffer::from_proto(
-                        replica_id,
-                        remote_buffer,
-                        Some(Box::new(file)),
-                        language,
-                        cx,
-                    )
-                    .unwrap()
+                    Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file)), cx)
+                        .unwrap()
+                        .with_language(language, None, cx)
                 });
                 this.update(&mut cx, |this, cx| {
                     let this = this.as_remote_mut().unwrap();
@@ -1738,24 +1915,10 @@ impl fmt::Debug for Snapshot {
 pub struct File {
     entry_id: Option<usize>,
     worktree: ModelHandle<Worktree>,
+    worktree_path: Arc<Path>,
     pub path: Arc<Path>,
     pub mtime: SystemTime,
-}
-
-impl File {
-    pub fn new(
-        entry_id: usize,
-        worktree: ModelHandle<Worktree>,
-        path: Arc<Path>,
-        mtime: SystemTime,
-    ) -> Self {
-        Self {
-            entry_id: Some(entry_id),
-            worktree,
-            path,
-            mtime,
-        }
-    }
+    is_local: bool,
 }
 
 impl language::File for File {
@@ -1775,20 +1938,29 @@ impl language::File for File {
         &self.path
     }
 
-    fn full_path(&self, cx: &AppContext) -> PathBuf {
-        let worktree = self.worktree.read(cx);
+    fn abs_path(&self) -> Option<PathBuf> {
+        if self.is_local {
+            Some(self.worktree_path.join(&self.path))
+        } else {
+            None
+        }
+    }
+
+    fn full_path(&self) -> PathBuf {
         let mut full_path = PathBuf::new();
-        full_path.push(worktree.root_name());
+        if let Some(worktree_name) = self.worktree_path.file_name() {
+            full_path.push(worktree_name);
+        }
         full_path.push(&self.path);
         full_path
     }
 
     /// Returns the last component of this handle's absolute path. If this handle refers to the root
     /// of its worktree, then this method will return the name of the worktree itself.
-    fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option<OsString> {
+    fn file_name<'a>(&'a self) -> Option<OsString> {
         self.path
             .file_name()
-            .or_else(|| Some(OsStr::new(self.worktree.read(cx).root_name())))
+            .or_else(|| self.worktree_path.file_name())
             .map(Into::into)
     }
 
@@ -1855,34 +2027,7 @@ impl language::File for File {
 
     fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) {
         self.worktree.update(cx, |worktree, cx| {
-            if let Some((rpc, remote_id)) = match worktree {
-                Worktree::Local(worktree) => worktree
-                    .remote_id
-                    .borrow()
-                    .map(|id| (worktree.rpc.clone(), id)),
-                Worktree::Remote(worktree) => Some((worktree.client.clone(), worktree.remote_id)),
-            } {
-                cx.spawn(|worktree, mut cx| async move {
-                    if let Err(error) = rpc
-                        .request(proto::UpdateBuffer {
-                            worktree_id: remote_id,
-                            buffer_id,
-                            operations: vec![(&operation).into()],
-                        })
-                        .await
-                    {
-                        worktree.update(&mut cx, |worktree, _| {
-                            log::error!("error sending buffer operation: {}", error);
-                            match worktree {
-                                Worktree::Local(t) => &mut t.queued_operations,
-                                Worktree::Remote(t) => &mut t.queued_operations,
-                            }
-                            .push((buffer_id, operation));
-                        });
-                    }
-                })
-                .detach();
-            }
+            worktree.send_buffer_update(buffer_id, operation, cx);
         });
     }
 
@@ -2798,8 +2943,12 @@ mod tests {
     use super::*;
     use crate::fs::FakeFs;
     use anyhow::Result;
+    use buffer::Point;
     use client::test::FakeServer;
     use fs::RealFs;
+    use language::{tree_sitter_rust, LanguageServerConfig};
+    use language::{Diagnostic, LanguageConfig};
+    use lsp::Url;
     use rand::prelude::*;
     use serde_json::json;
     use std::{cell::RefCell, rc::Rc};
@@ -3418,6 +3567,81 @@ mod tests {
             .await;
     }
 
+    #[gpui::test]
+    async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
+        simplelog::SimpleLogger::init(log::LevelFilter::Info, Default::default()).unwrap();
+
+        let (language_server_config, mut fake_server) =
+            LanguageServerConfig::fake(cx.background()).await;
+        let mut languages = LanguageRegistry::new();
+        languages.add(Arc::new(Language::new(
+            LanguageConfig {
+                name: "Rust".to_string(),
+                path_suffixes: vec!["rs".to_string()],
+                language_server: Some(language_server_config),
+                ..Default::default()
+            },
+            tree_sitter_rust::language(),
+        )));
+
+        let dir = temp_tree(json!({
+            "a.rs": "fn a() { A }",
+            "b.rs": "const y: i32 = 1",
+        }));
+
+        let tree = Worktree::open_local(
+            Client::new(),
+            dir.path(),
+            Arc::new(RealFs),
+            Arc::new(languages),
+            &mut cx.to_async(),
+        )
+        .await
+        .unwrap();
+        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+            .await;
+
+        // Cause worktree to start the fake language server
+        let _buffer = tree
+            .update(&mut cx, |tree, cx| tree.open_buffer("b.rs", cx))
+            .await
+            .unwrap();
+
+        fake_server
+            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
+                uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
+                version: None,
+                diagnostics: vec![lsp::Diagnostic {
+                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
+                    severity: Some(lsp::DiagnosticSeverity::ERROR),
+                    message: "undefined variable 'A'".to_string(),
+                    ..Default::default()
+                }],
+            })
+            .await;
+
+        let buffer = tree
+            .update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx))
+            .await
+            .unwrap();
+
+        buffer.read_with(&cx, |buffer, _| {
+            let diagnostics = buffer
+                .diagnostics_in_range(0..buffer.len())
+                .collect::<Vec<_>>();
+            assert_eq!(
+                diagnostics,
+                &[(
+                    Point::new(0, 9)..Point::new(0, 10),
+                    &Diagnostic {
+                        severity: lsp::DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'A'".to_string()
+                    }
+                )]
+            )
+        });
+    }
+
     #[gpui::test(iterations = 100)]
     fn test_random(mut rng: StdRng) {
         let operations = env::var("OPERATIONS")

crates/rpc/proto/zed.proto 🔗

@@ -228,6 +228,7 @@ message Buffer {
     string content = 2;
     repeated Operation.Edit history = 3;
     repeated SelectionSet selections = 4;
+    DiagnosticSet diagnostics = 5;
 }
 
 message SelectionSet {
@@ -245,6 +246,27 @@ message Selection {
     bool reversed = 4;
 }
 
+message DiagnosticSet {
+    repeated VectorClockEntry version = 1;
+    repeated Diagnostic diagnostics = 2;
+}
+
+message Diagnostic {
+    uint64 start = 1;
+    uint64 end = 2;
+    Severity severity = 3;
+    string message = 4;
+    enum Severity {
+        None = 0;
+        Error = 1;
+        Warning = 2;
+        Information = 3;
+        Hint = 4;
+    }
+}
+
+
+
 message Operation {
     oneof variant {
         Edit edit = 1;
@@ -252,6 +274,7 @@ message Operation {
         UpdateSelections update_selections = 3;
         RemoveSelections remove_selections = 4;
         SetActiveSelections set_active_selections = 5;
+        DiagnosticSet update_diagnostics = 6;
     }
 
     message Edit {

crates/rpc/src/peer.rs 🔗

@@ -398,6 +398,7 @@ mod tests {
                         content: "path/one content".to_string(),
                         history: vec![],
                         selections: vec![],
+                        diagnostics: None,
                     }),
                 }
             );
@@ -419,6 +420,7 @@ mod tests {
                         content: "path/two content".to_string(),
                         history: vec![],
                         selections: vec![],
+                        diagnostics: None,
                     }),
                 }
             );
@@ -449,6 +451,7 @@ mod tests {
                                         content: "path/one content".to_string(),
                                         history: vec![],
                                         selections: vec![],
+                                        diagnostics: None,
                                     }),
                                 }
                             }
@@ -460,6 +463,7 @@ mod tests {
                                         content: "path/two content".to_string(),
                                         history: vec![],
                                         selections: vec![],
+                                        diagnostics: None,
                                     }),
                                 }
                             }

crates/server/src/rpc.rs 🔗

@@ -982,7 +982,11 @@ mod tests {
         },
         editor::{Editor, EditorSettings, Input},
         fs::{FakeFs, Fs as _},
-        language::LanguageRegistry,
+        language::{
+            tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry,
+            LanguageServerConfig, Point,
+        },
+        lsp,
         people_panel::JoinWorktree,
         project::{ProjectPath, Worktree},
         workspace::{Workspace, WorkspaceParams},
@@ -1595,6 +1599,136 @@ mod tests {
             .await;
     }
 
+    #[gpui::test]
+    async fn test_collaborating_with_diagnostics(
+        mut cx_a: TestAppContext,
+        mut cx_b: TestAppContext,
+    ) {
+        cx_a.foreground().forbid_parking();
+        let (language_server_config, mut fake_language_server) =
+            LanguageServerConfig::fake(cx_a.background()).await;
+        let mut lang_registry = LanguageRegistry::new();
+        lang_registry.add(Arc::new(Language::new(
+            LanguageConfig {
+                name: "Rust".to_string(),
+                path_suffixes: vec!["rs".to_string()],
+                language_server: Some(language_server_config),
+                ..Default::default()
+            },
+            tree_sitter_rust::language(),
+        )));
+
+        let lang_registry = Arc::new(lang_registry);
+
+        // Connect to a server as 2 clients.
+        let mut server = TestServer::start().await;
+        let (client_a, _) = server.create_client(&mut cx_a, "user_a").await;
+        let (client_b, _) = server.create_client(&mut cx_b, "user_b").await;
+
+        // Share a local worktree as client A
+        let fs = Arc::new(FakeFs::new());
+        fs.insert_tree(
+            "/a",
+            json!({
+                ".zed.toml": r#"collaborators = ["user_b"]"#,
+                "a.rs": "let one = two",
+                "other.rs": "",
+            }),
+        )
+        .await;
+        let worktree_a = Worktree::open_local(
+            client_a.clone(),
+            "/a".as_ref(),
+            fs,
+            lang_registry.clone(),
+            &mut cx_a.to_async(),
+        )
+        .await
+        .unwrap();
+        worktree_a
+            .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+            .await;
+        let worktree_id = worktree_a
+            .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx))
+            .await
+            .unwrap();
+
+        // Cause language server to start.
+        let _ = cx_a
+            .background()
+            .spawn(worktree_a.update(&mut cx_a, |worktree, cx| {
+                worktree.open_buffer("other.rs", cx)
+            }))
+            .await
+            .unwrap();
+
+        // Simulate a language server reporting errors for a file.
+        fake_language_server
+            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
+                uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
+                version: None,
+                diagnostics: vec![
+                    lsp::Diagnostic {
+                        severity: Some(lsp::DiagnosticSeverity::ERROR),
+                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)),
+                        message: "message 1".to_string(),
+                        ..Default::default()
+                    },
+                    lsp::Diagnostic {
+                        severity: Some(lsp::DiagnosticSeverity::WARNING),
+                        range: lsp::Range::new(
+                            lsp::Position::new(0, 10),
+                            lsp::Position::new(0, 13),
+                        ),
+                        message: "message 2".to_string(),
+                        ..Default::default()
+                    },
+                ],
+            })
+            .await;
+
+        // Join the worktree as client B.
+        let worktree_b = Worktree::open_remote(
+            client_b.clone(),
+            worktree_id,
+            lang_registry.clone(),
+            &mut cx_b.to_async(),
+        )
+        .await
+        .unwrap();
+
+        // Open the file with the errors.
+        let buffer_b = cx_b
+            .background()
+            .spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.rs", cx)))
+            .await
+            .unwrap();
+
+        buffer_b.read_with(&cx_b, |buffer, _| {
+            assert_eq!(
+                buffer
+                    .diagnostics_in_range(0..buffer.len())
+                    .collect::<Vec<_>>(),
+                &[
+                    (
+                        Point::new(0, 4)..Point::new(0, 7),
+                        &Diagnostic {
+                            message: "message 1".to_string(),
+                            severity: lsp::DiagnosticSeverity::ERROR,
+                        }
+                    ),
+                    (
+                        Point::new(0, 10)..Point::new(0, 13),
+                        &Diagnostic {
+                            severity: lsp::DiagnosticSeverity::WARNING,
+                            message: "message 2".to_string()
+                        }
+                    )
+                ]
+            );
+        });
+    }
+
     #[gpui::test]
     async fn test_basic_chat(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
         cx_a.foreground().forbid_parking();

crates/sum_tree/src/cursor.rs 🔗

@@ -184,9 +184,9 @@ where
         self.next_internal(|_| true, cx)
     }
 
-    fn next_internal<F>(&mut self, filter_node: F, cx: &<T::Summary as Summary>::Context)
+    fn next_internal<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
     where
-        F: Fn(&T::Summary) -> bool,
+        F: FnMut(&T::Summary) -> bool,
     {
         let mut descend = false;
 
@@ -509,24 +509,24 @@ where
     }
 }
 
-pub struct FilterCursor<'a, F: Fn(&T::Summary) -> bool, T: Item, D> {
+pub struct FilterCursor<'a, F, T: Item, D> {
     cursor: Cursor<'a, T, D>,
     filter_node: F,
 }
 
 impl<'a, F, T, D> FilterCursor<'a, F, T, D>
 where
-    F: Fn(&T::Summary) -> bool,
+    F: FnMut(&T::Summary) -> bool,
     T: Item,
     D: Dimension<'a, T::Summary>,
 {
     pub fn new(
         tree: &'a SumTree<T>,
-        filter_node: F,
+        mut filter_node: F,
         cx: &<T::Summary as Summary>::Context,
     ) -> Self {
         let mut cursor = tree.cursor::<D>();
-        cursor.next_internal(&filter_node, cx);
+        cursor.next_internal(&mut filter_node, cx);
         Self {
             cursor,
             filter_node,
@@ -537,12 +537,16 @@ where
         self.cursor.start()
     }
 
+    pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
+        self.cursor.end(cx)
+    }
+
     pub fn item(&self) -> Option<&'a T> {
         self.cursor.item()
     }
 
     pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
-        self.cursor.next_internal(&self.filter_node, cx);
+        self.cursor.next_internal(&mut self.filter_node, cx);
     }
 }
 

crates/sum_tree/src/lib.rs 🔗

@@ -163,7 +163,7 @@ impl<T: Item> SumTree<T> {
         cx: &<T::Summary as Summary>::Context,
     ) -> FilterCursor<F, T, U>
     where
-        F: Fn(&T::Summary) -> bool,
+        F: FnMut(&T::Summary) -> bool,
         U: Dimension<'a, T::Summary>,
     {
         FilterCursor::new(self, filter_node, cx)

crates/theme/src/lib.rs 🔗

@@ -214,6 +214,12 @@ pub struct EditorStyle {
     pub line_number_active: Color,
     pub guest_selections: Vec<SelectionStyle>,
     pub syntax: Arc<SyntaxTheme>,
+    pub error_underline: Color,
+    pub warning_underline: Color,
+    #[serde(default)]
+    pub information_underline: Color,
+    #[serde(default)]
+    pub hint_underline: Color,
 }
 
 #[derive(Clone, Copy, Default, Deserialize)]
@@ -254,6 +260,10 @@ impl InputEditorStyle {
             line_number_active: Default::default(),
             guest_selections: Default::default(),
             syntax: Default::default(),
+            error_underline: Default::default(),
+            warning_underline: Default::default(),
+            information_underline: Default::default(),
+            hint_underline: Default::default(),
         }
     }
 }

crates/workspace/src/items.rs 🔗

@@ -37,7 +37,7 @@ impl Item for Buffer {
                     font_id,
                     font_size,
                     font_properties,
-                    underline: false,
+                    underline: None,
                 };
                 EditorSettings {
                     tab_size: settings.tab_size,
@@ -77,7 +77,7 @@ impl ItemView for Editor {
             .buffer()
             .read(cx)
             .file()
-            .and_then(|file| file.file_name(cx));
+            .and_then(|file| file.file_name());
         if let Some(name) = filename {
             name.to_string_lossy().into()
         } else {
@@ -127,16 +127,21 @@ impl ItemView for Editor {
 
             cx.spawn(|buffer, mut cx| async move {
                 save_as.await.map(|new_file| {
-                    let language = worktree.read_with(&cx, |worktree, cx| {
-                        worktree
+                    let (language, language_server) = worktree.update(&mut cx, |worktree, cx| {
+                        let worktree = worktree.as_local_mut().unwrap();
+                        let language = worktree
                             .languages()
-                            .select_language(new_file.full_path(cx))
-                            .cloned()
+                            .select_language(new_file.full_path())
+                            .cloned();
+                        let language_server = language
+                            .as_ref()
+                            .and_then(|language| worktree.ensure_language_server(language, cx));
+                        (language, language_server.clone())
                     });
 
                     buffer.update(&mut cx, |buffer, cx| {
                         buffer.did_save(version, new_file.mtime, Some(Box::new(new_file)), cx);
-                        buffer.set_language(language, cx);
+                        buffer.set_language(language, language_server, cx);
                     });
                 })
             })

crates/zed/Cargo.toml 🔗

@@ -17,11 +17,14 @@ path = "src/main.rs"
 test-support = [
     "buffer/test-support",
     "client/test-support",
+    "editor/test-support",
     "gpui/test-support",
     "language/test-support",
+    "lsp/test-support",
     "project/test-support",
     "rpc/test-support",
     "tempdir",
+    "workspace/test-support",
 ]
 
 [dependencies]
@@ -35,6 +38,7 @@ editor = { path = "../editor" }
 file_finder = { path = "../file_finder" }
 gpui = { path = "../gpui" }
 language = { path = "../language" }
+lsp = { path = "../lsp" }
 people_panel = { path = "../people_panel" }
 project = { path = "../project" }
 project_panel = { path = "../project_panel" }
@@ -88,6 +92,7 @@ buffer = { path = "../buffer", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
 project = { path = "../project", features = ["test-support"] }
 rpc = { path = "../rpc", features = ["test-support"] }
 client = { path = "../client", features = ["test-support"] }

crates/zed/assets/themes/_base.toml 🔗

@@ -226,3 +226,8 @@ line_number = "$text.2.color"
 line_number_active = "$text.0.color"
 selection = "$selection.host"
 guest_selections = "$selection.guests"
+
+error_underline = "$status.bad"
+warning_underline = "$status.warn"
+information_underline = "$status.info"
+hint_underline = "$status.info"

crates/zed/assets/themes/light.toml 🔗

@@ -26,7 +26,7 @@ guests = [
   { selection = "#EE823133", cursor = "#EE8231" },
   { selection = "#5A2B9233", cursor = "#5A2B92" },
   { selection = "#FDF35133", cursor = "#FDF351" },
-  { selection = "#4EACAD33", cursor = "#4EACAD" }
+  { selection = "#4EACAD33", cursor = "#4EACAD" },
 ]
 
 [status]

crates/zed/languages/rust/config.toml 🔗

@@ -8,3 +8,7 @@ brackets = [
     { start = "\"", end = "\"", close = true, newline = false },
     { start = "/*", end = " */", close = true, newline = false },
 ]
+
+[language_server]
+binary = "rust-analyzer"
+disk_based_diagnostic_sources = ["rustc"]

crates/zed/src/language.rs 🔗

@@ -1,4 +1,4 @@
-pub use language::{Language, LanguageRegistry};
+pub use language::*;
 use rust_embed::RustEmbed;
 use std::borrow::Cow;
 use std::{str, sync::Arc};

crates/zed/src/lib.rs 🔗

@@ -15,6 +15,7 @@ use gpui::{
     platform::WindowOptions,
     ModelHandle, MutableAppContext, PathPromptOptions, Task, ViewContext,
 };
+pub use lsp;
 use parking_lot::Mutex;
 pub use people_panel;
 use people_panel::PeoplePanel;

script/bundle 🔗

@@ -2,6 +2,8 @@
 
 set -e
 
+export ZED_BUNDLE=true
+
 # Install cargo-bundle 0.5.0 if it's not already installed
 cargo install cargo-bundle --version 0.5.0
 
@@ -16,6 +18,9 @@ cargo build --release --target aarch64-apple-darwin
 # Replace the bundle's binary with a "fat binary" that combines the two architecture-specific binaries
 lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed
 
+# Bundle rust-analyzer
+cp vendor/bin/rust-analyzer target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/
+
 # Sign the app bundle with an ad-hoc signature so it runs on the M1. We need a real certificate but this works for now.
 if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then
     echo "Signing bundle with Apple-issued certificate"
@@ -26,6 +31,7 @@ if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTAR
     security import /tmp/zed-certificate.p12 -k zed.keychain -P $MACOS_CERTIFICATE_PASSWORD -T /usr/bin/codesign
     rm /tmp/zed-certificate.p12
     security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_CERTIFICATE_PASSWORD zed.keychain
+    /usr/bin/codesign --force --deep --timestamp --options runtime --sign "Zed Industries, Inc." target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/rust-analyzer -v
     /usr/bin/codesign --force --deep --timestamp --options runtime --sign "Zed Industries, Inc." target/x86_64-apple-darwin/release/bundle/osx/Zed.app -v
     security default-keychain -s login.keychain
 else

script/download-rust-analyzer 🔗

@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -e
+
+export RUST_ANALYZER_URL="https://github.com/rust-analyzer/rust-analyzer/releases/download/2021-10-18/"
+
+function download {
+    local filename="rust-analyzer-$1"
+    curl -L $RUST_ANALYZER_URL/$filename.gz | gunzip > vendor/bin/$filename
+    chmod +x vendor/bin/$filename
+}
+
+mkdir -p vendor/bin
+download "x86_64-apple-darwin"
+download "aarch64-apple-darwin"
+
+cd vendor/bin
+lipo -create rust-analyzer-* -output rust-analyzer
+rm rust-analyzer-*

script/server 🔗

@@ -2,5 +2,5 @@
 
 set -e
 
-cd server
+cd crates/server
 cargo run $@

script/sqlx 🔗

@@ -5,7 +5,7 @@ set -e
 # Install sqlx-cli if needed
 [[ "$(sqlx --version)" == "sqlx-cli 0.5.7" ]] || cargo install sqlx-cli --version 0.5.7
 
-cd server
+cd crates/server
 
 # Export contents of .env.toml
 eval "$(cargo run --bin dotenv)"