Detailed changes
@@ -310,6 +310,7 @@ dependencies = [
"language",
"log",
"menu",
+ "multi_buffer",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
"project",
@@ -1468,7 +1469,7 @@ dependencies = [
[[package]]
name = "collab"
-version = "0.25.0"
+version = "0.27.0"
dependencies = [
"anyhow",
"async-trait",
@@ -1713,6 +1714,7 @@ dependencies = [
"log",
"lsp",
"node_runtime",
+ "parking_lot 0.11.2",
"rpc",
"serde",
"serde_derive",
@@ -2410,6 +2412,7 @@ dependencies = [
"lazy_static",
"log",
"lsp",
+ "multi_buffer",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
"postage",
@@ -4600,6 +4603,55 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389"
+[[package]]
+name = "multi_buffer"
+version = "0.1.0"
+dependencies = [
+ "aho-corasick",
+ "anyhow",
+ "client",
+ "clock",
+ "collections",
+ "context_menu",
+ "convert_case 0.6.0",
+ "copilot",
+ "ctor",
+ "env_logger 0.9.3",
+ "futures 0.3.28",
+ "git",
+ "gpui",
+ "indoc",
+ "itertools 0.10.5",
+ "language",
+ "lazy_static",
+ "log",
+ "lsp",
+ "ordered-float 2.10.0",
+ "parking_lot 0.11.2",
+ "postage",
+ "project",
+ "pulldown-cmark",
+ "rand 0.8.5",
+ "rich_text",
+ "schemars",
+ "serde",
+ "serde_derive",
+ "settings",
+ "smallvec",
+ "smol",
+ "snippet",
+ "sum_tree",
+ "text",
+ "theme",
+ "tree-sitter",
+ "tree-sitter-html",
+ "tree-sitter-rust",
+ "tree-sitter-typescript",
+ "unindent",
+ "util",
+ "workspace",
+]
+
[[package]]
name = "multimap"
version = "0.8.3"
@@ -5562,6 +5614,7 @@ dependencies = [
"log",
"lsp",
"node_runtime",
+ "parking_lot 0.11.2",
"serde",
"serde_derive",
"serde_json",
@@ -10094,7 +10147,7 @@ dependencies = [
[[package]]
name = "zed"
-version = "0.110.0"
+version = "0.111.0"
dependencies = [
"activity_indicator",
"ai",
@@ -46,6 +46,7 @@ members = [
"crates/lsp",
"crates/media",
"crates/menu",
+ "crates/multi_buffer",
"crates/node_runtime",
"crates/notifications",
"crates/outline",
@@ -17,6 +17,7 @@ fs = { path = "../fs" }
gpui = { path = "../gpui" }
language = { path = "../language" }
menu = { path = "../menu" }
+multi_buffer = { path = "../multi_buffer" }
search = { path = "../search" }
settings = { path = "../settings" }
theme = { path = "../theme" }
@@ -292,7 +292,7 @@ impl AssistantPanel {
project: &ModelHandle<Project>,
) {
let selection = editor.read(cx).selections.newest_anchor().clone();
- if selection.start.excerpt_id() != selection.end.excerpt_id() {
+ if selection.start.excerpt_id != selection.end.excerpt_id {
return;
}
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
@@ -1,10 +1,11 @@
use crate::streaming_diff::{Hunk, StreamingDiff};
use ai::completion::{CompletionProvider, CompletionRequest};
use anyhow::Result;
-use editor::{multi_buffer, Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
+use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
use gpui::{Entity, ModelContext, ModelHandle, Task};
use language::{Rope, TransactionId};
+use multi_buffer;
use std::{cmp, future, ops::Range, sync::Arc};
pub enum Event {
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
-version = "0.25.0"
+version = "0.27.0"
publish = false
[[bin]]
@@ -942,7 +942,7 @@ async fn create_room(
let live_kit_room = live_kit_room.clone();
let live_kit = session.live_kit_client.as_ref();
- util::async_iife!({
+ util::async_maybe!({
let live_kit = live_kit?;
let token = live_kit
@@ -46,7 +46,7 @@ use serde_derive::{Deserialize, Serialize};
use settings::SettingsStore;
use std::{borrow::Cow, hash::Hash, mem, sync::Arc};
use theme::{components::ComponentExt, IconButton, Interactive};
-use util::{iife, ResultExt, TryFutureExt};
+use util::{maybe, ResultExt, TryFutureExt};
use workspace::{
dock::{DockPosition, Panel},
item::ItemHandle,
@@ -1461,7 +1461,7 @@ impl CollabPanel {
let text = match section {
Section::ActiveCall => {
- let channel_name = iife!({
+ let channel_name = maybe!({
let channel_id = ActiveCall::global(cx).read(cx).channel_id(cx)?;
let channel = self.channel_store.read(cx).channel_for_id(channel_id)?;
@@ -1941,7 +1941,7 @@ impl CollabPanel {
let disclosed =
has_children.then(|| !self.collapsed_channels.binary_search(&channel.id).is_ok());
- let is_active = iife!({
+ let is_active = maybe!({
let call_channel = ActiveCall::global(cx)
.read(cx)
.room()?
@@ -2791,7 +2791,7 @@ impl CollabPanel {
}
}
ListEntry::Channel { channel, .. } => {
- let is_active = iife!({
+ let is_active = maybe!({
let call_channel = ActiveCall::global(cx)
.read(cx)
.room()?
@@ -36,6 +36,7 @@ serde.workspace = true
serde_derive.workspace = true
smol.workspace = true
futures.workspace = true
+parking_lot.workspace = true
[dev-dependencies]
clock = { path = "../clock" }
@@ -16,6 +16,7 @@ use language::{
};
use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId};
use node_runtime::NodeRuntime;
+use parking_lot::Mutex;
use request::StatusNotification;
use settings::SettingsStore;
use smol::{fs, io::BufReader, stream::StreamExt};
@@ -387,8 +388,15 @@ impl Copilot {
path: node_path,
arguments,
};
- let server =
- LanguageServer::new(new_server_id, binary, Path::new("/"), None, cx.clone())?;
+
+ let server = LanguageServer::new(
+ Arc::new(Mutex::new(None)),
+ new_server_id,
+ binary,
+ Path::new("/"),
+ None,
+ cx.clone(),
+ )?;
server
.on_notification::<StatusNotification, _>(
@@ -20,7 +20,7 @@ use std::future::Future;
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use util::channel::ReleaseChannel;
-use util::{async_iife, ResultExt};
+use util::{async_maybe, ResultExt};
const CONNECTION_INITIALIZE_QUERY: &'static str = sql!(
PRAGMA foreign_keys=TRUE;
@@ -57,7 +57,7 @@ pub async fn open_db<M: Migrator + 'static>(
let release_channel_name = release_channel.dev_name();
let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
- let connection = async_iife!({
+ let connection = async_maybe!({
smol::fs::create_dir_all(&main_db_dir)
.await
.context("Could not create db directory")
@@ -14,6 +14,7 @@ test-support = [
"text/test-support",
"language/test-support",
"gpui/test-support",
+ "multi_buffer/test-support",
"project/test-support",
"util/test-support",
"workspace/test-support",
@@ -34,6 +35,7 @@ git = { path = "../git" }
gpui = { path = "../gpui" }
language = { path = "../language" }
lsp = { path = "../lsp" }
+multi_buffer = { path = "../multi_buffer" }
project = { path = "../project" }
rpc = { path = "../rpc" }
rich_text = { path = "../rich_text" }
@@ -993,8 +993,8 @@ mod tests {
use super::*;
use crate::display_map::inlay_map::InlayMap;
use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
- use crate::multi_buffer::MultiBuffer;
use gpui::{elements::Empty, Element};
+ use multi_buffer::MultiBuffer;
use rand::prelude::*;
use settings::SettingsStore;
use std::env;
@@ -91,7 +91,7 @@ impl<'a> FoldMapWriter<'a> {
// For now, ignore any ranges that span an excerpt boundary.
let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end));
- if fold.0.start.excerpt_id() != fold.0.end.excerpt_id() {
+ if fold.0.start.excerpt_id != fold.0.end.excerpt_id {
continue;
}
@@ -1,10 +1,8 @@
-use crate::{
- multi_buffer::{MultiBufferChunks, MultiBufferRows},
- Anchor, InlayId, MultiBufferSnapshot, ToOffset,
-};
+use crate::{Anchor, InlayId, MultiBufferSnapshot, ToOffset};
use collections::{BTreeMap, BTreeSet};
use gpui::fonts::HighlightStyle;
use language::{Chunk, Edit, Point, TextSummary};
+use multi_buffer::{MultiBufferChunks, MultiBufferRows};
use std::{
any::TypeId,
cmp,
@@ -11,7 +11,6 @@ pub mod items;
mod link_go_to_definition;
mod mouse_context_menu;
pub mod movement;
-pub mod multi_buffer;
mod persistence;
pub mod scroll;
pub mod selections_collection;
@@ -968,7 +967,6 @@ impl CompletionsMenu {
self.selected_item -= 1;
} else {
self.selected_item = self.matches.len() - 1;
- self.list.scroll_to(ScrollTarget::Show(self.selected_item));
}
self.list.scroll_to(ScrollTarget::Show(self.selected_item));
self.attempt_resolve_selected_completion_documentation(project, cx);
@@ -1539,7 +1537,6 @@ impl CodeActionsMenu {
self.selected_item -= 1;
} else {
self.selected_item = self.actions.len() - 1;
- self.list.scroll_to(ScrollTarget::Show(self.selected_item));
}
self.list.scroll_to(ScrollTarget::Show(self.selected_item));
cx.notify();
@@ -1548,11 +1545,10 @@ impl CodeActionsMenu {
fn select_next(&mut self, cx: &mut ViewContext<Editor>) {
if self.selected_item + 1 < self.actions.len() {
self.selected_item += 1;
- self.list.scroll_to(ScrollTarget::Show(self.selected_item));
} else {
self.selected_item = 0;
- self.list.scroll_to(ScrollTarget::Show(self.selected_item));
}
+ self.list.scroll_to(ScrollTarget::Show(self.selected_item));
cx.notify();
}
@@ -7213,6 +7209,7 @@ impl Editor {
&& entry.diagnostic.severity <= DiagnosticSeverity::WARNING
&& !entry.range.is_empty()
&& Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end())
+ && !entry.range.contains(&search_start)
{
Some((entry.range, entry.diagnostic.group_id))
} else {
@@ -7319,11 +7316,11 @@ impl Editor {
let display_point = initial_point.to_display_point(snapshot);
let mut hunks = hunks
.map(|hunk| diff_hunk_to_display(hunk, &snapshot))
- .skip_while(|hunk| {
+ .filter(|hunk| {
if is_wrapped {
- false
+ true
} else {
- hunk.contains_display_row(display_point.row())
+ !hunk.contains_display_row(display_point.row())
}
})
.dedup();
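
The hunk-navigation change above swaps `skip_while` for `filter`: `skip_while` only drops a leading run of matching items, so the hunk containing the cursor was only excluded when it happened to come first, while `filter` excludes it wherever it appears in the iterator. A minimal standalone sketch of the difference, using row numbers in place of display hunks (the values are illustrative):

```rust
fn main() {
    // Rows at which hunks start; 9 is the row the cursor is currently on.
    let hunk_rows = vec![1, 5, 9, 12];
    let current_row = 9;

    // skip_while only skips a leading prefix: 1 fails the predicate,
    // so nothing is dropped and the current hunk (9) survives.
    let skipped: Vec<_> = hunk_rows
        .iter()
        .copied()
        .skip_while(|row| *row == current_row)
        .collect();
    assert_eq!(skipped, vec![1, 5, 9, 12]);

    // filter drops every matching item, wherever it appears.
    let filtered: Vec<_> = hunk_rows
        .iter()
        .copied()
        .filter(|row| *row != current_row)
        .collect();
    assert_eq!(filtered, vec![1, 5, 12]);
}
```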
@@ -7715,8 +7712,8 @@ impl Editor {
let mut buffer_highlights = this
.document_highlights_for_position(selection.head(), &buffer)
.filter(|highlight| {
- highlight.start.excerpt_id() == selection.head().excerpt_id()
- && highlight.end.excerpt_id() == selection.head().excerpt_id()
+ highlight.start.excerpt_id == selection.head().excerpt_id
+ && highlight.end.excerpt_id == selection.head().excerpt_id
});
buffer_highlights
.next()
@@ -8957,6 +8954,16 @@ impl Editor {
telemetry.report_clickhouse_event(event, telemetry_settings);
}
+ #[cfg(any(test, feature = "test-support"))]
+ fn report_editor_event(
+ &self,
+ _operation: &'static str,
+ _file_extension: Option<String>,
+ _cx: &AppContext,
+ ) {
+ }
+
+ #[cfg(not(any(test, feature = "test-support")))]
fn report_editor_event(
&self,
operation: &'static str,
@@ -6717,6 +6717,102 @@ fn test_combine_syntax_and_fuzzy_match_highlights() {
);
}
+#[gpui::test]
+async fn go_to_prev_overlapping_diagnostic(
+ deterministic: Arc<Deterministic>,
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+ let project = cx.update_editor(|editor, _| editor.project.clone().unwrap());
+
+ cx.set_state(indoc! {"
+        ˇfn func(abc def: i32) -> u32 {
+ }
+ "});
+
+ cx.update(|cx| {
+ project.update(cx, |project, cx| {
+ project
+ .update_diagnostics(
+ LanguageServerId(0),
+ lsp::PublishDiagnosticsParams {
+ uri: lsp::Url::from_file_path("/root/file").unwrap(),
+ version: None,
+ diagnostics: vec![
+ lsp::Diagnostic {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 11),
+ lsp::Position::new(0, 12),
+ ),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ ..Default::default()
+ },
+ lsp::Diagnostic {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 12),
+ lsp::Position::new(0, 15),
+ ),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ ..Default::default()
+ },
+ lsp::Diagnostic {
+ range: lsp::Range::new(
+ lsp::Position::new(0, 25),
+ lsp::Position::new(0, 28),
+ ),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ ..Default::default()
+ },
+ ],
+ },
+ &[],
+ cx,
+ )
+ .unwrap()
+ });
+ });
+
+ deterministic.run_until_parked();
+
+ cx.update_editor(|editor, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
+ });
+
+ cx.assert_editor_state(indoc! {"
+        fn func(abc def: i32) -> ˇu32 {
+ }
+ "});
+
+ cx.update_editor(|editor, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
+ });
+
+ cx.assert_editor_state(indoc! {"
+        fn func(abc ˇdef: i32) -> u32 {
+ }
+ "});
+
+ cx.update_editor(|editor, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
+ });
+
+ cx.assert_editor_state(indoc! {"
+        fn func(abcˇ def: i32) -> u32 {
+ }
+ "});
+
+ cx.update_editor(|editor, cx| {
+ editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
+ });
+
+ cx.assert_editor_state(indoc! {"
+        fn func(abc def: i32) -> ˇu32 {
+ }
+ "});
+}
+
#[gpui::test]
async fn go_to_hunk(deterministic: Arc<Deterministic>, cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
@@ -6799,6 +6895,46 @@ async fn go_to_hunk(deterministic: Arc<Deterministic>, cx: &mut gpui::TestAppCon
.unindent(),
);
+ cx.update_editor(|editor, cx| {
+ editor.go_to_prev_hunk(&GoToPrevHunk, cx);
+ });
+
+ cx.assert_editor_state(
+ &r#"
+ use some::modified;
+
+    ˇ
+ fn main() {
+ println!("hello there");
+
+ println!("around the");
+ println!("world");
+ }
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| {
+ for _ in 0..3 {
+ editor.go_to_prev_hunk(&GoToPrevHunk, cx);
+ }
+ });
+
+ cx.assert_editor_state(
+ &r#"
+ use some::modified;
+
+
+ fn main() {
+    ˇ        println!("hello there");
+
+ println!("around the");
+ println!("world");
+ }
+ "#
+ .unindent(),
+ );
+
cx.update_editor(|editor, cx| {
editor.fold(&Fold, cx);
@@ -36,7 +36,7 @@ impl DisplayDiffHunk {
DisplayDiffHunk::Unfolded {
display_row_range, ..
- } => display_row_range.start..=display_row_range.end - 1,
+ } => display_row_range.start..=display_row_range.end,
};
range.contains(&display_row)
@@ -77,8 +77,8 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
} else {
let start = hunk_start_point.to_display_point(snapshot).row();
- let hunk_end_row_inclusive = hunk.buffer_range.end.max(hunk.buffer_range.start);
- let hunk_end_point = Point::new(hunk_end_row_inclusive, 0);
+ let hunk_end_row = hunk.buffer_range.end.max(hunk.buffer_range.start);
+ let hunk_end_point = Point::new(hunk_end_row, 0);
let end = hunk_end_point.to_display_point(snapshot).row();
DisplayDiffHunk::Unfolded {
@@ -87,3 +87,196 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
}
}
}
+
+#[cfg(any(test, feature = "test_support"))]
+mod tests {
+ use crate::editor_tests::init_test;
+ use crate::Point;
+ use gpui::TestAppContext;
+ use multi_buffer::{ExcerptRange, MultiBuffer};
+ use project::{FakeFs, Project};
+ use unindent::Unindent;
+ #[gpui::test]
+ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
+ use git::diff::DiffHunkStatus;
+ init_test(cx, |_| {});
+
+ let fs = FakeFs::new(cx.background());
+ let project = Project::test(fs, [], cx).await;
+
+ // buffer has two modified hunks with two rows each
+ let buffer_1 = project
+ .update(cx, |project, cx| {
+ project.create_buffer(
+ "
+ 1.zero
+ 1.ONE
+ 1.TWO
+ 1.three
+ 1.FOUR
+ 1.FIVE
+ 1.six
+ "
+ .unindent()
+ .as_str(),
+ None,
+ cx,
+ )
+ })
+ .unwrap();
+ buffer_1.update(cx, |buffer, cx| {
+ buffer.set_diff_base(
+ Some(
+ "
+ 1.zero
+ 1.one
+ 1.two
+ 1.three
+ 1.four
+ 1.five
+ 1.six
+ "
+ .unindent(),
+ ),
+ cx,
+ );
+ });
+
+ // buffer has a deletion hunk and an insertion hunk
+ let buffer_2 = project
+ .update(cx, |project, cx| {
+ project.create_buffer(
+ "
+ 2.zero
+ 2.one
+ 2.two
+ 2.three
+ 2.four
+ 2.five
+ 2.six
+ "
+ .unindent()
+ .as_str(),
+ None,
+ cx,
+ )
+ })
+ .unwrap();
+ buffer_2.update(cx, |buffer, cx| {
+ buffer.set_diff_base(
+ Some(
+ "
+ 2.zero
+ 2.one
+ 2.one-and-a-half
+ 2.two
+ 2.three
+ 2.four
+ 2.six
+ "
+ .unindent(),
+ ),
+ cx,
+ );
+ });
+
+ cx.foreground().run_until_parked();
+
+ let multibuffer = cx.add_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ multibuffer.push_excerpts(
+ buffer_1.clone(),
+ [
+ // excerpt ends in the middle of a modified hunk
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 5),
+ primary: Default::default(),
+ },
+ // excerpt begins in the middle of a modified hunk
+ ExcerptRange {
+ context: Point::new(5, 0)..Point::new(6, 5),
+ primary: Default::default(),
+ },
+ ],
+ cx,
+ );
+ multibuffer.push_excerpts(
+ buffer_2.clone(),
+ [
+ // excerpt ends at a deletion
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 5),
+ primary: Default::default(),
+ },
+ // excerpt starts at a deletion
+ ExcerptRange {
+ context: Point::new(2, 0)..Point::new(2, 5),
+ primary: Default::default(),
+ },
+ // excerpt fully contains a deletion hunk
+ ExcerptRange {
+ context: Point::new(1, 0)..Point::new(2, 5),
+ primary: Default::default(),
+ },
+ // excerpt fully contains an insertion hunk
+ ExcerptRange {
+ context: Point::new(4, 0)..Point::new(6, 5),
+ primary: Default::default(),
+ },
+ ],
+ cx,
+ );
+ multibuffer
+ });
+
+ let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
+
+ assert_eq!(
+ snapshot.text(),
+ "
+ 1.zero
+ 1.ONE
+ 1.FIVE
+ 1.six
+ 2.zero
+ 2.one
+ 2.two
+ 2.one
+ 2.two
+ 2.four
+ 2.five
+ 2.six"
+ .unindent()
+ );
+
+ let expected = [
+ (DiffHunkStatus::Modified, 1..2),
+ (DiffHunkStatus::Modified, 2..3),
+ //TODO: Define better when and where removed hunks show up at range extremities
+ (DiffHunkStatus::Removed, 6..6),
+ (DiffHunkStatus::Removed, 8..8),
+ (DiffHunkStatus::Added, 10..11),
+ ];
+
+ assert_eq!(
+ snapshot
+ .git_diff_hunks_in_range(0..12)
+ .map(|hunk| (hunk.status(), hunk.buffer_range))
+ .collect::<Vec<_>>(),
+ &expected,
+ );
+
+ assert_eq!(
+ snapshot
+ .git_diff_hunks_in_range_rev(0..12)
+ .map(|hunk| (hunk.status(), hunk.buffer_range))
+ .collect::<Vec<_>>(),
+ expected
+ .iter()
+ .rev()
+ .cloned()
+ .collect::<Vec<_>>()
+ .as_slice(),
+ );
+ }
+}
@@ -8,6 +8,7 @@ use crate::{
use gpui::{ModelHandle, ViewContext};
+use project::Project;
use util::test::{marked_text_offsets, marked_text_ranges};
#[cfg(test)]
@@ -63,9 +64,20 @@ pub fn assert_text_with_selections(
assert_eq!(editor.selections.ranges(cx), text_ranges);
}
+// RA thinks this is dead code even though it is used in a whole lot of tests
+#[allow(dead_code)]
+#[cfg(any(test, feature = "test-support"))]
pub(crate) fn build_editor(
buffer: ModelHandle<MultiBuffer>,
cx: &mut ViewContext<Editor>,
) -> Editor {
Editor::new(EditorMode::Full, buffer, None, None, cx)
}
+
+pub(crate) fn build_editor_with_project(
+ project: ModelHandle<Project>,
+ buffer: ModelHandle<MultiBuffer>,
+ cx: &mut ViewContext<Editor>,
+) -> Editor {
+ Editor::new(EditorMode::Full, buffer, Some(project), None, cx)
+}
@@ -6,18 +6,18 @@ use std::{
use anyhow::Result;
+use crate::{Editor, ToPoint};
use collections::HashSet;
use futures::Future;
use gpui::{json, ViewContext, ViewHandle};
use indoc::indoc;
use language::{point_to_lsp, FakeLspAdapter, Language, LanguageConfig, LanguageQueries};
use lsp::{notification, request};
+use multi_buffer::ToPointUtf16;
use project::Project;
use smol::stream::StreamExt;
use workspace::{AppState, Workspace, WorkspaceHandle};
-use crate::{multi_buffer::ToPointUtf16, Editor, ToPoint};
-
use super::editor_test_context::EditorTestContext;
pub struct EditorLspTestContext<'a> {
@@ -18,7 +18,7 @@ use util::{
test::{generate_marked_text, marked_text_ranges},
};
-use super::build_editor;
+use super::build_editor_with_project;
pub struct EditorTestContext<'a> {
pub cx: &'a mut gpui::TestAppContext,
@@ -29,13 +29,24 @@ pub struct EditorTestContext<'a> {
impl<'a> EditorTestContext<'a> {
pub async fn new(cx: &'a mut gpui::TestAppContext) -> EditorTestContext<'a> {
let fs = FakeFs::new(cx.background());
- let project = Project::test(fs, [], cx).await;
+ // fs.insert_file("/file", "".to_owned()).await;
+ fs.insert_tree(
+ "/root",
+ gpui::serde_json::json!({
+ "file": "",
+ }),
+ )
+ .await;
+ let project = Project::test(fs, ["/root".as_ref()], cx).await;
let buffer = project
- .update(cx, |project, cx| project.create_buffer("", None, cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer("/root/file", cx)
+ })
+ .await
.unwrap();
let window = cx.add_window(|cx| {
cx.focus_self();
- build_editor(MultiBuffer::build_from_buffer(buffer, cx), cx)
+ build_editor_with_project(project, MultiBuffer::build_from_buffer(buffer, cx), cx)
});
let editor = window.root(cx);
Self {
@@ -38,7 +38,7 @@ use std::{
path::{Path, PathBuf},
str,
sync::{
- atomic::{AtomicUsize, Ordering::SeqCst},
+ atomic::{AtomicU64, AtomicUsize, Ordering::SeqCst},
Arc,
},
};
@@ -115,6 +115,7 @@ pub struct CachedLspAdapter {
pub disk_based_diagnostics_progress_token: Option<String>,
pub language_ids: HashMap<String, String>,
pub adapter: Arc<dyn LspAdapter>,
+ pub reinstall_attempt_count: AtomicU64,
}
impl CachedLspAdapter {
@@ -133,6 +134,7 @@ impl CachedLspAdapter {
disk_based_diagnostics_progress_token,
language_ids,
adapter,
+ reinstall_attempt_count: AtomicU64::new(0),
})
}
@@ -645,7 +647,7 @@ struct LanguageRegistryState {
pub struct PendingLanguageServer {
pub server_id: LanguageServerId,
- pub task: Task<Result<Option<lsp::LanguageServer>>>,
+ pub task: Task<Result<lsp::LanguageServer>>,
pub container_dir: Option<Arc<Path>>,
}
@@ -884,6 +886,7 @@ impl LanguageRegistry {
pub fn create_pending_language_server(
self: &Arc<Self>,
+ stderr_capture: Arc<Mutex<Option<String>>>,
language: Arc<Language>,
adapter: Arc<CachedLspAdapter>,
root_path: Arc<Path>,
@@ -923,7 +926,7 @@ impl LanguageRegistry {
})
.detach();
- Ok(Some(server))
+ Ok(server)
});
return Some(PendingLanguageServer {
@@ -971,24 +974,23 @@ impl LanguageRegistry {
.clone();
drop(lock);
- let binary = match entry.clone().await.log_err() {
- Some(binary) => binary,
- None => return Ok(None),
+ let binary = match entry.clone().await {
+ Ok(binary) => binary,
+ Err(err) => anyhow::bail!("{err}"),
};
if let Some(task) = adapter.will_start_server(&delegate, &mut cx) {
- if task.await.log_err().is_none() {
- return Ok(None);
- }
+ task.await?;
}
- Ok(Some(lsp::LanguageServer::new(
+ lsp::LanguageServer::new(
+ stderr_capture,
server_id,
binary,
&root_path,
adapter.code_action_kinds(),
cx,
- )?))
+ )
})
};
@@ -136,6 +136,7 @@ struct Error {
impl LanguageServer {
pub fn new(
+ stderr_capture: Arc<Mutex<Option<String>>>,
server_id: LanguageServerId,
binary: LanguageServerBinary,
root_path: &Path,
@@ -165,6 +166,7 @@ impl LanguageServer {
stdin,
stdout,
Some(stderr),
+ stderr_capture,
Some(server),
root_path,
code_action_kinds,
@@ -197,6 +199,7 @@ impl LanguageServer {
stdin: Stdin,
stdout: Stdout,
stderr: Option<Stderr>,
+ stderr_capture: Arc<Mutex<Option<String>>>,
server: Option<Child>,
root_path: &Path,
code_action_kinds: Option<Vec<CodeActionKind>>,
@@ -218,20 +221,23 @@ impl LanguageServer {
let io_handlers = Arc::new(Mutex::new(HashMap::default()));
let stdout_input_task = cx.spawn(|cx| {
- {
- Self::handle_input(
- stdout,
- on_unhandled_notification.clone(),
- notification_handlers.clone(),
- response_handlers.clone(),
- io_handlers.clone(),
- cx,
- )
- }
+ Self::handle_input(
+ stdout,
+ on_unhandled_notification.clone(),
+ notification_handlers.clone(),
+ response_handlers.clone(),
+ io_handlers.clone(),
+ cx,
+ )
.log_err()
});
let stderr_input_task = stderr
- .map(|stderr| cx.spawn(|_| Self::handle_stderr(stderr, io_handlers.clone()).log_err()))
+ .map(|stderr| {
+ cx.spawn(|_| {
+ Self::handle_stderr(stderr, io_handlers.clone(), stderr_capture.clone())
+ .log_err()
+ })
+ })
.unwrap_or_else(|| Task::Ready(Some(None)));
let input_task = cx.spawn(|_| async move {
let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
@@ -353,12 +359,14 @@ impl LanguageServer {
async fn handle_stderr<Stderr>(
stderr: Stderr,
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
+ stderr_capture: Arc<Mutex<Option<String>>>,
) -> anyhow::Result<()>
where
Stderr: AsyncRead + Unpin + Send + 'static,
{
let mut stderr = BufReader::new(stderr);
let mut buffer = Vec::new();
+
loop {
buffer.clear();
stderr.read_until(b'\n', &mut buffer).await?;
@@ -367,6 +375,10 @@ impl LanguageServer {
for handler in io_handlers.lock().values_mut() {
handler(IoKind::StdErr, message);
}
+
+ if let Some(stderr) = stderr_capture.lock().as_mut() {
+ stderr.push_str(message);
+ }
}
// Don't starve the main thread when receiving lots of messages at once.
@@ -938,6 +950,7 @@ impl LanguageServer {
stdin_writer,
stdout_reader,
None::<async_pipe::PipeReader>,
+ Arc::new(Mutex::new(None)),
None,
Path::new("/"),
None,
@@ -950,6 +963,7 @@ impl LanguageServer {
stdout_writer,
stdin_reader,
None::<async_pipe::PipeReader>,
+ Arc::new(Mutex::new(None)),
None,
Path::new("/"),
None,
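
The `LanguageServer` hunks above thread a shared `Arc<Mutex<Option<String>>>` through construction so that everything the server writes to stderr is accumulated while the buffer exists, surfaced if startup fails, and dropped once the server comes up. A minimal sketch of the same capture pattern using only `std` types; the function and variable names here are illustrative, not the actual zed API:

```rust
use std::io::{BufRead, BufReader, Read};
use std::sync::{Arc, Mutex};

/// Read lines from `stderr`, appending them to the shared buffer while it exists.
fn capture_stderr<R: Read>(stderr: R, capture: Arc<Mutex<Option<String>>>) -> std::io::Result<()> {
    let reader = BufReader::new(stderr);
    for line in reader.lines() {
        let line = line?;
        if let Some(buf) = capture.lock().unwrap().as_mut() {
            buf.push_str(&line);
            buf.push('\n');
        }
    }
    Ok(())
}

fn main() {
    let capture = Arc::new(Mutex::new(Some(String::new())));

    // Stand-in for a child process's stderr stream.
    let fake_stderr = "error: missing node_modules\n".as_bytes();
    capture_stderr(fake_stderr, capture.clone()).unwrap();

    // On a startup failure the caller takes whatever was captured so far...
    let captured = capture.lock().unwrap().take();
    assert_eq!(captured.as_deref(), Some("error: missing node_modules\n"));

    // ...and on success it takes (and discards) the buffer so capture stops.
    assert!(capture.lock().unwrap().is_none());
}
```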
@@ -0,0 +1,80 @@
+[package]
+name = "multi_buffer"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/multi_buffer.rs"
+doctest = false
+
+[features]
+test-support = [
+ "copilot/test-support",
+ "text/test-support",
+ "language/test-support",
+ "gpui/test-support",
+ "util/test-support",
+ "tree-sitter-rust",
+ "tree-sitter-typescript"
+]
+
+[dependencies]
+client = { path = "../client" }
+clock = { path = "../clock" }
+collections = { path = "../collections" }
+context_menu = { path = "../context_menu" }
+git = { path = "../git" }
+gpui = { path = "../gpui" }
+language = { path = "../language" }
+lsp = { path = "../lsp" }
+rich_text = { path = "../rich_text" }
+settings = { path = "../settings" }
+snippet = { path = "../snippet" }
+sum_tree = { path = "../sum_tree" }
+text = { path = "../text" }
+theme = { path = "../theme" }
+util = { path = "../util" }
+
+aho-corasick = "1.1"
+anyhow.workspace = true
+convert_case = "0.6.0"
+futures.workspace = true
+indoc = "1.0.4"
+itertools = "0.10"
+lazy_static.workspace = true
+log.workspace = true
+ordered-float.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+pulldown-cmark = { version = "0.9.2", default-features = false }
+rand.workspace = true
+schemars.workspace = true
+serde.workspace = true
+serde_derive.workspace = true
+smallvec.workspace = true
+smol.workspace = true
+
+tree-sitter-rust = { workspace = true, optional = true }
+tree-sitter-html = { workspace = true, optional = true }
+tree-sitter-typescript = { workspace = true, optional = true }
+
+[dev-dependencies]
+copilot = { path = "../copilot", features = ["test-support"] }
+text = { path = "../text", features = ["test-support"] }
+language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
+gpui = { path = "../gpui", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
+workspace = { path = "../workspace", features = ["test-support"] }
+
+ctor.workspace = true
+env_logger.workspace = true
+rand.workspace = true
+unindent.workspace = true
+tree-sitter.workspace = true
+tree-sitter-rust.workspace = true
+tree-sitter-html.workspace = true
+tree-sitter-typescript.workspace = true
@@ -8,9 +8,9 @@ use sum_tree::Bias;
#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
- pub(crate) buffer_id: Option<u64>,
- pub(crate) excerpt_id: ExcerptId,
- pub(crate) text_anchor: text::Anchor,
+ pub buffer_id: Option<u64>,
+ pub excerpt_id: ExcerptId,
+ pub text_anchor: text::Anchor,
}
impl Anchor {
@@ -30,10 +30,6 @@ impl Anchor {
}
}
- pub fn excerpt_id(&self) -> ExcerptId {
- self.excerpt_id
- }
-
pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot);
if excerpt_id_cmp.is_eq() {
@@ -303,7 +303,7 @@ impl MultiBuffer {
self.snapshot.borrow().clone()
}
- pub(crate) fn read(&self, cx: &AppContext) -> Ref<MultiBufferSnapshot> {
+ pub fn read(&self, cx: &AppContext) -> Ref<MultiBufferSnapshot> {
self.sync(cx);
self.snapshot.borrow()
}
@@ -589,7 +589,7 @@ impl MultiBuffer {
self.start_transaction_at(Instant::now(), cx)
}
- pub(crate) fn start_transaction_at(
+ pub fn start_transaction_at(
&mut self,
now: Instant,
cx: &mut ModelContext<Self>,
@@ -608,7 +608,7 @@ impl MultiBuffer {
self.end_transaction_at(Instant::now(), cx)
}
- pub(crate) fn end_transaction_at(
+ pub fn end_transaction_at(
&mut self,
now: Instant,
cx: &mut ModelContext<Self>,
@@ -1508,7 +1508,7 @@ impl MultiBuffer {
"untitled".into()
}
- #[cfg(test)]
+ #[cfg(any(test, feature = "test-support"))]
pub fn is_parsing(&self, cx: &AppContext) -> bool {
self.as_singleton().unwrap().read(cx).is_parsing()
}
@@ -3198,7 +3198,7 @@ impl MultiBufferSnapshot {
theme: Option<&SyntaxTheme>,
) -> Option<(u64, Vec<OutlineItem<Anchor>>)> {
let anchor = self.anchor_before(offset);
- let excerpt_id = anchor.excerpt_id();
+ let excerpt_id = anchor.excerpt_id;
let excerpt = self.excerpt(excerpt_id)?;
Some((
excerpt.buffer_id,
@@ -4129,17 +4129,13 @@ where
#[cfg(test)]
mod tests {
- use crate::editor_tests::init_test;
-
use super::*;
use futures::StreamExt;
use gpui::{AppContext, TestAppContext};
use language::{Buffer, Rope};
- use project::{FakeFs, Project};
use rand::prelude::*;
use settings::SettingsStore;
use std::{env, rc::Rc};
- use unindent::Unindent;
use util::test::sample_text;
#[gpui::test]
@@ -4838,190 +4834,6 @@ mod tests {
);
}
- #[gpui::test]
- async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
- use git::diff::DiffHunkStatus;
- init_test(cx, |_| {});
-
- let fs = FakeFs::new(cx.background());
- let project = Project::test(fs, [], cx).await;
-
- // buffer has two modified hunks with two rows each
- let buffer_1 = project
- .update(cx, |project, cx| {
- project.create_buffer(
- "
- 1.zero
- 1.ONE
- 1.TWO
- 1.three
- 1.FOUR
- 1.FIVE
- 1.six
- "
- .unindent()
- .as_str(),
- None,
- cx,
- )
- })
- .unwrap();
- buffer_1.update(cx, |buffer, cx| {
- buffer.set_diff_base(
- Some(
- "
- 1.zero
- 1.one
- 1.two
- 1.three
- 1.four
- 1.five
- 1.six
- "
- .unindent(),
- ),
- cx,
- );
- });
-
- // buffer has a deletion hunk and an insertion hunk
- let buffer_2 = project
- .update(cx, |project, cx| {
- project.create_buffer(
- "
- 2.zero
- 2.one
- 2.two
- 2.three
- 2.four
- 2.five
- 2.six
- "
- .unindent()
- .as_str(),
- None,
- cx,
- )
- })
- .unwrap();
- buffer_2.update(cx, |buffer, cx| {
- buffer.set_diff_base(
- Some(
- "
- 2.zero
- 2.one
- 2.one-and-a-half
- 2.two
- 2.three
- 2.four
- 2.six
- "
- .unindent(),
- ),
- cx,
- );
- });
-
- cx.foreground().run_until_parked();
-
- let multibuffer = cx.add_model(|cx| {
- let mut multibuffer = MultiBuffer::new(0);
- multibuffer.push_excerpts(
- buffer_1.clone(),
- [
- // excerpt ends in the middle of a modified hunk
- ExcerptRange {
- context: Point::new(0, 0)..Point::new(1, 5),
- primary: Default::default(),
- },
- // excerpt begins in the middle of a modified hunk
- ExcerptRange {
- context: Point::new(5, 0)..Point::new(6, 5),
- primary: Default::default(),
- },
- ],
- cx,
- );
- multibuffer.push_excerpts(
- buffer_2.clone(),
- [
- // excerpt ends at a deletion
- ExcerptRange {
- context: Point::new(0, 0)..Point::new(1, 5),
- primary: Default::default(),
- },
- // excerpt starts at a deletion
- ExcerptRange {
- context: Point::new(2, 0)..Point::new(2, 5),
- primary: Default::default(),
- },
- // excerpt fully contains a deletion hunk
- ExcerptRange {
- context: Point::new(1, 0)..Point::new(2, 5),
- primary: Default::default(),
- },
- // excerpt fully contains an insertion hunk
- ExcerptRange {
- context: Point::new(4, 0)..Point::new(6, 5),
- primary: Default::default(),
- },
- ],
- cx,
- );
- multibuffer
- });
-
- let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
-
- assert_eq!(
- snapshot.text(),
- "
- 1.zero
- 1.ONE
- 1.FIVE
- 1.six
- 2.zero
- 2.one
- 2.two
- 2.one
- 2.two
- 2.four
- 2.five
- 2.six"
- .unindent()
- );
-
- let expected = [
- (DiffHunkStatus::Modified, 1..2),
- (DiffHunkStatus::Modified, 2..3),
- //TODO: Define better when and where removed hunks show up at range extremities
- (DiffHunkStatus::Removed, 6..6),
- (DiffHunkStatus::Removed, 8..8),
- (DiffHunkStatus::Added, 10..11),
- ];
-
- assert_eq!(
- snapshot
- .git_diff_hunks_in_range(0..12)
- .map(|hunk| (hunk.status(), hunk.buffer_range))
- .collect::<Vec<_>>(),
- &expected,
- );
-
- assert_eq!(
- snapshot
- .git_diff_hunks_in_range_rev(0..12)
- .map(|hunk| (hunk.status(), hunk.buffer_range))
- .collect::<Vec<_>>(),
- expected
- .iter()
- .rev()
- .cloned()
- .collect::<Vec<_>>()
- .as_slice(),
- );
- }
-
#[gpui::test(iterations = 100)]
fn test_random_multibuffer(cx: &mut AppContext, mut rng: StdRng) {
let operations = env::var("OPERATIONS")
@@ -27,6 +27,7 @@ serde_derive.workspace = true
serde_json.workspace = true
anyhow.workspace = true
futures.workspace = true
+parking_lot.workspace = true
[dev-dependencies]
language = { path = "../language", features = ["test-support"] }
@@ -67,91 +67,39 @@ impl Prettier {
starting_path: Option<LocateStart>,
fs: Arc<dyn Fs>,
) -> anyhow::Result<PathBuf> {
+ fn is_node_modules(path_component: &std::path::Component<'_>) -> bool {
+ path_component.as_os_str().to_string_lossy() == "node_modules"
+ }
+
let paths_to_check = match starting_path.as_ref() {
Some(starting_path) => {
let worktree_root = starting_path
.worktree_root_path
.components()
.into_iter()
- .take_while(|path_component| {
- path_component.as_os_str().to_string_lossy() != "node_modules"
- })
+ .take_while(|path_component| !is_node_modules(path_component))
.collect::<PathBuf>();
-
if worktree_root != starting_path.worktree_root_path.as_ref() {
vec![worktree_root]
} else {
- let (worktree_root_metadata, start_path_metadata) = if starting_path
- .starting_path
- .as_ref()
- == Path::new("")
- {
- let worktree_root_data =
- fs.metadata(&worktree_root).await.with_context(|| {
- format!(
- "FS metadata fetch for worktree root path {worktree_root:?}",
- )
- })?;
- (worktree_root_data.unwrap_or_else(|| {
- panic!("cannot query prettier for non existing worktree root at {worktree_root_data:?}")
- }), None)
+ if starting_path.starting_path.as_ref() == Path::new("") {
+ worktree_root
+ .parent()
+ .map(|path| vec![path.to_path_buf()])
+ .unwrap_or_default()
} else {
- let full_starting_path = worktree_root.join(&starting_path.starting_path);
- let (worktree_root_data, start_path_data) = futures::try_join!(
- fs.metadata(&worktree_root),
- fs.metadata(&full_starting_path),
- )
- .with_context(|| {
- format!("FS metadata fetch for starting path {full_starting_path:?}",)
- })?;
- (
- worktree_root_data.unwrap_or_else(|| {
- panic!("cannot query prettier for non existing worktree root at {worktree_root_data:?}")
- }),
- start_path_data,
- )
- };
-
- match start_path_metadata {
- Some(start_path_metadata) => {
- anyhow::ensure!(worktree_root_metadata.is_dir,
- "For non-empty start path, worktree root {starting_path:?} should be a directory");
- anyhow::ensure!(
- !start_path_metadata.is_dir,
- "For non-empty start path, it should not be a directory {starting_path:?}"
- );
- anyhow::ensure!(
- !start_path_metadata.is_symlink,
- "For non-empty start path, it should not be a symlink {starting_path:?}"
- );
-
- let file_to_format = starting_path.starting_path.as_ref();
- let mut paths_to_check = VecDeque::from(vec![worktree_root.clone()]);
- let mut current_path = worktree_root;
- for path_component in file_to_format.components().into_iter() {
- current_path = current_path.join(path_component);
- paths_to_check.push_front(current_path.clone());
- if path_component.as_os_str().to_string_lossy() == "node_modules" {
- break;
- }
+ let file_to_format = starting_path.starting_path.as_ref();
+ let mut paths_to_check = VecDeque::new();
+ let mut current_path = worktree_root;
+ for path_component in file_to_format.components().into_iter() {
+ let new_path = current_path.join(path_component);
+ let old_path = std::mem::replace(&mut current_path, new_path);
+ paths_to_check.push_front(old_path);
+ if is_node_modules(&path_component) {
+ break;
}
- paths_to_check.pop_front(); // last one is the file itself or node_modules, skip it
- Vec::from(paths_to_check)
- }
- None => {
- anyhow::ensure!(
- !worktree_root_metadata.is_dir,
- "For empty start path, worktree root should not be a directory {starting_path:?}"
- );
- anyhow::ensure!(
- !worktree_root_metadata.is_symlink,
- "For empty start path, worktree root should not be a symlink {starting_path:?}"
- );
- worktree_root
- .parent()
- .map(|path| vec![path.to_path_buf()])
- .unwrap_or_default()
}
+ Vec::from(paths_to_check)
}
}
}
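
The prettier change above replaces the metadata probing with a plain path walk: starting from the worktree root, each component of the file's relative path is joined in turn, every intermediate directory is pushed to the front of the candidate list, and the walk stops at the first `node_modules` component. A minimal sketch of that walk under the same assumptions (the `paths_to_check` helper here is hypothetical, not the `Prettier` method):

```rust
use std::collections::VecDeque;
use std::path::{Component, Path, PathBuf};

fn is_node_modules(component: &Component<'_>) -> bool {
    component.as_os_str().to_string_lossy() == "node_modules"
}

/// Directories to search for a prettier config, nearest-first,
/// starting at the worktree root and stopping at node_modules.
fn paths_to_check(worktree_root: &Path, file_to_format: &Path) -> Vec<PathBuf> {
    let mut paths = VecDeque::new();
    let mut current = worktree_root.to_path_buf();
    for component in file_to_format.components() {
        let next = current.join(component);
        let previous = std::mem::replace(&mut current, next);
        paths.push_front(previous);
        if is_node_modules(&component) {
            break;
        }
    }
    Vec::from(paths)
}

fn main() {
    let paths = paths_to_check(Path::new("/root"), Path::new("src/node_modules/pkg/index.js"));
    // Nearest directory first; node_modules itself is excluded.
    assert_eq!(
        paths,
        vec![PathBuf::from("/root/src"), PathBuf::from("/root")]
    );
}
```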
@@ -210,6 +158,7 @@ impl Prettier {
.spawn(async move { node.binary_path().await })
.await?;
let server = LanguageServer::new(
+ Arc::new(parking_lot::Mutex::new(None)),
server_id,
LanguageServerBinary {
path: node_path,
@@ -52,6 +52,7 @@ use lsp::{
};
use lsp_command::*;
use node_runtime::NodeRuntime;
+use parking_lot::Mutex;
use postage::watch;
use prettier::{LocateStart, Prettier};
use project_settings::{LspSettings, ProjectSettings};
@@ -90,6 +91,8 @@ pub use fs::*;
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use worktree::*;
+const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
+
pub trait Item {
fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
@@ -2721,12 +2724,18 @@ impl Project {
language: Arc<Language>,
cx: &mut ModelContext<Self>,
) {
+ if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
+ return;
+ }
+
let key = (worktree_id, adapter.name.clone());
if self.language_server_ids.contains_key(&key) {
return;
}
+ let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
let pending_server = match self.languages.create_pending_language_server(
+ stderr_capture.clone(),
language.clone(),
adapter.clone(),
worktree_path,
@@ -2763,29 +2772,41 @@ impl Project {
.await;
match result {
- Ok(server) => server,
+ Ok(server) => {
+ stderr_capture.lock().take();
+ Some(server)
+ }
Err(err) => {
- log::error!("failed to start language server {:?}: {}", server_name, err);
-
- if let Some(this) = this.upgrade(&cx) {
- if let Some(container_dir) = container_dir {
- let installation_test_binary = adapter
- .installation_test_binary(container_dir.to_path_buf())
- .await;
-
- this.update(&mut cx, |_, cx| {
- Self::check_errored_server(
- language,
- adapter,
- server_id,
- installation_test_binary,
- cx,
- )
- });
- }
+ log::error!("failed to start language server {server_name:?}: {err}");
+ log::error!("server stderr: {:?}", stderr_capture.lock().take());
+
+ let this = this.upgrade(&cx)?;
+ let container_dir = container_dir?;
+
+ let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
+ if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
+ let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
+ log::error!(
+ "Hit {max} max reinstallation attempts for {server_name:?}"
+ );
+ return None;
}
+ let installation_test_binary = adapter
+ .installation_test_binary(container_dir.to_path_buf())
+ .await;
+
+ this.update(&mut cx, |_, cx| {
+ Self::check_errored_server(
+ language,
+ adapter,
+ server_id,
+ installation_test_binary,
+ cx,
+ )
+ });
+
None
}
}
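
The language-server hunks above bound reinstall attempts: `CachedLspAdapter` gains an `AtomicU64` counter, and the project gives up once it passes `MAX_SERVER_REINSTALL_ATTEMPT_COUNT`. A minimal sketch of the counting pattern with `fetch_add`, using a made-up `should_retry_install` helper rather than the actual project code:

```rust
use std::sync::atomic::{AtomicU64, Ordering::SeqCst};

const MAX_ATTEMPTS: u64 = 4;

struct Adapter {
    reinstall_attempt_count: AtomicU64,
}

impl Adapter {
    /// Returns true if another reinstall should be attempted,
    /// incrementing the counter as a side effect.
    fn should_retry_install(&self) -> bool {
        // fetch_add returns the previous value, so the first call sees 0.
        let attempts_so_far = self.reinstall_attempt_count.fetch_add(1, SeqCst);
        attempts_so_far < MAX_ATTEMPTS
    }
}

fn main() {
    let adapter = Adapter {
        reinstall_attempt_count: AtomicU64::new(0),
    };
    let allowed: Vec<_> = (0..6).map(|_| adapter.should_retry_install()).collect();
    // Four retries are allowed, then the adapter refuses further reinstalls.
    assert_eq!(allowed, vec![true, true, true, true, false, false]);
}
```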
@@ -2862,20 +2883,17 @@ impl Project {
server_id: LanguageServerId,
key: (WorktreeId, LanguageServerName),
cx: &mut AsyncAppContext,
- ) -> Result<Option<Arc<LanguageServer>>> {
- let setup = Self::setup_pending_language_server(
+ ) -> Result<Arc<LanguageServer>> {
+ let language_server = Self::setup_pending_language_server(
this,
override_initialization_options,
pending_server,
adapter.clone(),
server_id,
cx,
- );
+ )
+ .await?;
- let language_server = match setup.await? {
- Some(language_server) => language_server,
- None => return Ok(None),
- };
let this = match this.upgrade(cx) {
Some(this) => this,
None => return Err(anyhow!("failed to upgrade project handle")),
@@ -2892,7 +2910,7 @@ impl Project {
)
})?;
- Ok(Some(language_server))
+ Ok(language_server)
}
async fn setup_pending_language_server(
@@ -2902,12 +2920,9 @@ impl Project {
adapter: Arc<CachedLspAdapter>,
server_id: LanguageServerId,
cx: &mut AsyncAppContext,
- ) -> Result<Option<Arc<LanguageServer>>> {
+ ) -> Result<Arc<LanguageServer>> {
let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await;
- let language_server = match pending_server.task.await? {
- Some(server) => server,
- None => return Ok(None),
- };
+ let language_server = pending_server.task.await?;
language_server
.on_notification::<lsp::notification::PublishDiagnostics, _>({
@@ -2978,6 +2993,7 @@ impl Project {
},
)
.detach();
+
language_server
.on_request::<lsp::request::RegisterCapability, _, _>({
move |params, mut cx| async move {
@@ -3043,6 +3059,7 @@ impl Project {
}
})
.detach();
+
let mut initialization_options = adapter.adapter.initialization_options().await;
match (&mut initialization_options, override_options) {
(Some(initialization_options), Some(override_options)) => {
@@ -3062,7 +3079,7 @@ impl Project {
)
.ok();
- Ok(Some(language_server))
+ Ok(language_server)
}
fn insert_newly_running_language_server(
@@ -4,7 +4,7 @@ use collections::HashMap;
use gpui::{AppContext, AssetSource};
use serde_derive::Deserialize;
-use util::{iife, paths::PathExt};
+use util::{maybe, paths::PathExt};
#[derive(Deserialize, Debug)]
struct TypeConfig {
@@ -42,12 +42,12 @@ impl FileAssociations {
}
pub fn get_icon(path: &Path, cx: &AppContext) -> Arc<str> {
- iife!({
+ maybe!({
let this = cx.has_global::<Self>().then(|| cx.global::<Self>())?;
// FIXME: Associate a type with the languages and have the file's langauge
// override these associations
- iife!({
+ maybe!({
let suffix = path.icon_suffix()?;
this.suffixes
@@ -61,7 +61,7 @@ impl FileAssociations {
}
pub fn get_folder_icon(expanded: bool, cx: &AppContext) -> Arc<str> {
- iife!({
+ maybe!({
let this = cx.has_global::<Self>().then(|| cx.global::<Self>())?;
let key = if expanded {
@@ -78,7 +78,7 @@ impl FileAssociations {
}
pub fn get_chevron_icon(expanded: bool, cx: &AppContext) -> Arc<str> {
- iife!({
+ maybe!({
let this = cx.has_global::<Self>().then(|| cx.global::<Self>())?;
let key = if expanded {
@@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*;
mod macros;
-pub const PROTOCOL_VERSION: u32 = 65;
+pub const PROTOCOL_VERSION: u32 = 66;
@@ -1,5 +1,5 @@
use crate::http::HttpClient;
-use anyhow::{anyhow, Context, Result};
+use anyhow::{anyhow, bail, Context, Result};
use futures::AsyncReadExt;
use serde::Deserialize;
use std::sync::Arc;
@@ -46,6 +46,14 @@ pub async fn latest_github_release(
.await
.context("error reading latest release")?;
+ if response.status().is_client_error() {
+ let text = String::from_utf8_lossy(body.as_slice());
+ bail!(
+ "status error {}, response: {text:?}",
+ response.status().as_u16()
+ );
+ }
+
let releases = match serde_json::from_slice::<Vec<GithubRelease>>(body.as_slice()) {
Ok(releases) => releases,
@@ -349,19 +349,19 @@ pub fn unzip_option<T, U>(option: Option<(T, U)>) -> (Option<T>, Option<U>) {
}
}
-/// Immediately invoked function expression. Good for using the ? operator
+/// Evaluates to an immediately invoked function expression. Good for using the ? operator
/// in functions which do not return an Option or Result
#[macro_export]
-macro_rules! iife {
+macro_rules! maybe {
($block:block) => {
(|| $block)()
};
}
-/// Async Immediately invoked function expression. Good for using the ? operator
-/// in functions which do not return an Option or Result. Async version of above
+/// Evaluates to an immediately invoked function expression. Good for using the ? operator
+/// in functions which do not return an Option or Result, but async.
#[macro_export]
-macro_rules! async_iife {
+macro_rules! async_maybe {
($block:block) => {
(|| async move { $block })()
};
@@ -434,7 +434,7 @@ mod tests {
None
}
- let foo = iife!({
+ let foo = maybe!({
option_returning_function()?;
Some(())
});
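
The `iife!`/`async_iife!` macros are renamed to `maybe!`/`async_maybe!` with unchanged behavior: they wrap a block in an immediately invoked closure so `?` can be used inside functions that do not themselves return `Option` or `Result` (the async variant yields a future instead). A small self-contained sketch of the sync form, with the macro written out inline rather than imported from the util crate:

```rust
// Same shape as the renamed util macro: wrap a block in an
// immediately invoked closure so `?` has something to return into.
macro_rules! maybe {
    ($block:block) => {
        (|| $block)()
    };
}

fn first_char(input: Option<&str>) -> char {
    // `?` would not compile directly in a fn returning `char`;
    // inside maybe! it early-returns from the closure instead.
    let parsed: Option<char> = maybe!({
        let text = input?;
        text.chars().next()
    });
    parsed.unwrap_or('?')
}

fn main() {
    assert_eq!(first_char(Some("zed")), 'z');
    assert_eq!(first_char(None), '?');
}
```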
@@ -16,7 +16,7 @@ actions!(branches, [OpenRecent]);
pub fn init(cx: &mut AppContext) {
Picker::<BranchListDelegate>::init(cx);
- cx.add_async_action(toggle);
+ cx.add_action(toggle);
}
pub type BranchList = Picker<BranchListDelegate>;
@@ -24,30 +24,29 @@ pub fn build_branch_list(
workspace: ViewHandle<Workspace>,
cx: &mut ViewContext<BranchList>,
) -> Result<BranchList> {
- Ok(Picker::new(BranchListDelegate::new(workspace, 29, cx)?, cx)
- .with_theme(|theme| theme.picker.clone()))
+ let delegate = workspace.read_with(cx, |workspace, cx| {
+ BranchListDelegate::new(workspace, cx.handle(), 29, cx)
+ })?;
+
+ Ok(Picker::new(delegate, cx).with_theme(|theme| theme.picker.clone()))
}
fn toggle(
- _: &mut Workspace,
+ workspace: &mut Workspace,
_: &OpenRecent,
cx: &mut ViewContext<Workspace>,
-) -> Option<Task<Result<()>>> {
- Some(cx.spawn(|workspace, mut cx| async move {
- workspace.update(&mut cx, |workspace, cx| {
- // Modal branch picker has a longer trailoff than a popover one.
- let delegate = BranchListDelegate::new(cx.handle(), 70, cx)?;
- workspace.toggle_modal(cx, |_, cx| {
- cx.add_view(|cx| {
- Picker::new(delegate, cx)
- .with_theme(|theme| theme.picker.clone())
- .with_max_size(800., 1200.)
- })
- });
- Ok::<_, anyhow::Error>(())
- })??;
- Ok(())
- }))
+) -> Result<()> {
+ // Modal branch picker has a longer trailoff than a popover one.
+ let delegate = BranchListDelegate::new(workspace, cx.handle(), 70, cx)?;
+ workspace.toggle_modal(cx, |_, cx| {
+ cx.add_view(|cx| {
+ Picker::new(delegate, cx)
+ .with_theme(|theme| theme.picker.clone())
+ .with_max_size(800., 1200.)
+ })
+ });
+
+ Ok(())
}
pub struct BranchListDelegate {
@@ -62,15 +61,16 @@ pub struct BranchListDelegate {
impl BranchListDelegate {
fn new(
- workspace: ViewHandle<Workspace>,
+ workspace: &Workspace,
+ handle: ViewHandle<Workspace>,
branch_name_trailoff_after: usize,
cx: &AppContext,
) -> Result<Self> {
- let project = workspace.read(cx).project().read(&cx);
-
+ let project = workspace.project().read(&cx);
let Some(worktree) = project.visible_worktrees(cx).next() else {
bail!("Cannot update branch list as there are no visible worktrees")
};
+
let mut cwd = worktree.read(cx).abs_path().to_path_buf();
cwd.push(".git");
let Some(repo) = project.fs().open_repo(&cwd) else {
@@ -79,13 +79,14 @@ impl BranchListDelegate {
let all_branches = repo.lock().branches()?;
Ok(Self {
matches: vec![],
- workspace,
+ workspace: handle,
all_branches,
selected_index: 0,
last_query: Default::default(),
branch_name_trailoff_after,
})
}
+
fn display_error_toast(&self, message: String, cx: &mut ViewContext<BranchList>) {
const GIT_CHECKOUT_FAILURE_ID: usize = 2048;
self.workspace.update(cx, |model, ctx| {
@@ -1,7 +1,10 @@
use editor::scroll::VERTICAL_SCROLL_MARGIN;
use indoc::indoc;
use settings::SettingsStore;
-use std::ops::{Deref, DerefMut};
+use std::{
+ ops::{Deref, DerefMut},
+ panic, thread,
+};
use collections::{HashMap, HashSet};
use gpui::{geometry::vector::vec2f, ContextHandle};
@@ -59,12 +62,22 @@ pub struct NeovimBackedTestContext<'a> {
impl<'a> NeovimBackedTestContext<'a> {
pub async fn new(cx: &'a mut gpui::TestAppContext) -> NeovimBackedTestContext<'a> {
- let function_name = cx.function_name.clone();
- let cx = VimTestContext::new(cx, true).await;
+ // rust stores the name of the test on the current thread.
+ // We use this to automatically name a file that will store
+ // the neovim connection's requests/responses so that we can
+ // run without neovim on CI.
+ let thread = thread::current();
+ let test_name = thread
+ .name()
+ .expect("thread is not named")
+ .split(":")
+ .last()
+ .unwrap()
+ .to_string();
Self {
- cx,
+ cx: VimTestContext::new(cx, true).await,
exemptions: Default::default(),
- neovim: NeovimConnection::new(function_name).await,
+ neovim: NeovimConnection::new(test_name).await,
last_set_state: None,
recent_keystrokes: Default::default(),
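
The comment added above explains the trick: the Rust test harness names each test's thread after the test path, so the vim test context can derive a stable recording file name without passing the test name through explicitly. A small sketch of the same extraction on harness-style thread names (the `short_test_name` helper is illustrative):

```rust
use std::thread;

/// Derive a short name like "my_test" from a thread name such as
/// "tests::nested::my_test", the form the test harness uses.
fn short_test_name(thread_name: &str) -> String {
    thread_name.split(':').last().unwrap_or(thread_name).to_string()
}

fn main() {
    // At runtime the name would come from the current thread:
    let _live = thread::current().name().map(short_test_name);

    // Parsing behaviour on a harness-style name:
    assert_eq!(short_test_name("collab::test_channels"), "test_channels");
    assert_eq!(short_test_name("main"), "main");
}
```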
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.110.0"
+version = "0.111.0"
publish = false
[lib]
@@ -2,8 +2,6 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
- <key>com.apple.developer.associated-domains</key>
- <array><string>applinks:zed.dev</string></array>
<key>com.apple.security.automation.apple-events</key>
<true/>
<key>com.apple.security.cs.allow-jit</key>
@@ -12,8 +10,14 @@
<true/>
<key>com.apple.security.device.camera</key>
<true/>
- <key>com.apple.security.keychain-access-groups</key>
- <array><string>MQ55VZLNZQ.dev.zed.Shared</string></array>
+ <key>com.apple.security.personal-information.addressbook</key>
+ <true/>
+ <key>com.apple.security.personal-information.calendars</key>
+ <true/>
+ <key>com.apple.security.personal-information.location</key>
+ <true/>
+ <key>com.apple.security.personal-information.photos-library</key>
+ <true/>
<!-- <key>com.apple.security.cs.disable-library-validation</key>
<true/> -->
</dict>
@@ -19,7 +19,7 @@ use std::{
},
};
use util::{
- async_iife,
+ async_maybe,
fs::remove_matching,
github::{latest_github_release, GitHubLspBinaryVersion},
ResultExt,
@@ -421,7 +421,7 @@ impl LspAdapter for NextLspAdapter {
}
async fn get_cached_server_binary_next(container_dir: PathBuf) -> Option<LanguageServerBinary> {
- async_iife!({
+ async_maybe!({
let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
@@ -8,7 +8,7 @@ use lsp::LanguageServerBinary;
use smol::fs;
use std::{any::Any, env::consts, path::PathBuf};
use util::{
- async_iife,
+ async_maybe,
github::{latest_github_release, GitHubLspBinaryVersion},
ResultExt,
};
@@ -106,7 +106,7 @@ impl super::LspAdapter for LuaLspAdapter {
}
async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {
- async_iife!({
+ async_maybe!({
let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
@@ -1,4 +1,4 @@
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, ensure, Result};
use async_trait::async_trait;
use futures::StreamExt;
pub use language::*;
@@ -98,7 +98,10 @@ impl super::LspAdapter for VueLspAdapter {
)
.await?;
}
- assert!(fs::metadata(&server_path).await.is_ok());
+ ensure!(
+ fs::metadata(&server_path).await.is_ok(),
+ "@vue/language-server package installation failed"
+ );
if fs::metadata(&ts_path).await.is_err() {
self.node
.npm_install_packages(
@@ -108,7 +111,10 @@ impl super::LspAdapter for VueLspAdapter {
.await?;
}
- assert!(fs::metadata(&ts_path).await.is_ok());
+ ensure!(
+ fs::metadata(&ts_path).await.is_ok(),
+ "typescript for Vue package installation failed"
+ );
*self.typescript_install_path.lock() = Some(ts_path);
Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
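
The Vue adapter hunks above swap `assert!` for anyhow's `ensure!`, so a failed package install surfaces as a returned error with a message instead of panicking inside the install task. A minimal sketch of the difference, with a simple `exists()` check standing in for the `fs::metadata` call:

```rust
use anyhow::{ensure, Result};
use std::path::Path;

fn verify_install(server_path: &Path) -> Result<()> {
    // assert!(server_path.exists()) would abort the task with a panic;
    // ensure! turns the same condition into a recoverable error instead.
    ensure!(
        server_path.exists(),
        "@vue/language-server package installation failed: {} is missing",
        server_path.display()
    );
    Ok(())
}

fn main() {
    let err = verify_install(Path::new("/definitely/not/installed")).unwrap_err();
    assert!(err.to_string().contains("installation failed"));
}
```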
@@ -34,7 +34,7 @@ use std::{
Arc, Weak,
},
thread,
- time::{Duration, SystemTime, UNIX_EPOCH},
+ time::{SystemTime, UNIX_EPOCH},
};
use util::{
channel::{parse_zed_link, ReleaseChannel},
@@ -684,7 +684,7 @@ fn load_embedded_fonts(app: &App) {
#[cfg(debug_assertions)]
async fn watch_themes(fs: Arc<dyn Fs>, mut cx: AsyncAppContext) -> Option<()> {
let mut events = fs
- .watch("styles/src".as_ref(), Duration::from_millis(100))
+ .watch("styles/src".as_ref(), std::time::Duration::from_millis(100))
.await;
while (events.next().await).is_some() {
let output = Command::new("npm")
@@ -710,7 +710,7 @@ async fn watch_languages(fs: Arc<dyn Fs>, languages: Arc<LanguageRegistry>) -> O
let mut events = fs
.watch(
"crates/zed/src/languages".as_ref(),
- Duration::from_millis(100),
+ std::time::Duration::from_millis(100),
)
.await;
while (events.next().await).is_some() {
@@ -725,7 +725,7 @@ fn watch_file_types(fs: Arc<dyn Fs>, cx: &mut AppContext) {
let mut events = fs
.watch(
"assets/icons/file_icons/file_types.json".as_ref(),
- Duration::from_millis(100),
+ std::time::Duration::from_millis(100),
)
.await;
while (events.next().await).is_some() {
@@ -147,8 +147,9 @@ if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTAR
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k "$MACOS_CERTIFICATE_PASSWORD" zed.keychain
# sequence of codesign commands modeled after this example: https://developer.apple.com/forums/thread/701514
- /usr/bin/codesign --force --timestamp --sign "Zed Industries, Inc." "${app_path}/Contents/Frameworks/WebRTC.framework" -v
- /usr/bin/codesign --force --timestamp --options runtime --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/cli" -v
+ /usr/bin/codesign --deep --force --timestamp --sign "Zed Industries, Inc." "${app_path}/Contents/Frameworks/WebRTC.framework" -v
+ /usr/bin/codesign --deep --force --timestamp --options runtime --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/cli" -v
+ /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/zed" -v
/usr/bin/codesign --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}" -v
security default-keychain -s login.keychain